| Field | Type |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
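The quality-signal columns are what make rows like the ones below filterable before training. A minimal sketch of querying them, assuming the rows are available locally as Parquet (the file name, thresholds, and use of pandas are illustrative assumptions, not part of the dataset itself):

```python
import pandas as pd

# Hypothetical local snapshot of the rows described by the schema above.
df = pd.read_parquet("records.parquet")

# Keep Python files that are not flagged as auto-generated and whose
# characters are not dominated by duplicated 10-grams.
mask = (
    (df["lang"] == "Python")
    & (df["qsc_code_cate_autogen_quality_signal"] == 0)
    & (df["qsc_code_frac_chars_dupe_10grams_quality_signal"] < 0.5)
)
print(df.loc[mask, ["max_stars_repo_name", "size", "hits"]].head())
```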
**Record 1**

- hexsha: 310a1f75f8e994cae384289fcac62edd402ff301
- size: 29,315
- ext: py
- lang: Python
- max_stars_repo_path: tests/ignite/metrics/test_recall.py
- max_stars_repo_name: sherry0219/ignite
- max_stars_repo_head_hexsha: a4617c6d24f5c095de4e99ba82f6e130350fa2a2
- max_stars_repo_licenses: ["BSD-3-Clause"]
- max_stars_count: 1
- max_stars_repo_stars_event_min_datetime: 2020-09-18T05:16:23.000Z
- max_stars_repo_stars_event_max_datetime: 2020-09-18T05:16:23.000Z
- max_issues_repo_path: tests/ignite/metrics/test_recall.py
- max_issues_repo_name: ANUBHAVNATANI/ignite
- max_issues_repo_head_hexsha: e96203f05a5d2da9226169fbab13d56ece675e41
- max_issues_repo_licenses: ["BSD-3-Clause"]
- max_issues_count: null
- max_issues_repo_issues_event_min_datetime: null
- max_issues_repo_issues_event_max_datetime: null
- max_forks_repo_path: tests/ignite/metrics/test_recall.py
- max_forks_repo_name: ANUBHAVNATANI/ignite
- max_forks_repo_head_hexsha: e96203f05a5d2da9226169fbab13d56ece675e41
- max_forks_repo_licenses: ["BSD-3-Clause"]
- max_forks_count: 1
- max_forks_repo_forks_event_min_datetime: 2020-01-30T21:20:28.000Z
- max_forks_repo_forks_event_max_datetime: 2020-01-30T21:20:28.000Z

content:
```python
import pytest
import warnings

from sklearn.metrics import recall_score
from sklearn.exceptions import UndefinedMetricWarning

from ignite.exceptions import NotComputableError
from ignite.metrics import Recall

import torch

torch.manual_seed(12)


def test_no_update():
    recall = Recall()
    with pytest.raises(NotComputableError):
        recall.compute()

    recall = Recall(is_multilabel=True, average=True)
    with pytest.raises(NotComputableError):
        recall.compute()


def test_binary_wrong_inputs():
    re = Recall()

    with pytest.raises(ValueError):
        # y has not only 0 or 1 values
        re.update((torch.randint(0, 2, size=(10,)),
                   torch.arange(0, 10).type(torch.LongTensor)))

    with pytest.raises(ValueError):
        # y_pred values are not thresholded to 0, 1 values
        re.update((torch.rand(10, 1),
                   torch.randint(0, 2, size=(10,)).type(torch.LongTensor)))

    with pytest.raises(ValueError):
        # incompatible shapes
        re.update((torch.randint(0, 2, size=(10,)),
                   torch.randint(0, 2, size=(10, 5)).type(torch.LongTensor)))

    with pytest.raises(ValueError):
        # incompatible shapes
        re.update((torch.randint(0, 2, size=(10, 5, 6)),
                   torch.randint(0, 2, size=(10,)).type(torch.LongTensor)))

    with pytest.raises(ValueError):
        # incompatible shapes
        re.update((torch.randint(0, 2, size=(10,)),
                   torch.randint(0, 2, size=(10, 5, 6)).type(torch.LongTensor)))


def test_binary_input_N():
    # Binary accuracy on input of shape (N, 1) or (N, )
    def _test(average):
        re = Recall(average=average)

        y_pred = torch.randint(0, 2, size=(10, 1))
        y = torch.randint(0, 2, size=(10,)).type(torch.LongTensor)
        re.update((y_pred, y))
        np_y = y.numpy().ravel()
        np_y_pred = y_pred.numpy().ravel()
        assert re._type == 'binary'
        assert isinstance(re.compute(), float if average else torch.Tensor)
        re_compute = re.compute() if average else re.compute().numpy()
        assert recall_score(np_y, np_y_pred, average='binary') == pytest.approx(re_compute)

        re.reset()
        y_pred = torch.randint(0, 2, size=(10,))
        y = torch.randint(0, 2, size=(10,)).type(torch.LongTensor)
        re.update((y_pred, y))
        np_y = y.numpy().ravel()
        np_y_pred = y_pred.numpy().ravel()
        assert re._type == 'binary'
        assert isinstance(re.compute(), float if average else torch.Tensor)
        re_compute = re.compute() if average else re.compute().numpy()
        assert recall_score(np_y, np_y_pred, average='binary') == pytest.approx(re_compute)

        re.reset()
        y_pred = torch.Tensor([0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.51])
        y_pred = torch.round(y_pred)
        y = torch.randint(0, 2, size=(10,)).type(torch.LongTensor)
        re.update((y_pred, y))
        np_y = y.numpy().ravel()
        np_y_pred = y_pred.numpy().ravel()
        assert re._type == 'binary'
        assert isinstance(re.compute(), float if average else torch.Tensor)
        re_compute = re.compute() if average else re.compute().numpy()
        assert recall_score(np_y, np_y_pred, average='binary') == pytest.approx(re_compute)

        # Batched Updates
        re.reset()
        y_pred = torch.randint(0, 2, size=(100,))
        y = torch.randint(0, 2, size=(100,)).type(torch.LongTensor)

        batch_size = 16
        n_iters = y.shape[0] // batch_size + 1

        for i in range(n_iters):
            idx = i * batch_size
            re.update((y_pred[idx: idx + batch_size], y[idx: idx + batch_size]))

        np_y = y.numpy().ravel()
        np_y_pred = y_pred.numpy().ravel()
        assert re._type == 'binary'
        assert isinstance(re.compute(), float if average else torch.Tensor)
        re_compute = re.compute() if average else re.compute().numpy()
        assert recall_score(np_y, np_y_pred, average='binary') == pytest.approx(re_compute)

    for _ in range(5):
        _test(average=True)
        _test(average=False)


def test_binary_input_NL():
    # Binary accuracy on input of shape (N, L)
    def _test(average):
        re = Recall(average=average)

        y_pred = torch.randint(0, 2, size=(10, 5))
        y = torch.randint(0, 2, size=(10, 5)).type(torch.LongTensor)
        re.update((y_pred, y))
        np_y = y.numpy().ravel()
        np_y_pred = y_pred.numpy().ravel()
        assert re._type == 'binary'
        assert isinstance(re.compute(), float if average else torch.Tensor)
        pr_compute = re.compute() if average else re.compute().numpy()
        assert recall_score(np_y, np_y_pred, average='binary') == pytest.approx(pr_compute)

        re.reset()
        y_pred = torch.randint(0, 2, size=(10, 1, 5))
        y = torch.randint(0, 2, size=(10, 1, 5)).type(torch.LongTensor)
        re.update((y_pred, y))
        np_y = y.numpy().ravel()
        np_y_pred = y_pred.numpy().ravel()
        assert re._type == 'binary'
        assert isinstance(re.compute(), float if average else torch.Tensor)
        re_compute = re.compute() if average else re.compute().numpy()
        assert recall_score(np_y, np_y_pred, average='binary') == pytest.approx(re_compute)

        re = Recall(average=average)
        # Batched Updates
        re.reset()
        y_pred = torch.randint(0, 2, size=(100, 5))
        y = torch.randint(0, 2, size=(100, 1, 5)).type(torch.LongTensor)

        batch_size = 16
        n_iters = y.shape[0] // batch_size + 1

        for i in range(n_iters):
            idx = i * batch_size
            re.update((y_pred[idx:idx + batch_size], y[idx:idx + batch_size]))

        np_y = y.numpy().ravel()
        np_y_pred = y_pred.numpy().ravel()
        assert re._type == 'binary'
        assert isinstance(re.compute(), float if average else torch.Tensor)
        pr_compute = re.compute() if average else re.compute().numpy()
        assert recall_score(np_y, np_y_pred, average='binary') == pytest.approx(pr_compute)

    for _ in range(5):
        _test(average=True)
        _test(average=False)


def test_binary_input_NHW():
    # Binary accuracy on input of shape (N, H, W)
    def _test(average):
        re = Recall(average=average)

        y_pred = torch.randint(0, 2, size=(10, 12, 10))
        y = torch.randint(0, 2, size=(10, 12, 10)).type(torch.LongTensor)
        re.update((y_pred, y))
        np_y = y.numpy().ravel()
        np_y_pred = y_pred.numpy().ravel()
        assert re._type == 'binary'
        assert isinstance(re.compute(), float if average else torch.Tensor)
        re_compute = re.compute() if average else re.compute().numpy()
        assert recall_score(np_y, np_y_pred, average='binary') == pytest.approx(re_compute)

        re.reset()
        y_pred = torch.randint(0, 2, size=(10, 1, 12, 10))
        y = torch.randint(0, 2, size=(10, 1, 12, 10)).type(torch.LongTensor)
        re.update((y_pred, y))
        np_y = y.numpy().ravel()
        np_y_pred = y_pred.numpy().ravel()
        assert re._type == 'binary'
        assert isinstance(re.compute(), float if average else torch.Tensor)
        re_compute = re.compute() if average else re.compute().numpy()
        assert recall_score(np_y, np_y_pred, average='binary') == pytest.approx(re_compute)

        re = Recall(average=average)
        # Batched Updates
        re.reset()
        y_pred = torch.randint(0, 2, size=(100, 12, 10))
        y = torch.randint(0, 2, size=(100, 1, 12, 10)).type(torch.LongTensor)

        batch_size = 16
        n_iters = y.shape[0] // batch_size + 1

        for i in range(n_iters):
            idx = i * batch_size
            re.update((y_pred[idx:idx + batch_size], y[idx:idx + batch_size]))

        np_y = y.numpy().ravel()
        np_y_pred = y_pred.numpy().ravel()
        assert re._type == 'binary'
        assert isinstance(re.compute(), float if average else torch.Tensor)
        re_compute = re.compute() if average else re.compute().numpy()
        assert recall_score(np_y, np_y_pred, average='binary') == pytest.approx(re_compute)

    for _ in range(5):
        _test(average=True)
        _test(average=False)


def test_multiclass_wrong_inputs():
    re = Recall()

    with pytest.raises(ValueError):
        # incompatible shapes
        re.update((torch.rand(10, 5, 4), torch.randint(0, 2, size=(10,)).type(torch.LongTensor)))

    with pytest.raises(ValueError):
        # incompatible shapes
        re.update((torch.rand(10, 5, 6), torch.randint(0, 5, size=(10, 5)).type(torch.LongTensor)))

    with pytest.raises(ValueError):
        # incompatible shapes
        re.update((torch.rand(10), torch.randint(0, 5, size=(10, 5, 6)).type(torch.LongTensor)))

    re = Recall(average=True)

    with pytest.raises(ValueError):
        # incompatible shapes between two updates
        re.update((torch.rand(10, 5), torch.randint(0, 5, size=(10,)).type(torch.LongTensor)))
        re.update((torch.rand(10, 6), torch.randint(0, 5, size=(10,)).type(torch.LongTensor)))

    with pytest.raises(ValueError):
        # incompatible shapes between two updates
        re.update((torch.rand(10, 5, 12, 14), torch.randint(0, 5, size=(10, 12, 14)).type(torch.LongTensor)))
        re.update((torch.rand(10, 6, 12, 14), torch.randint(0, 5, size=(10, 12, 14)).type(torch.LongTensor)))

    re = Recall(average=False)

    with pytest.raises(ValueError):
        # incompatible shapes between two updates
        re.update((torch.rand(10, 5), torch.randint(0, 5, size=(10,)).type(torch.LongTensor)))
        re.update((torch.rand(10, 6), torch.randint(0, 5, size=(10,)).type(torch.LongTensor)))

    with pytest.raises(ValueError):
        # incompatible shapes between two updates
        re.update((torch.rand(10, 5, 12, 14), torch.randint(0, 5, size=(10, 12, 14)).type(torch.LongTensor)))
        re.update((torch.rand(10, 6, 12, 14), torch.randint(0, 5, size=(10, 12, 14)).type(torch.LongTensor)))


def test_multiclass_input_N():
    # Multiclass input data of shape (N, ) and (N, C)
    def _test(average):
        re = Recall(average=average)

        y_pred = torch.rand(20, 6)
        y = torch.randint(0, 6, size=(20,)).type(torch.LongTensor)
        re.update((y_pred, y))
        num_classes = y_pred.shape[1]
        np_y_pred = y_pred.argmax(dim=1).numpy().ravel()
        np_y = y.numpy().ravel()
        assert re._type == 'multiclass'
        assert isinstance(re.compute(), float if average else torch.Tensor)
        re_compute = re.compute() if average else re.compute().numpy()
        sk_average_parameter = 'macro' if average else None
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=UndefinedMetricWarning)
            sk_compute = recall_score(np_y, np_y_pred, labels=range(0, num_classes), average=sk_average_parameter)
            assert sk_compute == pytest.approx(re_compute)

        re.reset()
        y_pred = torch.rand(10, 4)
        y = torch.randint(0, 4, size=(10, 1)).type(torch.LongTensor)
        re.update((y_pred, y))
        num_classes = y_pred.shape[1]
        np_y_pred = y_pred.argmax(dim=1).numpy().ravel()
        np_y = y.numpy().ravel()
        assert re._type == 'multiclass'
        assert isinstance(re.compute(), float if average else torch.Tensor)
        re_compute = re.compute() if average else re.compute().numpy()
        sk_average_parameter = 'macro' if average else None
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=UndefinedMetricWarning)
            sk_compute = recall_score(np_y, np_y_pred, labels=range(0, num_classes), average=sk_average_parameter)
            assert sk_compute == pytest.approx(re_compute)

        # 2-classes
        re.reset()
        y_pred = torch.rand(10, 2)
        y = torch.randint(0, 2, size=(10, 1)).type(torch.LongTensor)
        re.update((y_pred, y))
        num_classes = y_pred.shape[1]
        np_y_pred = y_pred.argmax(dim=1).numpy().ravel()
        np_y = y.numpy().ravel()
        assert re._type == 'multiclass'
        assert isinstance(re.compute(), float if average else torch.Tensor)
        re_compute = re.compute() if average else re.compute().numpy()
        sk_average_parameter = 'macro' if average else None
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=UndefinedMetricWarning)
            sk_compute = recall_score(np_y, np_y_pred, labels=range(0, num_classes), average=sk_average_parameter)
            assert sk_compute == pytest.approx(re_compute)

        # Batched Updates
        re.reset()
        y_pred = torch.rand(100, 3)
        y = torch.randint(0, 3, size=(100,)).type(torch.LongTensor)

        batch_size = 16
        n_iters = y.shape[0] // batch_size + 1

        for i in range(n_iters):
            idx = i * batch_size
            re.update((y_pred[idx: idx + batch_size], y[idx: idx + batch_size]))

        num_classes = y_pred.shape[1]
        np_y = y.numpy().ravel()
        np_y_pred = y_pred.argmax(dim=1).numpy().ravel()
        assert re._type == 'multiclass'
        assert isinstance(re.compute(), float if average else torch.Tensor)
        re_compute = re.compute() if average else re.compute().numpy()
        sk_average_parameter = 'macro' if average else None
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=UndefinedMetricWarning)
            sk_compute = recall_score(np_y, np_y_pred, labels=range(0, num_classes), average=sk_average_parameter)
            assert sk_compute == pytest.approx(re_compute)

    for _ in range(5):
        _test(average=True)
        _test(average=False)


def test_multiclass_input_NL():
    # Multiclass input data of shape (N, L) and (N, C, L)
    def _test(average):
        re = Recall(average=average)

        y_pred = torch.rand(10, 5, 8)
        y = torch.randint(0, 5, size=(10, 8)).type(torch.LongTensor)
        re.update((y_pred, y))
        num_classes = y_pred.shape[1]
        np_y_pred = y_pred.argmax(dim=1).numpy().ravel()
        np_y = y.numpy().ravel()
        assert re._type == 'multiclass'
        assert isinstance(re.compute(), float if average else torch.Tensor)
        re_compute = re.compute() if average else re.compute().numpy()
        sk_average_parameter = 'macro' if average else None
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=UndefinedMetricWarning)
            assert recall_score(np_y, np_y_pred, average=sk_average_parameter) == pytest.approx(re_compute)

        re.reset()
        y_pred = torch.rand(15, 10, 8)
        y = torch.randint(0, 10, size=(15, 8)).type(torch.LongTensor)
        re.update((y_pred, y))
        num_classes = y_pred.shape[1]
        np_y_pred = y_pred.argmax(dim=1).numpy().ravel()
        np_y = y.numpy().ravel()
        assert re._type == 'multiclass'
        assert isinstance(re.compute(), float if average else torch.Tensor)
        re_compute = re.compute() if average else re.compute().numpy()
        sk_average_parameter = 'macro' if average else None
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=UndefinedMetricWarning)
            sk_compute = recall_score(np_y, np_y_pred, labels=range(0, num_classes), average=sk_average_parameter)
            assert sk_compute == pytest.approx(re_compute)

        # Batched Updates
        re.reset()
        y_pred = torch.rand(100, 8, 12)
        y = torch.randint(0, 8, size=(100, 12)).type(torch.LongTensor)

        batch_size = 16
        n_iters = y.shape[0] // batch_size + 1

        for i in range(n_iters):
            idx = i * batch_size
            re.update((y_pred[idx:idx + batch_size], y[idx:idx + batch_size]))

        num_classes = y_pred.shape[1]
        np_y = y.numpy().ravel()
        np_y_pred = y_pred.argmax(dim=1).numpy().ravel()
        assert re._type == 'multiclass'
        assert isinstance(re.compute(), float if average else torch.Tensor)
        re_compute = re.compute() if average else re.compute().numpy()
        sk_average_parameter = 'macro' if average else None
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=UndefinedMetricWarning)
            sk_compute = recall_score(np_y, np_y_pred, labels=range(0, num_classes), average=sk_average_parameter)
            assert sk_compute == pytest.approx(re_compute)

    for _ in range(5):
        _test(average=True)
        _test(average=False)


def test_multiclass_input_NHW():
    # Multiclass input data of shape (N, H, W, ...) and (N, C, H, W, ...)
    def _test(average):
        re = Recall(average=average)

        y_pred = torch.rand(10, 5, 18, 16)
        y = torch.randint(0, 5, size=(10, 18, 16)).type(torch.LongTensor)
        re.update((y_pred, y))
        num_classes = y_pred.shape[1]
        np_y_pred = y_pred.argmax(dim=1).numpy().ravel()
        np_y = y.numpy().ravel()
        assert re._type == 'multiclass'
        assert isinstance(re.compute(), float if average else torch.Tensor)
        re_compute = re.compute() if average else re.compute().numpy()
        sk_average_parameter = 'macro' if average else None
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=UndefinedMetricWarning)
            sk_compute = recall_score(np_y, np_y_pred, labels=range(0, num_classes), average=sk_average_parameter)
            assert sk_compute == pytest.approx(re_compute)

        re.reset()
        y_pred = torch.rand(10, 7, 20, 12)
        y = torch.randint(0, 7, size=(10, 20, 12)).type(torch.LongTensor)
        re.update((y_pred, y))
        num_classes = y_pred.shape[1]
        np_y_pred = y_pred.argmax(dim=1).numpy().ravel()
        np_y = y.numpy().ravel()
        assert re._type == 'multiclass'
        assert isinstance(re.compute(), float if average else torch.Tensor)
        re_compute = re.compute() if average else re.compute().numpy()
        sk_average_parameter = 'macro' if average else None
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=UndefinedMetricWarning)
            sk_compute = recall_score(np_y, np_y_pred, labels=range(0, num_classes), average=sk_average_parameter)
            assert sk_compute == pytest.approx(re_compute)

        # Batched Updates
        re.reset()
        y_pred = torch.rand(100, 10, 12, 14)
        y = torch.randint(0, 10, size=(100, 12, 14)).type(torch.LongTensor)

        batch_size = 16
        n_iters = y.shape[0] // batch_size + 1

        for i in range(n_iters):
            idx = i * batch_size
            re.update((y_pred[idx:idx + batch_size], y[idx:idx + batch_size]))

        num_classes = y_pred.shape[1]
        np_y = y.numpy().ravel()
        np_y_pred = y_pred.argmax(dim=1).numpy().ravel()
        assert re._type == 'multiclass'
        assert isinstance(re.compute(), float if average else torch.Tensor)
        re_compute = re.compute() if average else re.compute().numpy()
        sk_average_parameter = 'macro' if average else None
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=UndefinedMetricWarning)
            sk_compute = recall_score(np_y, np_y_pred, labels=range(0, num_classes), average=sk_average_parameter)
            assert sk_compute == pytest.approx(re_compute)

    for _ in range(5):
        _test(average=True)
        _test(average=False)


def test_multilabel_wrong_inputs():
    re = Recall(average=True, is_multilabel=True)

    with pytest.raises(ValueError):
        # incompatible shapes
        re.update((torch.randint(0, 2, size=(10,)), torch.randint(0, 2, size=(10,)).type(torch.LongTensor)))

    with pytest.raises(ValueError):
        # incompatible y_pred
        re.update((torch.rand(10, 5), torch.randint(0, 2, size=(10, 5)).type(torch.LongTensor)))

    with pytest.raises(ValueError):
        # incompatible y
        re.update((torch.randint(0, 5, size=(10, 5, 6)), torch.rand(10)))

    with pytest.raises(ValueError):
        # incompatible shapes between two updates
        re.update((torch.randint(0, 2, size=(20, 5)), torch.randint(0, 2, size=(20, 5)).type(torch.LongTensor)))
        re.update((torch.randint(0, 2, size=(20, 6)), torch.randint(0, 2, size=(20, 6)).type(torch.LongTensor)))


def to_numpy_multilabel(y):
    # reshapes input array to (N x ..., C)
    y = y.transpose(1, 0).numpy()
    num_classes = y.shape[0]
    y = y.reshape((num_classes, -1)).transpose(1, 0)
    return y


def test_multilabel_input_NC():
    def _test(average):
        re = Recall(average=average, is_multilabel=True)

        y_pred = torch.randint(0, 2, size=(20, 5))
        y = torch.randint(0, 2, size=(20, 5)).type(torch.LongTensor)
        re.update((y_pred, y))
        np_y_pred = to_numpy_multilabel(y_pred)
        np_y = to_numpy_multilabel(y)
        assert re._type == 'multilabel'
        re_compute = re.compute() if average else re.compute().mean().item()
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=UndefinedMetricWarning)
            assert recall_score(np_y, np_y_pred, average='samples') == pytest.approx(re_compute)

        re.reset()
        y_pred = torch.randint(0, 2, size=(10, 4))
        y = torch.randint(0, 2, size=(10, 4)).type(torch.LongTensor)
        re.update((y_pred, y))
        np_y_pred = y_pred.numpy()
        np_y = y.numpy()
        assert re._type == 'multilabel'
        re_compute = re.compute() if average else re.compute().mean().item()
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=UndefinedMetricWarning)
            assert recall_score(np_y, np_y_pred, average='samples') == pytest.approx(re_compute)

        # Batched Updates
        re.reset()
        y_pred = torch.randint(0, 2, size=(100, 4))
        y = torch.randint(0, 2, size=(100, 4)).type(torch.LongTensor)

        batch_size = 16
        n_iters = y.shape[0] // batch_size + 1

        for i in range(n_iters):
            idx = i * batch_size
            re.update((y_pred[idx: idx + batch_size], y[idx: idx + batch_size]))

        np_y = y.numpy()
        np_y_pred = y_pred.numpy()
        assert re._type == 'multilabel'
        re_compute = re.compute() if average else re.compute().mean().item()
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=UndefinedMetricWarning)
            assert recall_score(np_y, np_y_pred, average='samples') == pytest.approx(re_compute)

    for _ in range(5):
        _test(average=True)
        _test(average=False)

    re1 = Recall(is_multilabel=True, average=True)
    re2 = Recall(is_multilabel=True, average=False)
    y_pred = torch.randint(0, 2, size=(10, 4))
    y = torch.randint(0, 2, size=(10, 4)).type(torch.LongTensor)
    re1.update((y_pred, y))
    re2.update((y_pred, y))
    assert re1.compute() == pytest.approx(re2.compute().mean().item())


def test_multilabel_input_NCL():
    def _test(average):
        re = Recall(average=average, is_multilabel=True)

        y_pred = torch.randint(0, 2, size=(10, 5, 10))
        y = torch.randint(0, 2, size=(10, 5, 10)).type(torch.LongTensor)
        re.update((y_pred, y))
        np_y_pred = to_numpy_multilabel(y_pred)
        np_y = to_numpy_multilabel(y)
        assert re._type == 'multilabel'
        re_compute = re.compute() if average else re.compute().mean().item()
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=UndefinedMetricWarning)
            assert recall_score(np_y, np_y_pred, average='samples') == pytest.approx(re_compute)

        re.reset()
        y_pred = torch.randint(0, 2, size=(15, 4, 10))
        y = torch.randint(0, 2, size=(15, 4, 10)).type(torch.LongTensor)
        re.update((y_pred, y))
        np_y_pred = to_numpy_multilabel(y_pred)
        np_y = to_numpy_multilabel(y)
        assert re._type == 'multilabel'
        re_compute = re.compute() if average else re.compute().mean().item()
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=UndefinedMetricWarning)
            assert recall_score(np_y, np_y_pred, average='samples') == pytest.approx(re_compute)

        # Batched Updates
        re.reset()
        y_pred = torch.randint(0, 2, size=(100, 4, 12))
        y = torch.randint(0, 2, size=(100, 4, 12)).type(torch.LongTensor)

        batch_size = 16
        n_iters = y.shape[0] // batch_size + 1

        for i in range(n_iters):
            idx = i * batch_size
            re.update((y_pred[idx:idx + batch_size], y[idx:idx + batch_size]))

        np_y = to_numpy_multilabel(y)
        np_y_pred = to_numpy_multilabel(y_pred)
        assert re._type == 'multilabel'
        re_compute = re.compute() if average else re.compute().mean().item()
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=UndefinedMetricWarning)
            assert recall_score(np_y, np_y_pred, average='samples') == pytest.approx(re_compute)

    for _ in range(5):
        _test(average=True)
        _test(average=False)

    re1 = Recall(is_multilabel=True, average=True)
    re2 = Recall(is_multilabel=True, average=False)
    y_pred = torch.randint(0, 2, size=(10, 4, 20))
    y = torch.randint(0, 2, size=(10, 4, 20)).type(torch.LongTensor)
    re1.update((y_pred, y))
    re2.update((y_pred, y))
    assert re1.compute() == pytest.approx(re2.compute().mean().item())


def test_multilabel_input_NCHW():
    def _test(average):
        re = Recall(average=average, is_multilabel=True)

        y_pred = torch.randint(0, 2, size=(10, 5, 18, 16))
        y = torch.randint(0, 2, size=(10, 5, 18, 16)).type(torch.LongTensor)
        re.update((y_pred, y))
        np_y_pred = to_numpy_multilabel(y_pred)
        np_y = to_numpy_multilabel(y)
        assert re._type == 'multilabel'
        re_compute = re.compute() if average else re.compute().mean().item()
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=UndefinedMetricWarning)
            assert recall_score(np_y, np_y_pred, average='samples') == pytest.approx(re_compute)

        re.reset()
        y_pred = torch.randint(0, 2, size=(10, 4, 20, 23))
        y = torch.randint(0, 2, size=(10, 4, 20, 23)).type(torch.LongTensor)
        re.update((y_pred, y))
        np_y_pred = to_numpy_multilabel(y_pred)
        np_y = to_numpy_multilabel(y)
        assert re._type == 'multilabel'
        re_compute = re.compute() if average else re.compute().mean().item()
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=UndefinedMetricWarning)
            assert recall_score(np_y, np_y_pred, average='samples') == pytest.approx(re_compute)

        # Batched Updates
        re.reset()
        y_pred = torch.randint(0, 2, size=(100, 5, 12, 14))
        y = torch.randint(0, 2, size=(100, 5, 12, 14)).type(torch.LongTensor)

        batch_size = 16
        n_iters = y.shape[0] // batch_size + 1

        for i in range(n_iters):
            idx = i * batch_size
            re.update((y_pred[idx:idx + batch_size], y[idx:idx + batch_size]))

        np_y = to_numpy_multilabel(y)
        np_y_pred = to_numpy_multilabel(y_pred)
        assert re._type == 'multilabel'
        re_compute = re.compute() if average else re.compute().mean().item()
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=UndefinedMetricWarning)
            assert recall_score(np_y, np_y_pred, average='samples') == pytest.approx(re_compute)

    for _ in range(5):
        _test(average=True)
        _test(average=False)

    re1 = Recall(is_multilabel=True, average=True)
    re2 = Recall(is_multilabel=True, average=False)
    y_pred = torch.randint(0, 2, size=(10, 4, 20, 23))
    y = torch.randint(0, 2, size=(10, 4, 20, 23)).type(torch.LongTensor)
    re1.update((y_pred, y))
    re2.update((y_pred, y))
    assert re1.compute() == pytest.approx(re2.compute().mean().item())


def test_incorrect_type():
    # Tests changing of type during training
    def _test(average):
        re = Recall(average=average)

        y_pred = torch.softmax(torch.rand(4, 4), dim=1)
        y = torch.ones(4).type(torch.LongTensor)
        re.update((y_pred, y))

        y_pred = torch.zeros(4, 1)
        y = torch.ones(4).type(torch.LongTensor)
        with pytest.raises(RuntimeError):
            re.update((y_pred, y))

    _test(average=True)
    _test(average=False)

    re1 = Recall(is_multilabel=True, average=True)
    re2 = Recall(is_multilabel=True, average=False)
    y_pred = torch.randint(0, 2, size=(10, 4, 20, 23))
    y = torch.randint(0, 2, size=(10, 4, 20, 23)).type(torch.LongTensor)
    re1.update((y_pred, y))
    re2.update((y_pred, y))
    assert re1.compute() == pytest.approx(re2.compute().mean().item())


def test_incorrect_y_classes():
    def _test(average):
        re = Recall(average=average)

        y_pred = torch.randint(0, 2, size=(10, 4)).float()
        y = torch.randint(4, 5, size=(10,)).long()

        with pytest.raises(ValueError):
            re.update((y_pred, y))

    _test(average=True)
    _test(average=False)
```
Remaining columns (avg_line_length through hits, in schema order):

40.378788 | 114 | 0.620263 | 4,106 | 29,315 | 4.257915 | 0.035314 | 0.050621 | 0.061717 | 0.050449 | 0.950638 | 0.943717 | 0.924441 | 0.910542 | 0.885317 | 0.872848 | 0 | 0.034729 | 0.239741 | 29,315 | 725 | 115 | 40.434483 | 0.74972 | 0.033601 | 0 | 0.807623 | 0 | 0 | 0.018979 | 0 | 0 | 0 | 0 | 0 | 0.14882 | 1 | 0.049002 | false | 0 | 0.012704 | 0 | 0.063521 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7
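For orientation, the qsc_code_frac_chars_dupe_{5..10}grams signals are duplicated-n-gram ratios: the fraction of characters covered by word n-grams that occur more than once in the file (high for this record, since the test functions repeat the same assertion pattern). A minimal sketch of one plausible way to compute such a ratio; the tokenization and character weighting here are assumptions, not the dataset's documented pipeline:

```python
from collections import Counter


def frac_chars_dupe_ngrams(text: str, n: int = 5) -> float:
    """Fraction of characters inside word n-grams that occur more than once."""
    words = text.split()
    grams = [tuple(words[i:i + n]) for i in range(len(words) - n + 1)]
    if not grams:
        return 0.0
    counts = Counter(grams)
    # Weight each n-gram by the number of characters it spans.
    total = sum(sum(len(w) for w in g) for g in grams)
    duped = sum(sum(len(w) for w in g) for g in grams if counts[g] > 1)
    return duped / total if total else 0.0
```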
**Record 2**

- hexsha: 31754e694149f3eb2f9d342e52cb3aed3e3e8c4b
- size: 9,922
- ext: py
- lang: Python
- max_stars_repo_path: scieio/spectroscopy/models.py
- max_stars_repo_name: arnelimperial/scieio
- max_stars_repo_head_hexsha: 279a25766f20d074a3df824c0fbc8b2d8e35f272
- max_stars_repo_licenses: ["MIT"]
- max_stars_count: null
- max_stars_repo_stars_event_min_datetime: null
- max_stars_repo_stars_event_max_datetime: null
- max_issues_repo_path: scieio/spectroscopy/models.py
- max_issues_repo_name: arnelimperial/scieio
- max_issues_repo_head_hexsha: 279a25766f20d074a3df824c0fbc8b2d8e35f272
- max_issues_repo_licenses: ["MIT"]
- max_issues_count: 8
- max_issues_repo_issues_event_min_datetime: 2021-03-19T01:56:44.000Z
- max_issues_repo_issues_event_max_datetime: 2022-03-12T00:24:21.000Z
- max_forks_repo_path: scieio/spectroscopy/models.py
- max_forks_repo_name: arnelimperial/scieio
- max_forks_repo_head_hexsha: 279a25766f20d074a3df824c0fbc8b2d8e35f272
- max_forks_repo_licenses: ["MIT"]
- max_forks_count: null
- max_forks_repo_forks_event_min_datetime: null
- max_forks_repo_forks_event_max_datetime: null

content:
```python
from django.db import models
from django.utils.text import slugify
from django.urls import reverse
from django.core.exceptions import ValidationError
from scieio.analytical_instruments.models import Instrumentation
from scieio.manufacturers.models import Manufacturer
from scieio.sellers.models import Seller
from scieio.conditions.models import Condition
import random


class Spectroscopy(models.Model):
    instrumentation = models.ForeignKey(Instrumentation, on_delete=models.CASCADE)
    name = models.CharField(max_length=255)
    slug = models.SlugField(max_length=255, unique=True, editable=False)
    created = models.DateTimeField(auto_now=False, auto_now_add=True)
    updated = models.DateTimeField(auto_now=True, auto_now_add=False)

    class Meta:
        ordering = ['id']
        unique_together = ('name', 'slug')

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        kwargs = {
            'pk': self.id,
            'slug': self.slug
        }
        return reverse('spectroscopy-detail', kwargs=kwargs)

    def save(self, *args, **kwargs):
        value = self.name
        self.slug = slugify(value, allow_unicode=True)
        super().save(*args, **kwargs)


def product_code_start():
    return random.randint(1, 99)


def product_code_end():
    return random.randint(1, 99)


def aa_count():
    obj_gas = AtomicAbsorption.objects.all().count()
    if obj_gas == 0:
        return 1
    else:
        return obj_gas + 1


class AtomicAbsorption(models.Model):
    spectroscopy_category = models.ForeignKey(
        Spectroscopy,
        on_delete=models.CASCADE,
        related_name='atomics',
        related_query_name='atomic'
    )
    name = models.CharField(max_length=255)
    slug = models.SlugField(max_length=255, unique=True, editable=False)
    description = models.TextField()
    product_code = models.CharField(
        unique=True,
        blank=False,
        max_length=15,
        editable=False
    )
    model = models.CharField(max_length=255, unique=True)
    condition = models.ForeignKey(Condition, on_delete=models.CASCADE)
    warranty = models.BooleanField(default=True)
    seller = models.ForeignKey(Seller, on_delete=models.CASCADE)
    manufacturer = models.ForeignKey(Manufacturer, on_delete=models.CASCADE)
    image = models.URLField()
    availability = models.BooleanField(default=True)
    price = models.DecimalField(max_digits=10, decimal_places=2)
    created = models.DateTimeField(auto_now=False, auto_now_add=True)
    updated = models.DateTimeField(auto_now=True, auto_now_add=False)

    class Meta:
        ordering = ['id']
        unique_together = ('name', 'slug')

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        kwargs = {
            'pk': self.id,
            'slug': self.slug
        }
        return reverse('gcSystem-detail', kwargs=kwargs)

    def clean(self, *args, **kwargs):
        # code = self.cleaned_data['product_code']
        pc = AtomicAbsorption.objects.filter(product_code=self.product_code)
        if pc:
            raise ValidationError('Product code already exist!')
        super(AtomicAbsorption, self).clean(*args, **kwargs)

    def save(self, *args, **kwargs):
        value = self.name
        self.slug = slugify(value, allow_unicode=True)
        self.product_code = "{}-{}{}{}".format(
            "AA", product_code_start(), aa_count(), product_code_end()
        )
        self.full_clean()
        super().save(*args, **kwargs)


def spectrophotometer_count():
    obj_gas = Spectrophotometer.objects.latest('id')
    if obj_gas.id == 0:
        return 1
    else:
        return obj_gas.id + 1


class Spectrophotometer(models.Model):
    spectroscopy_category = models.ForeignKey(Spectroscopy, on_delete=models.CASCADE)
    name = models.CharField(max_length=255)
    slug = models.SlugField(max_length=255, unique=True, editable=False)
    description = models.TextField()
    product_code = models.CharField(
        unique=True,
        blank=False,
        max_length=10,
        editable=False
    )
    model = models.CharField(max_length=255, unique=True)
    condition = models.ForeignKey(Condition, on_delete=models.CASCADE)
    warranty = models.BooleanField(default=True)
    seller = models.ForeignKey(Seller, on_delete=models.CASCADE)
    manufacturer = models.ForeignKey(Manufacturer, on_delete=models.CASCADE)
    image = models.URLField()
    availability = models.BooleanField(default=True)
    price = models.DecimalField(max_digits=10, decimal_places=2)
    created = models.DateTimeField(auto_now=False, auto_now_add=True)
    updated = models.DateTimeField(auto_now=True, auto_now_add=False)

    class Meta:
        ordering = ['id']
        unique_together = ('name', 'slug')

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        kwargs = {
            'pk': self.id,
            'slug': self.slug
        }
        return reverse('lc-detail', kwargs=kwargs)

    def clean(self, *args, **kwargs):
        # code = self.cleaned_data['product_code']
        pc = Spectrophotometer.objects.filter(product_code=self.product_code)
        if pc:
            raise ValidationError('Product code already exist!')
        super(Spectrophotometer, self).clean(*args, **kwargs)

    def save(self, *args, **kwargs):
        value = self.name
        self.product_code = "{}-{}{}{}".format(
            "SP", product_code_start(), spectrophotometer_count(), product_code_end()
        )
        self.slug = slugify(value, allow_unicode=True)
        self.full_clean()
        super().save(*args, **kwargs)


def icp_count():
    obj_gas = ICP.objects.all().count()
    if obj_gas == 0:
        return 1
    else:
        return obj_gas + 1


class ICP(models.Model):
    spectroscopy_category = models.ForeignKey(
        Spectroscopy,
        on_delete=models.CASCADE,
        related_name='icps',
        related_query_name='icp'
    )
    name = models.CharField(max_length=255)
    slug = models.SlugField(max_length=255, unique=True, editable=False)
    description = models.TextField()
    product_code = models.CharField(
        unique=True,
        blank=False,
        max_length=15,
        editable=False
    )
    model = models.CharField(max_length=255, unique=True)
    condition = models.ForeignKey(Condition, on_delete=models.CASCADE)
    warranty = models.BooleanField(default=True)
    seller = models.ForeignKey(Seller, on_delete=models.CASCADE)
    manufacturer = models.ForeignKey(Manufacturer, on_delete=models.CASCADE)
    image = models.URLField()
    availability = models.BooleanField(default=True)
    price = models.DecimalField(max_digits=10, decimal_places=2)
    created = models.DateTimeField(auto_now=False, auto_now_add=True)
    updated = models.DateTimeField(auto_now=True, auto_now_add=False)

    class Meta:
        ordering = ['id']
        unique_together = ('name', 'slug')

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        kwargs = {
            'pk': self.id,
            'slug': self.slug
        }
        return reverse('icp-detail', kwargs=kwargs)

    def clean(self, *args, **kwargs):
        # code = self.cleaned_data['product_code']
        pc = ICP.objects.filter(product_code=self.product_code)
        if pc:
            raise ValidationError('Product code already exist!')
        super(ICP, self).clean(*args, **kwargs)

    def save(self, *args, **kwargs):
        value = self.name
        self.slug = slugify(value, allow_unicode=True)
        self.product_code = "{}-{}{}{}".format(
            "ICP", product_code_start(), icp_count(), product_code_end()
        )
        self.full_clean()
        super().save(*args, **kwargs)


def ftir_count():
    obj_liquid = FTIR.objects.all().count()
    if obj_liquid == 0:
        return 1
    else:
        return obj_liquid + 1


class FTIR(models.Model):
    spectroscopy_category = models.ForeignKey(
        Spectroscopy,
        on_delete=models.CASCADE,
        related_name='ftirs',
        related_query_name='ftir'
    )
    name = models.CharField(max_length=255)
    slug = models.SlugField(max_length=255, unique=True, editable=False)
    description = models.TextField()
    product_code = models.CharField(
        unique=True,
        blank=False,
        max_length=10,
        editable=False
    )
    model = models.CharField(max_length=255, unique=True)
    condition = models.ForeignKey(Condition, on_delete=models.CASCADE)
    warranty = models.BooleanField(default=True)
    seller = models.ForeignKey(Seller, on_delete=models.CASCADE)
    manufacturer = models.ForeignKey(Manufacturer, on_delete=models.CASCADE)
    image = models.URLField()
    availability = models.BooleanField(default=True)
    price = models.DecimalField(max_digits=10, decimal_places=2)
    created = models.DateTimeField(auto_now=False, auto_now_add=True)
    updated = models.DateTimeField(auto_now=True, auto_now_add=False)

    class Meta:
        ordering = ['id']
        unique_together = ('name', 'slug')

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        kwargs = {
            'pk': self.id,
            'slug': self.slug
        }
        return reverse('lc-detail', kwargs=kwargs)

    def clean(self, *args, **kwargs):
        # code = self.cleaned_data['product_code']
        pc = FTIR.objects.filter(product_code=self.product_code)
        if pc:
            raise ValidationError('Product code already exist!')
        super(FTIR, self).clean(*args, **kwargs)

    def save(self, *args, **kwargs):
        value = self.name
        self.product_code = "{}-{}{}{}".format(
            "FT", product_code_start(), ftir_count(), product_code_end()
        )
        self.slug = slugify(value, allow_unicode=True)
        self.full_clean()
        super().save(*args, **kwargs)
```
Remaining columns (avg_line_length through hits, in schema order):

32.638158 | 85 | 0.652993 | 1,149 | 9,922 | 5.469104 | 0.111401 | 0.059516 | 0.037874 | 0.056811 | 0.853596 | 0.846913 | 0.835614 | 0.831795 | 0.831318 | 0.831318 | 0 | 0.010466 | 0.229591 | 9,922 | 303 | 86 | 32.745875 | 0.811617 | 0.016428 | 0 | 0.735178 | 0 | 0 | 0.033422 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.098814 | false | 0 | 0.035573 | 0.027668 | 0.509881 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7
**Record 3**

- hexsha: 31c2f0f60e9c94b5cd3d5cbf1d90720a161699f4
- size: 276
- ext: py
- lang: Python
- max_stars_repo_path: tests/VariableName/ImportVarName.py
- max_stars_repo_name: yhu-insight/python-toolkit
- max_stars_repo_head_hexsha: e53b2b4a63b455ca88955f18a2c00512a6de494b
- max_stars_repo_licenses: ["Apache-2.0"]
- max_stars_count: null
- max_stars_repo_stars_event_min_datetime: null
- max_stars_repo_stars_event_max_datetime: null
- max_issues_repo_path: tests/VariableName/ImportVarName.py
- max_issues_repo_name: yhu-insight/python-toolkit
- max_issues_repo_head_hexsha: e53b2b4a63b455ca88955f18a2c00512a6de494b
- max_issues_repo_licenses: ["Apache-2.0"]
- max_issues_count: null
- max_issues_repo_issues_event_min_datetime: null
- max_issues_repo_issues_event_max_datetime: null
- max_forks_repo_path: tests/VariableName/ImportVarName.py
- max_forks_repo_name: yhu-insight/python-toolkit
- max_forks_repo_head_hexsha: e53b2b4a63b455ca88955f18a2c00512a6de494b
- max_forks_repo_licenses: ["Apache-2.0"]
- max_forks_count: null
- max_forks_repo_forks_event_min_datetime: null
- max_forks_repo_forks_event_max_datetime: null

content:
```python
# -*- coding: utf-8 -*-
# Python __name__ module ImportVarName
# Author - yucheng.hu@insight.com

print("ImportVarName __name__ = %s" % __name__)

if __name__ == "__main__":
    print("ImportVarName is being run directly")
else:
    print("ImportVarName is being imported")
```
Remaining columns (avg_line_length through hits, in schema order):

21.230769 | 48 | 0.702899 | 32 | 276 | 5.4375 | 0.6875 | 0.310345 | 0.229885 | 0.287356 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004348 | 0.166667 | 276 | 12 | 49 | 23 | 0.752174 | 0.326087 | 0 | 0 | 0 | 0 | 0.554945 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.6 | 0 | 0.6 | 0.6 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | 7
**Record 4**

- hexsha: 31c46ba9ff0d053af856618bfaf3d9d541c34e07
- size: 1,497
- ext: py
- lang: Python
- max_stars_repo_path: keras_cv_attention_models/__init__.py
- max_stars_repo_name: dcleres/keras_cv_attention_models
- max_stars_repo_head_hexsha: 264876673e369f23eff49b3b589b72f908a9625b
- max_stars_repo_licenses: ["MIT"]
- max_stars_count: null
- max_stars_repo_stars_event_min_datetime: null
- max_stars_repo_stars_event_max_datetime: null
- max_issues_repo_path: keras_cv_attention_models/__init__.py
- max_issues_repo_name: dcleres/keras_cv_attention_models
- max_issues_repo_head_hexsha: 264876673e369f23eff49b3b589b72f908a9625b
- max_issues_repo_licenses: ["MIT"]
- max_issues_count: null
- max_issues_repo_issues_event_min_datetime: null
- max_issues_repo_issues_event_max_datetime: null
- max_forks_repo_path: keras_cv_attention_models/__init__.py
- max_forks_repo_name: dcleres/keras_cv_attention_models
- max_forks_repo_head_hexsha: 264876673e369f23eff49b3b589b72f908a9625b
- max_forks_repo_licenses: ["MIT"]
- max_forks_count: null
- max_forks_repo_forks_event_min_datetime: null
- max_forks_repo_forks_event_max_datetime: null

content:
```python
from .version import __version__
from keras_cv_attention_models import attention_layers
from keras_cv_attention_models import model_surgery
from keras_cv_attention_models import beit
from keras_cv_attention_models import botnet
from keras_cv_attention_models import coat
from keras_cv_attention_models import coatnet
from keras_cv_attention_models import convnext
from keras_cv_attention_models import cotnet
from keras_cv_attention_models import cmt
from keras_cv_attention_models import efficientnet
from keras_cv_attention_models import efficientdet
from keras_cv_attention_models import halonet
from keras_cv_attention_models import levit
from keras_cv_attention_models import mlp_family
from keras_cv_attention_models.mlp_family import mlp_mixer
from keras_cv_attention_models.mlp_family import res_mlp
from keras_cv_attention_models.mlp_family import gated_mlp
from keras_cv_attention_models import nfnets
from keras_cv_attention_models import resnest
from keras_cv_attention_models import resnet_family
from keras_cv_attention_models.resnet_family import resnext
from keras_cv_attention_models.resnet_family import resnet_quad
from keras_cv_attention_models.resnet_family import resnet_deep
from keras_cv_attention_models.resnet_family import regnet
from keras_cv_attention_models import volo
from keras_cv_attention_models import yolox
from keras_cv_attention_models import download_and_load
from keras_cv_attention_models import visualizing
from keras_cv_attention_models import imagenet
```
Remaining columns (avg_line_length through hits, in schema order):

48.290323 | 63 | 0.914496 | 232 | 1,497 | 5.431034 | 0.172414 | 0.207143 | 0.253175 | 0.460317 | 0.819841 | 0.819841 | 0.246825 | 0.246825 | 0.079365 | 0 | 0 | 0 | 0.08016 | 1,497 | 30 | 64 | 49.9 | 0.915033 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7
**Record 5**

- hexsha: 730d51754eba163328a7dce5d6bd4e738e606608
- size: 192
- ext: py
- lang: Python
- max_stars_repo_path: Serveur_CNN/BackEnd/Result.py
- max_stars_repo_name: Brisseta/CNN_CompterVision
- max_stars_repo_head_hexsha: 1513cf8839d2f8ce83ce25cbce505e8cfe935b7a
- max_stars_repo_licenses: ["Apache-2.0"]
- max_stars_count: null
- max_stars_repo_stars_event_min_datetime: null
- max_stars_repo_stars_event_max_datetime: null
- max_issues_repo_path: Serveur_CNN/BackEnd/Result.py
- max_issues_repo_name: Brisseta/CNN_CompterVision
- max_issues_repo_head_hexsha: 1513cf8839d2f8ce83ce25cbce505e8cfe935b7a
- max_issues_repo_licenses: ["Apache-2.0"]
- max_issues_count: null
- max_issues_repo_issues_event_min_datetime: null
- max_issues_repo_issues_event_max_datetime: null
- max_forks_repo_path: Serveur_CNN/BackEnd/Result.py
- max_forks_repo_name: Brisseta/CNN_CompterVision
- max_forks_repo_head_hexsha: 1513cf8839d2f8ce83ce25cbce505e8cfe935b7a
- max_forks_repo_licenses: ["Apache-2.0"]
- max_forks_count: null
- max_forks_repo_forks_event_min_datetime: null
- max_forks_repo_forks_event_max_datetime: null

content:
```python
class ResultFormat:
    def __init__(self, score, url):
        self.score = score
        self.url = url

    def __str__(self):
        return "score %f , url : %s" % (self.score, self.url)
```
Remaining columns (avg_line_length through hits, in schema order):

24 | 61 | 0.572917 | 25 | 192 | 4.08 | 0.44 | 0.264706 | 0.235294 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.296875 | 192 | 7 | 62 | 27.428571 | 0.755556 | 0 | 0 | 0 | 0 | 0 | 0.098958 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0 | 0.166667 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7
**Record 6**

- hexsha: 7dfa123e114d54bfeae0745e7534548b7ceb0751
- size: 2,994
- ext: py
- lang: Python
- max_stars_repo_path: vunit_run_dynamic_simulation_library.py
- max_stars_repo_name: johonkanen/dynamic_simulation_library
- max_stars_repo_head_hexsha: 42dd545a603b18d379fadb111e8279d75ad3dfaa
- max_stars_repo_licenses: ["MIT"]
- max_stars_count: 2
- max_stars_repo_stars_event_min_datetime: 2022-03-14T07:17:52.000Z
- max_stars_repo_stars_event_max_datetime: 2022-03-14T07:37:38.000Z
- max_issues_repo_path: vunit_run_dynamic_simulation_library.py
- max_issues_repo_name: johonkanen/dynamic_simulation_library
- max_issues_repo_head_hexsha: 42dd545a603b18d379fadb111e8279d75ad3dfaa
- max_issues_repo_licenses: ["MIT"]
- max_issues_count: null
- max_issues_repo_issues_event_min_datetime: null
- max_issues_repo_issues_event_max_datetime: null
- max_forks_repo_path: vunit_run_dynamic_simulation_library.py
- max_forks_repo_name: johonkanen/dynamic_simulation_library
- max_forks_repo_head_hexsha: 42dd545a603b18d379fadb111e8279d75ad3dfaa
- max_forks_repo_licenses: ["MIT"]
- max_forks_count: 1
- max_forks_repo_forks_event_min_datetime: 2022-03-14T12:57:43.000Z
- max_forks_repo_forks_event_max_datetime: 2022-03-14T12:57:43.000Z

content:
```python
#!/usr/bin/env python3
from pathlib import Path
from vunit import VUnit

# ROOT
ROOT = Path(__file__).resolve().parent
VU = VUnit.from_argv()

mathlib = VU.add_library("math_library")
mathlib.add_source_files(ROOT / "../math_library/multiplier/multiplier_base_types_22bit_pkg.vhd")
mathlib.add_source_files(ROOT / "../math_library/multiplier/multiplier_pkg.vhd")
mathlib.add_source_files(ROOT / "../math_library/sincos/sincos_pkg.vhd")
mathlib.add_source_files(ROOT / "../math_library/pi_controller/pi_controller_pkg.vhd")
mathlib.add_source_files(ROOT / "../math_library/coordinate_transforms/abc_to_ab_transform/abc_to_ab_transform_pkg.vhd")
mathlib.add_source_files(ROOT / "../math_library/coordinate_transforms/abc_to_ab_transform/ab_to_abc_transform_pkg.vhd")
mathlib.add_source_files(ROOT / "../math_library/coordinate_transforms/ab_to_dq_transform/dq_to_ab_transform_pkg.vhd")
mathlib.add_source_files(ROOT / "../math_library/coordinate_transforms/ab_to_dq_transform/ab_to_dq_transform_pkg.vhd")
mathlib.add_source_files(ROOT / "../math_library/multiplier/simulation/tb_multiplier.vhd")
mathlib.add_source_files(ROOT / "../math_library/coordinate_transforms/abc_to_ab_transform/abc_to_ab_transform_simulation/tb_abc_to_ab_transform.vhd")
mathlib.add_source_files(ROOT / "../math_library/coordinate_transforms/ab_to_dq_transform/ab_to_dq_simulation/tb_ab_to_dq_transforms.vhd")
mathlib.add_source_files(ROOT / "state_variable/state_variable_pkg.vhd")
mathlib.add_source_files(ROOT / "ac_motor_models/pmsm_electrical_model_pkg.vhd")
mathlib.add_source_files(ROOT / "ac_motor_models/pmsm_mechanical_model_pkg.vhd")
mathlib.add_source_files(ROOT / "ac_motor_models/permanent_magnet_motor_model_pkg.vhd")
mathlib.add_source_files(ROOT / "ac_motor_models/field_oriented_motor_control/field_oriented_motor_control_pkg.vhd")
mathlib.add_source_files(ROOT / "lcr_filter_model/lcr_filter_model_pkg.vhd")
mathlib.add_source_files(ROOT / "lcr_filter_model/lcr_filter_simulation/tb_lcr_filter.vhd")
mathlib.add_source_files(ROOT / "../math_library/coordinate_transforms/ab_to_dq_transform/ab_to_dq_transform_pkg.vhd")
mathlib.add_source_files(ROOT / "ac_motor_models/simulate_permanent_magnet_synchronous_machine/tb_permanent_magnet_synchronous_machine_model.vhd")
mathlib.add_source_files(ROOT / "ac_motor_models/field_oriented_motor_control/field_oriented_motor_control_simulation/tb_field_oriented_motor_control.vhd")
mathlib.add_source_files(ROOT / "inverter_model/inverter_model_pkg.vhd")
mathlib.add_source_files(ROOT / "inverter_model/inverter_model_simulation/tb_inverter_model.vhd")
mathlib.add_source_files(ROOT / "power_supply_model/power_supply_simulation_model_pkg.vhd")
mathlib.add_source_files(ROOT / "power_supply_model/psu_inverter_simulation_models_pkg.vhd")
mathlib.add_source_files(ROOT / "power_supply_model/power_supply_model_simulation/tb_power_supply_model.vhd")
mathlib.add_source_files(ROOT / "state_variable/simulation/tb_state_variable.vhd")

VU.main()
```
Remaining columns (avg_line_length through hits, in schema order):

63.702128 | 155 | 0.855377 | 455 | 2,994 | 5.101099 | 0.138462 | 0.116329 | 0.186127 | 0.244291 | 0.752693 | 0.752693 | 0.752693 | 0.74623 | 0.705299 | 0.640672 | 0 | 0.001046 | 0.04175 | 2,994 | 46 | 156 | 65.086957 | 0.807947 | 0.008684 | 0 | 0.060606 | 0 | 0 | 0.613621 | 0.609575 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.060606 | 0 | 0.060606 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9
**Record 7**

- hexsha: b426e013af1e94a352e68a54473a082da16100b6
- size: 221
- ext: py
- lang: Python
- max_stars_repo_path: gymgrid/envs/__init__.py
- max_stars_repo_name: wsgdrfz/gymgrid
- max_stars_repo_head_hexsha: 38de6d7963e73e6ca13f22b43e4abede335da26b
- max_stars_repo_licenses: ["MIT"]
- max_stars_count: 1
- max_stars_repo_stars_event_min_datetime: 2019-12-18T18:48:10.000Z
- max_stars_repo_stars_event_max_datetime: 2019-12-18T18:48:10.000Z
- max_issues_repo_path: gymgrid/envs/__init__.py
- max_issues_repo_name: wsgdrfz/gymgrid
- max_issues_repo_head_hexsha: 38de6d7963e73e6ca13f22b43e4abede335da26b
- max_issues_repo_licenses: ["MIT"]
- max_issues_count: null
- max_issues_repo_issues_event_min_datetime: null
- max_issues_repo_issues_event_max_datetime: null
- max_forks_repo_path: gymgrid/envs/__init__.py
- max_forks_repo_name: wsgdrfz/gymgrid
- max_forks_repo_head_hexsha: 38de6d7963e73e6ca13f22b43e4abede335da26b
- max_forks_repo_licenses: ["MIT"]
- max_forks_count: null
- max_forks_repo_forks_event_min_datetime: null
- max_forks_repo_forks_event_max_datetime: null

content:
```python
from gymgrid.envs.general_grid import GridWorld
from gymgrid.envs.cliff_env import Cliff
from gymgrid.envs.windy_grid import WindyGridWorld
from gymgrid.envs.samples import Sample1
from gymgrid.envs.samples import Sample2
```
Remaining columns (avg_line_length through hits, in schema order):

44.2 | 50 | 0.868778 | 33 | 221 | 5.727273 | 0.424242 | 0.291005 | 0.396825 | 0.232804 | 0.296296 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009901 | 0.085973 | 221 | 5 | 51 | 44.2 | 0.925743 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7
**Record 8**

- hexsha: b455c3842103af9e3e91984ee656cd77788e9ccf
- size: 14,608
- ext: py
- lang: Python
- max_stars_repo_path: tests/core/util/test_files.py
- max_stars_repo_name: Salvia-Network/salvia-blockchain
- max_stars_repo_head_hexsha: b0ce4b9f75c2fc354941b45eb468ffcf917ead30
- max_stars_repo_licenses: ["Apache-2.0"]
- max_stars_count: 6
- max_stars_repo_stars_event_min_datetime: 2021-09-13T17:20:49.000Z
- max_stars_repo_stars_event_max_datetime: 2022-02-09T04:31:47.000Z
- max_issues_repo_path: tests/core/util/test_files.py
- max_issues_repo_name: Salvia-Network/salvia-blockchain
- max_issues_repo_head_hexsha: b0ce4b9f75c2fc354941b45eb468ffcf917ead30
- max_issues_repo_licenses: ["Apache-2.0"]
- max_issues_count: 21
- max_issues_repo_issues_event_min_datetime: 2021-09-20T00:56:54.000Z
- max_issues_repo_issues_event_max_datetime: 2022-03-22T01:12:12.000Z
- max_forks_repo_path: tests/core/util/test_files.py
- max_forks_repo_name: Salvia-Network/salvia-blockchain
- max_forks_repo_head_hexsha: b0ce4b9f75c2fc354941b45eb468ffcf917ead30
- max_forks_repo_licenses: ["Apache-2.0"]
- max_forks_count: 9
- max_forks_repo_forks_event_min_datetime: 2021-09-13T17:54:04.000Z
- max_forks_repo_forks_event_max_datetime: 2022-03-15T08:38:35.000Z

content:
import os
import pytest
import shutil
import sys
from salvia.util import files
from salvia.util.files import move_file, move_file_async, write_file_async
from pathlib import Path
class TestMoveFile:
# use tmp_path pytest fixture to create a temporary directory
def test_move_file(self, tmp_path: Path):
"""
Move a file from one location to another and verify the contents.
"""
src_path: Path = tmp_path / "source.txt"
src_path.write_text("source")
dst_path: Path = tmp_path / "destination.txt"
move_file(src_path, dst_path)
assert src_path.exists() is False
assert dst_path.exists() is True
assert dst_path.read_text() == "source"
# use tmp_path pytest fixture to create a temporary directory
def test_move_file_with_overwrite(self, tmp_path: Path):
"""
Move a file from one location to another, overwriting the destination.
"""
src_path: Path = tmp_path / "source.txt"
src_path.write_text("source")
dst_path: Path = tmp_path / "destination.txt"
dst_path.write_text("destination")
move_file(src_path, dst_path)
assert src_path.exists() is False
assert dst_path.exists() is True
assert dst_path.read_text() == "source"
# use tmp_path pytest fixture to create a temporary directory
def test_move_file_create_intermediate_dirs(self, tmp_path: Path):
"""
Move a file from one location to another, creating intermediate directories at the destination.
"""
src_path: Path = tmp_path / "source.txt"
src_path.write_text("source")
dst_path: Path = tmp_path / "destination" / "destination.txt"
move_file(src_path, dst_path)
assert src_path.exists() is False
assert dst_path.exists() is True
assert dst_path.read_text() == "source"
# use tmp_path pytest fixture to create a temporary directory
def test_move_file_existing_intermediate_dirs(self, tmp_path: Path):
"""
Move a file from one location to another, where intermediate directories already exist at the destination.
"""
src_path: Path = tmp_path / "source.txt"
src_path.write_text("source")
dst_path: Path = tmp_path / "destination" / "destination.txt"
dst_path.parent.mkdir(parents=True, exist_ok=False)
assert dst_path.parent.exists()
move_file(src_path, dst_path)
assert src_path.exists() is False
assert dst_path.exists() is True
assert dst_path.read_text() == "source"
# use tmp_path pytest fixture to create a temporary directory
def test_move_file_source_missing(self, tmp_path: Path):
"""
Expect failure when moving a file from one location to another, where the source does not exist.
"""
src_path: Path = tmp_path / "source.txt"
dst_path: Path = tmp_path / "destination.txt"
with pytest.raises(FileNotFoundError):
move_file(src_path, dst_path)
assert src_path.exists() is False
assert dst_path.exists() is False
# use tmp_path pytest fixture to create a temporary directory
def test_move_file_os_replace_raising_permissionerror(self, tmp_path: Path, monkeypatch):
"""
Simulate moving a file with os.replace raising a PermissionError. The move should succeed
after using shutil.move to move the file.
"""
def mock_os_replace(src, dst):
raise PermissionError("test")
monkeypatch.setattr(os, "replace", mock_os_replace)
shutil_move_called: bool = False
original_shutil_move = shutil.move
def mock_shutil_move(src, dst):
nonlocal shutil_move_called
shutil_move_called = True
original_shutil_move(src, dst)
monkeypatch.setattr(shutil, "move", mock_shutil_move)
src_path: Path = tmp_path / "source.txt"
src_path.write_text("source")
dst_path: Path = tmp_path / "destination.txt"
move_file(src_path, dst_path)
assert shutil_move_called is True
assert src_path.exists() is False
assert dst_path.exists() is True
assert dst_path.read_text() == "source"
# use tmp_path pytest fixture to create a temporary directory
def test_move_file_overwrite_os_replace_raising_exception(self, tmp_path: Path, monkeypatch):
"""
Simulate moving a file with os.replace raising an exception. The move should succeed,
overwriting the destination.
"""
def mock_os_replace(src, dst):
raise PermissionError("test")
monkeypatch.setattr(os, "replace", mock_os_replace)
shutil_move_called: bool = False
original_shutil_move = shutil.move
def mock_shutil_move(src, dst):
nonlocal shutil_move_called
shutil_move_called = True
original_shutil_move(src, dst)
monkeypatch.setattr(shutil, "move", mock_shutil_move)
src_path: Path = tmp_path / "source.txt"
src_path.write_text("source")
dst_path: Path = tmp_path / "destination.txt"
dst_path.write_text("destination")
move_file(src_path, dst_path)
assert shutil_move_called is True
assert src_path.exists() is False
assert dst_path.exists() is True
assert dst_path.read_text() == "source"
# use tmp_path pytest fixture to create a temporary directory
def test_move_file_failing(self, tmp_path: Path, monkeypatch):
"""
Simulate moving a file with both os.replace and shutil.move raising exceptions. The move should fail.
"""
def mock_os_replace(src, dst):
raise RuntimeError("test")
monkeypatch.setattr(os, "replace", mock_os_replace)
def mock_shutil_move(src, dst):
raise RuntimeError("test2")
monkeypatch.setattr(shutil, "move", mock_shutil_move)
src_path: Path = tmp_path / "source.txt"
src_path.write_text("source")
dst_path: Path = tmp_path / "destination.txt"
with pytest.raises(RuntimeError):
move_file(src_path, dst_path)
assert src_path.exists() is True
assert dst_path.exists() is False
class TestMoveFileAsync:
@pytest.mark.asyncio
# use tmp_path pytest fixture to create a temporary directory
async def test_move_file_async(self, tmp_path: Path):
"""
Move a file from one location to another.
"""
src_path: Path = tmp_path / "source.txt"
src_path.write_text("source")
dst_path: Path = tmp_path / "destination.txt"
await move_file_async(src_path, dst_path)
assert src_path.exists() is False
assert dst_path.exists() is True
assert dst_path.read_text() == "source"
@pytest.mark.asyncio
# use tmp_path pytest fixture to create a temporary directory
async def test_move_file_async_failure_no_reattempts(self, tmp_path: Path, monkeypatch):
"""
Simulate a move where the underlying move_file call raises and no reattempts are made. move_file_async should surface the failure as FileNotFoundError.
"""
move_file_called: bool = False
def mock_move_file(src, dst):
nonlocal move_file_called
move_file_called = True
raise Exception("test")
monkeypatch.setattr(files, "move_file", mock_move_file)
src_path: Path = tmp_path / "source.txt"
src_path.write_text("source")
dst_path: Path = tmp_path / "destination.txt"
with pytest.raises(FileNotFoundError):
await move_file_async(src_path, dst_path, reattempts=0)
assert move_file_called is True
assert src_path.exists() is True
assert dst_path.exists() is False
@pytest.mark.asyncio
# use tmp_path pytest fixture to create a temporary directory
async def test_move_file_async_success_on_reattempt(self, tmp_path: Path, monkeypatch):
"""
Simulate moving a file where the move initially fails and then succeeds after reattempting.
The move should succeed.
"""
failed_attempts: int = 2
reattempts: int = 0
original_os_replace = os.replace
def mock_os_replace(src, dst):
nonlocal failed_attempts, reattempts
if reattempts < failed_attempts:
reattempts += 1
raise Exception("test")
else:
original_os_replace(src, dst)
monkeypatch.setattr(os, "replace", mock_os_replace)
def mock_shutil_move(src, dst):
raise Exception("test2")
monkeypatch.setattr(shutil, "move", mock_shutil_move)
src_path: Path = tmp_path / "source.txt"
src_path.write_text("source")
dst_path: Path = tmp_path / "destination.txt"
await move_file_async(src_path, dst_path, reattempts=failed_attempts + 1)
assert reattempts == failed_attempts
assert src_path.exists() is False
assert dst_path.exists() is True
assert dst_path.read_text() == "source"
@pytest.mark.asyncio
# use tmp_path pytest fixture to create a temporary directory
async def test_move_file_async_failure_on_reattempt(self, tmp_path: Path, monkeypatch):
"""
Simulate a move that fails on every attempt and exhausts all reattempts. move_file_async should give up and raise FileNotFoundError.
"""
total_allowed_attempts: int = 3
attempts: int = 0
def mock_os_replace(src, dst):
nonlocal attempts
attempts += 1
raise Exception("test")
monkeypatch.setattr(os, "replace", mock_os_replace)
def mock_shutil_move(src, dst):
raise Exception("test2")
monkeypatch.setattr(shutil, "move", mock_shutil_move)
src_path: Path = tmp_path / "source.txt"
src_path.write_text("source")
dst_path: Path = tmp_path / "destination.txt"
with pytest.raises(FileNotFoundError):
await move_file_async(src_path, dst_path, reattempts=total_allowed_attempts - 1)
assert attempts == total_allowed_attempts
assert src_path.exists() is True
assert dst_path.exists() is False
class TestWriteFile:
@pytest.mark.asyncio
# use tmp_path pytest fixture to create a temporary directory
async def test_write_file(self, tmp_path: Path):
"""
Write a file to a location.
"""
dest_path: Path = tmp_path / "test_write_file.txt"
await write_file_async(dest_path, "test")
assert dest_path.read_text() == "test"
@pytest.mark.asyncio
# use tmp_path pytest fixture to create a temporary directory
async def test_write_file_overwrite(self, tmp_path: Path):
"""
Write a file to a location and overwrite the file if it already exists.
"""
dest_path: Path = tmp_path / "test_write_file.txt"
dest_path.write_text("test")
await write_file_async(dest_path, "test2")
assert dest_path.read_text() == "test2"
@pytest.mark.asyncio
# use tmp_path pytest fixture to create a temporary directory
async def test_write_file_create_intermediate_dirs(self, tmp_path: Path):
"""
Write a file to a location and create intermediate directories if they do not exist.
"""
dest_path: Path = tmp_path / "test_write_file/a/b/c/test_write_file.txt"
await write_file_async(dest_path, "test")
assert dest_path.read_text() == "test"
@pytest.mark.asyncio
# use tmp_path pytest fixture to create a temporary directory
async def test_write_file_existing_intermediate_dirs(self, tmp_path: Path):
"""
Write a file to a location where intermediate directories already exist.
"""
dest_path: Path = tmp_path / "test_write_file/a/b/c/test_write_file.txt"
dest_path.parent.mkdir(parents=True, exist_ok=False)
assert dest_path.parent.exists()
await write_file_async(dest_path, "test")
assert dest_path.read_text() == "test"
@pytest.mark.asyncio
# use tmp_path pytest fixture to create a temporary directory
async def test_write_file_default_permissions(self, tmp_path: Path):
"""
Write a file to a location and use the default permissions.
"""
if sys.platform in ["win32", "cygwin"]:
pytest.skip("Setting UNIX file permissions doesn't apply to Windows")
dest_path: Path = tmp_path / "test_write_file/test_write_file.txt"
assert not dest_path.parent.exists()
await write_file_async(dest_path, "test")
assert dest_path.read_text() == "test"
# Expect: parent directory has default permissions of 0o700
assert oct(dest_path.parent.stat().st_mode)[-3:] == oct(0o700)[-3:]
# Expect: file has default permissions of 0o600
assert oct(dest_path.stat().st_mode)[-3:] == oct(0o600)[-3:]
@pytest.mark.asyncio
# use tmp_path pytest fixture to create a temporary directory
async def test_write_file_custom_permissions(self, tmp_path: Path):
"""
Write a file to a location and use custom permissions.
"""
if sys.platform in ["win32", "cygwin"]:
pytest.skip("Setting UNIX file permissions doesn't apply to Windows")
dest_path: Path = tmp_path / "test_write_file/test_write_file.txt"
await write_file_async(dest_path, "test", file_mode=0o642)
assert dest_path.read_text() == "test"
# Expect: file has custom permissions of 0o642
assert oct(dest_path.stat().st_mode)[-3:] == oct(0o642)[-3:]
@pytest.mark.asyncio
# use tmp_path pytest fixture to create a temporary directory
async def test_write_file_os_replace_raising_permissionerror(self, tmp_path: Path, monkeypatch):
"""
Write a file to a location where os.replace raises PermissionError.
"""
def mock_os_replace(src, dst):
raise PermissionError("test")
monkeypatch.setattr(os, "replace", mock_os_replace)
shutil_move_called: bool = False
original_shutil_move = shutil.move
def mock_shutil_move(src, dst):
nonlocal shutil_move_called
shutil_move_called = True
original_shutil_move(src, dst)
monkeypatch.setattr(shutil, "move", mock_shutil_move)
dest_path: Path = tmp_path / "test_write_file/test_write_file.txt"
await write_file_async(dest_path, "test")
assert shutil_move_called is True
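# ---------------------------------------------------------------------------
# The helpers exercised above (move_file, move_file_async, write_file_async)
# are not shown in this excerpt. The commented sketch below is a minimal
# reconstruction assuming only the behaviour the tests assert: os.replace
# first with shutil.move as a fallback, reattempts + 1 total attempts in the
# async variant, and FileNotFoundError once reattempts are exhausted. Names,
# defaults, and the retry delay are assumptions, not the original
# implementation; it is left commented out so it cannot shadow the real
# imports under test.
# ---------------------------------------------------------------------------
# def move_file(src_path: Path, dst_path: Path) -> None:
#     try:
#         os.replace(os.fspath(src_path), os.fspath(dst_path))
#     except PermissionError:
#         # os.replace can fail across filesystems on some platforms.
#         shutil.move(os.fspath(src_path), os.fspath(dst_path))
#
# async def move_file_async(src_path: Path, dst_path: Path, *, reattempts: int = 6,
#                           reattempt_delay: float = 0.5) -> None:
#     for _ in range(reattempts + 1):
#         try:
#             move_file(src_path, dst_path)
#             break
#         except Exception:
#             await asyncio.sleep(reattempt_delay)
#     if src_path.exists() or not dst_path.exists():
#         raise FileNotFoundError(f"Failed to move {src_path} to {dst_path}")
#
# async def write_file_async(file_path: Path, data: str, *,
#                            file_mode: int = 0o600, dir_mode: int = 0o700) -> None:
#     # Write to a temp file, then move it into place atomically.
#     file_path.parent.mkdir(parents=True, exist_ok=True, mode=dir_mode)
#     tmp_path = file_path.parent / (file_path.name + ".tmp")
#     tmp_path.write_text(data)
#     tmp_path.chmod(file_mode)
#     await move_file_async(tmp_path, file_path)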
# === utils/safe_set_utils.py (KTH-SML/multi-agent-LMPC @ d81929c, MIT) ===
from __future__ import division
import numpy as np
import numpy.linalg as la
import scipy as sp
import scipy.spatial
import cvxpy as cp
import matplotlib.pyplot as plt
from sklearn import svm
import pdb, itertools, matplotlib
import warnings
warnings.filterwarnings("ignore")
def get_safe_set(x_cls, agents, des_num_ts='all', des_num_iters='all'):
n_a = len(x_cls[0])
n_x = x_cls[0][0].shape[0]
c = [matplotlib.cm.get_cmap('jet')(i*(1./(n_a-1))) for i in range(n_a)]
# Enumerate pairs of agents
pairs = list(itertools.combinations(range(n_a), 2))
# Get the minimum distance for collision avoidance between agents based on the geometry of their occupied space
min_dist = []
r_a = [agents[i].get_collision_buff_r() for i in range(n_a)]
for p in pairs:
dist = r_a[p[0]] + r_a[p[1]]
min_dist.append(dist)
num_ts = 0
num_iters = len(x_cls)
cl_lens = []
# Record each iteration's trajectory lengths and take the longest trajectory of the last iteration
it_start = max(0, num_iters-des_num_iters)
orig_range = range(it_start, num_iters)
for j in orig_range:
iter_cls = x_cls[j]
it_cl_lens = []
for agent_cl in iter_cls:
it_cl_lens.append(agent_cl.shape[1])
if agent_cl.shape[1] > num_ts and j == orig_range[-1]:
num_ts = agent_cl.shape[1]
cl_lens.append(it_cl_lens)
# Cap the requested number of time steps at the actual trajectory length
if num_ts < des_num_ts:
des_num_ts = num_ts
if des_num_iters == 'all':
des_num_iters = num_iters
if des_num_ts == 'all':
des_num_ts = num_ts
# safe_set_idxs = [agent_0_ss_idxs, agent_1_ss_idxs, ... , agent_M_ss_idxs]
# agent_#_ss_idxs = [ss_idxs_0, ss_idxs_1, ... , ss_idxs_T]
safe_sets_idxs = [[] for _ in range(n_a)]
exploration_spaces = [[] for _ in range(n_a)]
last_invalid_t = -1
for t in range(num_ts):
# Determine starting iteration index and ending time step index
it_start = max(0, num_iters-des_num_iters)
# ts_end = min(num_ts, t+des_num_ts)
ts_end = t+des_num_ts
H_t = [[] for _ in range(n_a)]
g_t = [[] for _ in range(n_a)]
while True:
# Construct candidate safe set
print('Constructing safe set from iteration %i to %i and time %i to %i' % (it_start, num_iters-1, t, ts_end-1))
safe_set_cand_t = []
for a in range(n_a):
it_range = range(it_start, num_iters)
ts_range = []
for j in it_range:
i = orig_range.index(j)
ts_range.append(range(min(t, cl_lens[i][a]-1), min(ts_end, cl_lens[i][a])))
# print(range(min(t, cl_lens[i][a]-1), min(ts_end, cl_lens[i][a])), x_cls[j][a].shape)
ss_idxs = {'it_range' : it_range, 'ts_range' : ts_range}
safe_set_cand_t.append(ss_idxs) # Candidate safe sets at this time step
# Check for potential overlap and minimum distance between agent safe sets
all_valid = True
for (p, d) in zip(pairs, min_dist):
collision = False
# Collision only defined for position states
safe_set_pos_0 = np.empty((2,0))
safe_set_pos_1 = np.empty((2,0))
for (i, j) in enumerate(safe_set_cand_t[p[0]]['it_range']):
safe_set_pos_0 = np.append(safe_set_pos_0, x_cls[j][p[0]][:2,safe_set_cand_t[p[0]]['ts_range'][i]], axis=1)
safe_set_pos_1 = np.append(safe_set_pos_1, x_cls[j][p[1]][:2,safe_set_cand_t[p[1]]['ts_range'][i]], axis=1)
# Stack safe set position vectors into data matrix and assign labels agent p[0]: -1, agent p[1]: 1
X = np.append(safe_set_pos_0, safe_set_pos_1, axis=1).T
y = np.append(-np.ones(safe_set_pos_0.shape[1]), np.ones(safe_set_pos_1.shape[1]))
# if t == 68:
# pdb.set_trace()
# Use SVM with linear kernel and no regularization (w'x + b <= -a_0 for agent p[0], w'x + b >= a_1 for agent p[1])
clf = svm.SVC(kernel='linear', C=1000, max_iter=1000)
clf.fit(X, y)
w = np.squeeze(clf.coef_)
b = np.squeeze(clf.intercept_)
# Calculate classifier margin
margin = 2/la.norm(w, 2)
# Check for misclassification of support vectors. This indicates that the safe sets are not linearly separable
for i in clf.support_:
pred_label = clf.predict(X[i].reshape((1,-1)))
# pred_val = clf.decision_function(X[i].reshape((1,-1)))
if pred_label != y[i]:
collision = True
print('Potential for collision between agents %i and %i' % (p[0],p[1]))
break
# Check for distance between safe sets
if not collision and margin < d:
print('Margin between safe sets for agents %i and %i is too small' % (p[0],p[1]))
# If collision is possible or margin is less than minimum required distance between safe sets, reduce safe set
# iteration and/or time range
# Currently, we reduce iteration range first. If iteration range cannot be reduced any further then we reduce time step range
if collision or margin < d:
all_valid = False
it_start += 1
if it_start >= num_iters:
it_start = max(0, num_iters-des_num_iters)
ts_end -= 1
# Record the time step at which a range reduction was last required; at the end we iterate
# through the safe sets up to this time and make sure they all use the same iteration and time range
last_invalid_t = t
# Reset the candidate exploration spaces
H_t = [[] for _ in range(n_a)]
g_t = [[] for _ in range(n_a)]
break
# Distance between the two supporting hyperplanes is (a_0+a_1)/||w||
a_0_min = d*la.norm(w, 2)/(1 + r_a[p[1]]/r_a[p[0]])
a_1_min = d*la.norm(w, 2)/(1 + r_a[p[0]]/r_a[p[1]])
ratio_remain_0 = la.norm(x_cls[0][p[0]][:2,-1] - safe_set_pos_0[:,0], 2)/la.norm(x_cls[0][p[0]][:2,-1] - x_cls[0][p[0]][:2,0], 2)
ratio_remain_1 = la.norm(x_cls[0][p[1]][:2,-1] - safe_set_pos_1[:,0], 2)/la.norm(x_cls[0][p[1]][:2,-1] - x_cls[0][p[1]][:2,0], 2)
w_0 = 1.0 # w_0 = np.exp(35*ratio_remain_0-3)/(np.exp(35*ratio_remain_0-3)+1)
w_1 = 1.0 # w_1 = np.exp(35*ratio_remain_1-3)/(np.exp(35*ratio_remain_1-3)+1)
# Solve for tight hyperplane bounds for both collections of points
z = cp.Variable(1)
cost = z
constr = []
for i in range(safe_set_pos_0.shape[1]):
constr += [w.dot(safe_set_pos_0[:,i]) + b <= z]
problem = cp.Problem(cp.Minimize(cost), constr)
problem.solve(solver=cp.MOSEK, verbose=False)
# problem.solve(verbose=False)
a_0_max = -z.value[0]
z = cp.Variable(1)
cost = z
constr = []
for i in range(safe_set_pos_1.shape[1]):
constr += [-w.dot(safe_set_pos_1[:,i]) - b <= z]
problem = cp.Problem(cp.Minimize(cost), constr)
problem.solve(solver=cp.MOSEK, verbose=False)
# problem.solve(verbose=False)
a_1_max = -z.value[0]
if a_0_max > a_0_min and a_1_max > a_1_min:
if w_0 <= w_1:
a_shift = (a_0_max - a_0_min)*(1-w_0/w_1)
a_0 = a_0_min + a_shift
a_1 = a_1_min - a_shift
else:
a_shift = (a_1_max - a_1_min)*(1-w_1/w_0)
a_0 = a_0_min - a_shift
a_1 = a_1_min + a_shift
else:
a_0 = a_0_max - 1e-5 # Deal with precision issues when a point in the safe set is on the exploration space boundary
a_1 = a_1_max - 1e-5
# Exploration spaces
H_t[p[0]].append(w)
g_t[p[0]].append(b+a_0)
H_t[p[1]].append(-w)
g_t[p[1]].append(-b+a_1)
# plot_svm_results(X, y, clf)
# all_valid flag is true if all pair-wise collision and margin checks were passed
if all_valid:
# Save iteration and time range from this time step, start with these values next time step
des_num_iters = num_iters - it_start
des_num_ts = ts_end - t
for a in range(n_a):
H_t[a] = np.array(H_t[a])
g_t[a] = np.array(g_t[a])
print('Safe set construction successful for t = %i, using iteration range %i and time range %i for next time step' % (t, des_num_iters, des_num_ts))
break # Break from while loop
for a in range(n_a):
safe_sets_idxs[a].append(safe_set_cand_t[a])
exploration_spaces[a].append((H_t[a], g_t[a]))
# Adjust safe sets from before last_invalid_t to have the same iteration and time range and test that safe sets are contained
# in the exploration spaces at each time step
for t in range(num_ts-1):
for a in range(n_a):
if t <= last_invalid_t:
old_it_len = len(safe_sets_idxs[a][t]['it_range'])
safe_sets_idxs[a][t]['it_range'] = safe_sets_idxs[a][last_invalid_t+1]['it_range'] # Update iteration range
new_it_len = len(safe_sets_idxs[a][t]['it_range'])
for _ in range(old_it_len - new_it_len):
safe_sets_idxs[a][t]['ts_range'].pop(0) # Throw away iterations that we don't include anymore
for i in range(new_it_len):
n_ss = len(safe_sets_idxs[a][t]['ts_range'][i])
if n_ss > des_num_ts:
safe_sets_idxs[a][t]['ts_range'][i] = safe_sets_idxs[a][t]['ts_range'][i][:des_num_ts] # Update time range for remaining iterations
safe_set_pos = np.empty((2,0))
for (i, j) in enumerate(safe_sets_idxs[a][t]['it_range']):
safe_set_pos = np.append(safe_set_pos, x_cls[j][a][:2,safe_sets_idxs[a][t]['ts_range'][i]], axis=1)
in_exp_space = (exploration_spaces[a][t][0].dot(safe_set_pos) + exploration_spaces[a][t][1].reshape((-1,1)) <= 0)
if not np.all(in_exp_space):
raise ValueError('Safe set not contained in exploration space at time %i' % t)
# pdb.set_trace()
return safe_sets_idxs, exploration_spaces
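def _svm_margin_demo():
    """Illustrative only, not part of the original module: the pair-wise check
    inside get_safe_set fits a linear SVM between two agents' safe-set positions
    and compares the resulting margin against their summed collision radii. This
    standalone demo reproduces that computation on synthetic point clouds and is
    safe to delete."""
    safe_set_pos_0 = np.random.randn(2, 30) + np.array([[-3.0], [0.0]])
    safe_set_pos_1 = np.random.randn(2, 30) + np.array([[3.0], [0.0]])
    X = np.append(safe_set_pos_0, safe_set_pos_1, axis=1).T
    y = np.append(-np.ones(safe_set_pos_0.shape[1]), np.ones(safe_set_pos_1.shape[1]))
    clf = svm.SVC(kernel='linear', C=1000, max_iter=1000)
    clf.fit(X, y)
    w = np.squeeze(clf.coef_)
    # Margin between the supporting hyperplanes; get_safe_set requires this to
    # exceed the minimum separation distance d = r_a[p[0]] + r_a[p[1]]
    margin = 2/la.norm(w, 2)
    print('separating normal: %s, margin: %.3f' % (w, margin))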
def get_safe_set_2(x_cls, agents, des_num_ts='all', des_num_iters='all'):
n_a = len(x_cls[0])
n_x = x_cls[0][0].shape[0]
c = [matplotlib.cm.get_cmap('jet')(i*(1./(n_a-1))) for i in range(n_a)]
# Enumerate pairs of agents
pairs = list(itertools.combinations(range(n_a), 2))
# Get the minimum distance for collision avoidance between agents based on the geometry of their occupied space
min_dist = []
r_a = [agents[i].get_collision_buff_r() for i in range(n_a)]
for p in pairs:
dist = r_a[p[0]] + r_a[p[1]]
min_dist.append(dist)
num_ts = 0
num_iters = len(x_cls)
cl_lens = []
# Record each iteration's trajectory lengths and take the longest trajectory of the last iteration
it_start = max(0, num_iters-des_num_iters)
orig_range = range(it_start, num_iters)
for j in orig_range:
iter_cls = x_cls[j]
it_cl_lens = []
for agent_cl in iter_cls:
it_cl_lens.append(agent_cl.shape[1])
if agent_cl.shape[1] > num_ts and j == orig_range[-1]:
num_ts = agent_cl.shape[1]
cl_lens.append(it_cl_lens)
# Cap the requested number of time steps at the actual trajectory length
if num_ts < des_num_ts:
des_num_ts = num_ts
if des_num_iters == 'all':
des_num_iters = num_iters
if des_num_ts == 'all':
des_num_ts = num_ts
orig_des_num_ts = des_num_ts
orig_des_num_iters = des_num_iters
ss_t = []
ss_n_its = []
ss_n_ts = []
# safe_set_idxs = [agent_0_ss_idxs, agent_1_ss_idxs, ... , agent_M_ss_idxs]
# agent_#_ss_idxs = [ss_idxs_0, ss_idxs_1, ... , ss_idxs_T]
safe_sets_idxs = [[] for _ in range(n_a)]
exploration_spaces = [[] for _ in range(n_a)]
last_invalid_t = -1
for t in range(num_ts):
# Determine starting iteration index and ending time step index
it_start = max(0, num_iters-des_num_iters)
# ts_end = min(num_ts, t+des_num_ts)
ts_end = t+des_num_ts
H_t = [[] for _ in range(n_a)]
g_t = [[] for _ in range(n_a)]
while True:
# Construct candidate safe set
print('Constructing safe set from iteration %i to %i and time %i to %i' % (it_start, num_iters-1, t, ts_end-1))
safe_set_cand_t = []
for a in range(n_a):
it_range = range(it_start, num_iters)
ts_range = []
for j in it_range:
i = orig_range.index(j)
ts_range.append(range(min(t, cl_lens[i][a]-1), min(ts_end, cl_lens[i][a])))
# print(range(min(t, cl_lens[i][a]-1), min(ts_end, cl_lens[i][a])), x_cls[j][a].shape)
ss_idxs = {'it_range' : it_range, 'ts_range' : ts_range}
safe_set_cand_t.append(ss_idxs) # Candidate safe sets at this time step
# Check for potential overlap and minimum distance between agent safe sets
all_valid = True
for (p, d) in zip(pairs, min_dist):
collision = False
# Collision only defined for position states
safe_set_pos_0 = np.empty((2,0))
safe_set_pos_1 = np.empty((2,0))
for (i, j) in enumerate(safe_set_cand_t[p[0]]['it_range']):
safe_set_pos_0 = np.append(safe_set_pos_0, x_cls[j][p[0]][:2,safe_set_cand_t[p[0]]['ts_range'][i]], axis=1)
safe_set_pos_1 = np.append(safe_set_pos_1, x_cls[j][p[1]][:2,safe_set_cand_t[p[1]]['ts_range'][i]], axis=1)
# Stack safe set position vectors into data matrix and assign labels agent p[0]: -1, agent p[1]: 1
X = np.append(safe_set_pos_0, safe_set_pos_1, axis=1).T
y = np.append(-np.ones(safe_set_pos_0.shape[1]), np.ones(safe_set_pos_1.shape[1]))
# if t == 68:
# pdb.set_trace()
# Use SVM with linear kernel and no regularization (w'x + b <= -a_0 for agent p[0], w'x + b >= a_1 for agent p[1])
clf = svm.SVC(kernel='linear', C=1000, max_iter=1000)
clf.fit(X, y)
w = np.squeeze(clf.coef_)
b = np.squeeze(clf.intercept_)
# Calculate classifier margin
margin = 2/la.norm(w, 2)
# Check for misclassification of support vectors. This indicates that the safe sets are not linearly separable
for i in clf.support_:
pred_label = clf.predict(X[i].reshape((1,-1)))
# pred_val = clf.decision_function(X[i].reshape((1,-1)))
if pred_label != y[i]:
collision = True
print('Potential for collision between agents %i and %i' % (p[0],p[1]))
break
# Check for distance between safe sets
if not collision and margin < d:
print('Margin between safe sets for agents %i and %i is too small' % (p[0],p[1]))
# If collision is possible or margin is less than minimum required distance between safe sets, reduce safe set
# iteration and/or time range
# Currently, we reduce iteration range first. If iteration range cannot be reduced any further then we reduce time step range and reset iteration range
if collision or margin < d:
all_valid = False
it_start += 1
if it_start >= num_iters:
it_start = max(0, num_iters-des_num_iters)
# ts_end -= 1
# ts_end -= 10
if ts_end-t <= 15:
ts_end = max(t+1, ts_end-1)
elif ts_end-t <= 30:  # the preceding branch already guarantees ts_end-t > 15
ts_end -= 5
else:
ts_end -= 10
# Record the time step at which a range reduction was last required; at the end we iterate
# through the safe sets up to this time and make sure they all use the same iteration and time range
last_invalid_t = t
# Reset the candidate exploration spaces
H_t = [[] for _ in range(n_a)]
g_t = [[] for _ in range(n_a)]
break
# Distance between the two supporting hyperplanes is (a_0+a_1)/||w||
a_0_min = d*la.norm(w, 2)/(1 + r_a[p[1]]/r_a[p[0]])
a_1_min = d*la.norm(w, 2)/(1 + r_a[p[0]]/r_a[p[1]])
# ratio_remain_0 = la.norm(x_cls[0][p[0]][:2,-1] - safe_set_pos_0[:,0], 2)/la.norm(x_cls[0][p[0]][:2,-1] - x_cls[0][p[0]][:2,0], 2)
# ratio_remain_1 = la.norm(x_cls[0][p[1]][:2,-1] - safe_set_pos_1[:,0], 2)/la.norm(x_cls[0][p[1]][:2,-1] - x_cls[0][p[1]][:2,0], 2)
w_0 = 1.0 # w_0 = np.exp(35*ratio_remain_0-3)/(np.exp(35*ratio_remain_0-3)+1)
w_1 = 1.0 # w_1 = np.exp(35*ratio_remain_1-3)/(np.exp(35*ratio_remain_1-3)+1)
# Solve for tight hyperplane bounds for both collections of points
z = cp.Variable(1)
cost = z
constr = []
for i in range(safe_set_pos_0.shape[1]):
constr += [w.dot(safe_set_pos_0[:,i]) + b <= z]
problem = cp.Problem(cp.Minimize(cost), constr)
problem.solve(solver=cp.MOSEK, verbose=False)
# problem.solve(verbose=False)
a_0_max = -z.value[0]
z = cp.Variable(1)
cost = z
constr = []
for i in range(safe_set_pos_1.shape[1]):
constr += [-w.dot(safe_set_pos_1[:,i]) - b <= z]
problem = cp.Problem(cp.Minimize(cost), constr)
problem.solve(solver=cp.MOSEK, verbose=False)
# problem.solve(verbose=False)
a_1_max = -z.value[0]
if a_0_max > a_0_min and a_1_max > a_1_min:
if w_0 <= w_1:
a_shift = (a_0_max - a_0_min)*(1-w_0/w_1)
a_0 = a_0_min + a_shift
a_1 = a_1_min - a_shift
else:
a_shift = (a_1_max - a_1_min)*(1-w_1/w_0)
a_0 = a_0_min - a_shift
a_1 = a_1_min + a_shift
else:
a_0 = a_0_max - 1e-5 # Deal with precision issues when a point in the safe set is on the exploration space boundary
a_1 = a_1_max - 1e-5
# Exploration spaces
H_t[p[0]].append(w)
g_t[p[0]].append(b+a_0)
H_t[p[1]].append(-w)
g_t[p[1]].append(-b+a_1)
# plot_svm_results(X, y, clf)
# all_valid flag is true if all pair-wise collision and margin checks were passed
if all_valid:
# Save iteration and time range from this time step, start with these values next time step
des_num_iters = num_iters - it_start
des_num_ts = ts_end - t
for a in range(n_a):
H_t[a] = np.array(H_t[a])
g_t[a] = np.array(g_t[a])
print('Safe set construction successful for t = %i, using iteration range %i and time range %i for next time step' % (t, des_num_iters, des_num_ts))
break # Break from while loop
for a in range(n_a):
safe_sets_idxs[a].append(safe_set_cand_t[a])
exploration_spaces[a].append((H_t[a], g_t[a]))
# Keep track of the number of iterations and timesteps in the safe set at each time
ss_t.append(t)
ss_n_its.append(des_num_iters)
ss_n_ts.append(des_num_ts)
des_num_iters = orig_des_num_iters
des_num_ts = orig_des_num_ts
# pdb.set_trace()
# Update index and time ranges accordingly
for i in range(len(ss_t)-1):
if ss_n_its[i+1] < ss_n_its[i] or ss_n_ts[i+1] < ss_n_ts[i]:
for j in range(i+1):
if ss_n_its[i+1] < ss_n_its[j]:
ss_n_its[j] = ss_n_its[i+1]
if ss_n_ts[i+1] < ss_n_ts[j]:
ss_n_ts[j] = ss_n_ts[i+1]
# pdb.set_trace()
for t in range(num_ts-1):
for a in range(n_a):
it_range = range(max(0, num_iters-ss_n_its[t]), num_iters)
ts_range = []
for j in it_range:
i = orig_range.index(j)
ts_range.append(range(min(t, cl_lens[i][a]-1), min(t + ss_n_ts[t], cl_lens[i][a])))
# print(range(min(t, cl_lens[i][a]-1), min(ts_end, cl_lens[i][a])), x_cls[j][a].shape)
ss_idxs = {'it_range' : it_range, 'ts_range' : ts_range}
safe_sets_idxs[a][t] = ss_idxs
safe_set_pos = np.empty((2,0))
for (i, j) in enumerate(safe_sets_idxs[a][t]['it_range']):
safe_set_pos = np.append(safe_set_pos, x_cls[j][a][:2,safe_sets_idxs[a][t]['ts_range'][i]], axis=1)
in_exp_space = (exploration_spaces[a][t][0].dot(safe_set_pos) + exploration_spaces[a][t][1].reshape((-1,1)) <= 0)
if not np.all(in_exp_space):
raise ValueError('Safe set not contained in exploration space at time %i' % t)
# pdb.set_trace()
return safe_sets_idxs, exploration_spaces
def get_safe_set_cent(x_cls, des_num_ts='all', des_num_iters='all'):
num_ts = 0
num_iters = len(x_cls)
cl_lens = []
if des_num_iters == 'all':
    des_num_iters = num_iters
# Determine starting iteration index and ending time step index
it_start = max(0, num_iters-des_num_iters)
it_range = range(it_start, num_iters)
for i in it_range:
    cl_lens.append(x_cls[i].shape[1])
    if x_cls[i].shape[1] > num_ts:
        num_ts = x_cls[i].shape[1]
# Resolve 'all' only after num_ts is known (as get_safe_set does above);
# resolving it earlier would leave des_num_ts at 0 and yield empty time ranges
if des_num_ts == 'all':
    des_num_ts = num_ts
safe_set_idxs = []
for t in range(num_ts):
ts_end = t + des_num_ts
print('Constructing safe set from iteration %i to %i and time %i to %i' % (it_start, num_iters-1, t, ts_end-1))
ts_range = []
for i in range(len(it_range)):
ts_range.append(range(min(t, cl_lens[i]-1), min(ts_end, cl_lens[i])))
# ts_range.append(range(cl_lens[i]))
ss_idxs = {'it_range' : it_range, 'ts_range' : ts_range}
safe_set_idxs.append(ss_idxs)
# pdb.set_trace()
return safe_set_idxs
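def _safe_set_idxs_demo():
    """Illustrative only, not part of the original module: how the
    {'it_range', 'ts_range'} index dicts returned above are dereferenced into
    actual states (mirroring the np.append calls in get_safe_set). Uses
    synthetic closed-loop data; safe to delete."""
    x_cls = [np.random.randn(4, 12) for _ in range(3)]  # 3 iterations, 12 time steps each
    safe_set_idxs = get_safe_set_cent(x_cls, des_num_ts=5, des_num_iters='all')
    ss_idxs = safe_set_idxs[2]  # safe set anchored at time step t = 2
    for (i, j) in enumerate(ss_idxs['it_range']):
        states = x_cls[j][:, ss_idxs['ts_range'][i]]
        print('iteration %i contributes %i states' % (j, states.shape[1]))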
def inspect_safe_set(x, u, safe_sets_idxs, exploration_spaces, plot_lims=None):
n_a = len(x[-1])
n_SS = len(safe_sets_idxs[0])
c = [matplotlib.cm.get_cmap('jet')(i*(1./(n_a-1))) for i in range(n_a)]
plt.ion()
fig = plt.figure()
xy_ax = fig.add_axes([0, 0, 1, 1])
# psi_ax = fig.add_axes([1.1, 0.9, 1, 0.2])
# psi_ax.set_xticks([])
# v_ax = fig.add_axes([1.1, 0.6, 1, 0.2])
# v_ax.set_xticks([])
# df_ax = fig.add_axes([1.1, 0.3, 1, 0.2])
# df_ax.set_xticks([])
# a_ax = fig.add_axes([1.1, 0.0, 1, 0.2])
xy_ax.set_xlabel('x')
xy_ax.set_ylabel('y')
if plot_lims is not None:
xy_ax.set_xlim(plot_lims[0])
xy_ax.set_ylim(plot_lims[1])
xy_ax.set_aspect('equal')
t = 0
new_plot = False
print('step = %i' % t)
while True:
cmd = input('(debug) ')  # Python 3: input() replaces the Python 2 raw_input(); also avoids shadowing the builtin
# Quit inspector
if cmd == 'q':
break
# Move forward 1 time step
elif cmd == 'f':
if t == n_SS-1:
print('End reached')
continue
else:
t += 1
new_plot = True
print('t = %i' % t)
# Move backward 1 time step
elif cmd == 'b':
if t == 0:
print('Start reached')
continue
else:
t -= 1
new_plot = True
print('t = %i' % t)
else:
print('Input not recognized')
print('Press q to exit, f/b to move forward/backwards through iteration time steps')
if new_plot:
xy_ax.clear()
for a in range(n_a):
ss_x = []
ss_u = []
ss_it_idxs = safe_sets_idxs[a][t]['it_range']
ss_ts_idxs = safe_sets_idxs[a][t]['ts_range']
print(ss_it_idxs, ss_ts_idxs)
for i in ss_it_idxs:
for j in ss_ts_idxs:
ss_x.append(x[i][a][:,j])
ss_u.append(u[i][a][:,j])
ss_x = np.array(ss_x)
ss_u = np.array(ss_u)
H_t = exploration_spaces[a][t][0]
g_t = exploration_spaces[a][t][1]
xy_ax.plot(ss_x[:,0], ss_x[:,1], '.', c=c[a])
y_0 = (-H_t[0,0]*np.array(plot_lims[0])-g_t[0])/H_t[0,1]
y_1 = (-H_t[1,0]*np.array(plot_lims[0])-g_t[1])/H_t[1,1]
xy_ax.plot(plot_lims[0], y_0, c=c[a])
xy_ax.plot(plot_lims[0], y_1, c=c[a])
xy_ax.set_xlabel('x')
xy_ax.set_ylabel('y')
if plot_lims is not None:
xy_ax.set_xlim(plot_lims[0])
xy_ax.set_ylim(plot_lims[1])
xy_ax.set_aspect('equal')
fig.canvas.draw()
new_plot = False
# === cinder/volume/drivers/huawei/huawei_18000.py (yanheven/cinder @ 8979797, Apache-2.0) ===
# Copyright (c) 2013 - 2014 Huawei Technologies Co., Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Volume Drivers for Huawei OceanStor 18000 storage arrays.
"""
from cinder.volume import driver
from cinder.volume.drivers.huawei import rest_common
from cinder.zonemanager import utils as fczm_utils
class Huawei18000ISCSIDriver(driver.ISCSIDriver):
"""ISCSI driver for Huawei OceanStor 18000 storage arrays.
Version history:
1.0.0 - Initial driver
1.1.0 - Provide Huawei OceanStor 18000 storage volume driver.
"""
VERSION = "1.1.0"
def __init__(self, *args, **kwargs):
super(Huawei18000ISCSIDriver, self).__init__(*args, **kwargs)
def do_setup(self, context):
"""Instantiate common class and log in storage system."""
self.common = rest_common.RestCommon(configuration=self.configuration)
return self.common.login()
def check_for_setup_error(self):
"""Check configuration file."""
return self.common._check_conf_file()
def create_volume(self, volume):
"""Create a volume."""
lun_info = self.common.create_volume(volume)
return {'provider_location': lun_info['ID'],
'lun_info': lun_info}
def create_volume_from_snapshot(self, volume, snapshot):
"""Create a volume from a snapshot."""
lun_info = self.common.create_volume_from_snapshot(volume, snapshot)
return {'provider_location': lun_info['ID'],
'lun_info': lun_info}
def create_cloned_volume(self, volume, src_vref):
"""Create a clone of the specified volume."""
lun_info = self.common.create_cloned_volume(volume, src_vref)
return {'provider_location': lun_info['ID'],
'lun_info': lun_info}
def extend_volume(self, volume, new_size):
"""Extend a volume."""
return self.common.extend_volume(volume, new_size)
def delete_volume(self, volume):
"""Delete a volume."""
return self.common.delete_volume(volume)
def create_snapshot(self, snapshot):
"""Create a snapshot."""
lun_info = self.common.create_snapshot(snapshot)
return {'provider_location': lun_info['ID'],
'lun_info': lun_info}
def delete_snapshot(self, snapshot):
"""Delete a snapshot."""
return self.common.delete_snapshot(snapshot)
def get_volume_stats(self, refresh=False):
"""Get volume stats."""
data = self.common.update_volume_stats()
backend_name = self.configuration.safe_get('volume_backend_name')
data['volume_backend_name'] = backend_name or self.__class__.__name__
data['storage_protocol'] = 'iSCSI'
data['driver_version'] = self.VERSION
return data
def initialize_connection(self, volume, connector):
"""Map a volume to a host."""
return self.common.initialize_connection_iscsi(volume, connector)
def terminate_connection(self, volume, connector, **kwargs):
"""Terminate the map."""
self.common.terminate_connection_iscsi(volume, connector)
def create_export(self, context, volume):
"""Export the volume."""
pass
def ensure_export(self, context, volume):
"""Synchronously recreate an export for a volume."""
pass
def remove_export(self, context, volume):
"""Remove an export for a volume."""
pass
class Huawei18000FCDriver(driver.FibreChannelDriver):
"""FC driver for Huawei OceanStor 18000 storage arrays.
Version history:
1.0.0 - Initial driver
1.1.0 - Provide Huawei OceanStor 18000 storage volume driver.
"""
VERSION = "1.1.0"
def __init__(self, *args, **kwargs):
super(Huawei18000FCDriver, self).__init__(*args, **kwargs)
def do_setup(self, context):
"""Instantiate common class and log in storage system."""
self.common = rest_common.RestCommon(configuration=self.configuration)
return self.common.login()
def check_for_setup_error(self):
"""Check configuration file."""
return self.common._check_conf_file()
def create_volume(self, volume):
"""Create a volume."""
lun_info = self.common.create_volume(volume)
return {'provider_location': lun_info['ID'],
'lun_info': lun_info}
def create_volume_from_snapshot(self, volume, snapshot):
"""Create a volume from a snapshot."""
lun_info = self.common.create_volume_from_snapshot(volume, snapshot)
return {'provider_location': lun_info['ID'],
'lun_info': lun_info}
def create_cloned_volume(self, volume, src_vref):
"""Create a clone of the specified volume."""
lun_info = self.common.create_cloned_volume(volume, src_vref)
return {'provider_location': lun_info['ID'],
'lun_info': lun_info}
def extend_volume(self, volume, new_size):
"""Extend a volume."""
return self.common.extend_volume(volume, new_size)
def delete_volume(self, volume):
"""Delete a volume."""
return self.common.delete_volume(volume)
def create_snapshot(self, snapshot):
"""Create a snapshot."""
lun_info = self.common.create_snapshot(snapshot)
return {'provider_location': lun_info['ID'],
'lun_info': lun_info}
def delete_snapshot(self, snapshot):
"""Delete a snapshot."""
return self.common.delete_snapshot(snapshot)
def get_volume_stats(self, refresh=False):
"""Get volume stats."""
data = self.common.update_volume_stats()
backend_name = self.configuration.safe_get('volume_backend_name')
data['volume_backend_name'] = backend_name or self.__class__.__name__
data['storage_protocol'] = 'FC'
data['driver_version'] = self.VERSION
return data
@fczm_utils.AddFCZone
def initialize_connection(self, volume, connector):
"""Map a volume to a host."""
return self.common.initialize_connection_fc(volume, connector)
@fczm_utils.RemoveFCZone
def terminate_connection(self, volume, connector, **kwargs):
"""Terminate the map."""
return self.common.terminate_connection_fc(volume, connector)
def create_export(self, context, volume):
"""Export the volume."""
pass
def ensure_export(self, context, volume):
"""Synchronously recreate an export for a volume."""
pass
def remove_export(self, context, volume):
"""Remove an export for a volume."""
pass
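# ---------------------------------------------------------------------------
# Illustrative only, not part of the original driver: a hedged sketch of the
# lifecycle Cinder drives these classes through. `conf`, `ctx`, `vol`, and
# `connector` stand in for a real Configuration object, request context,
# volume object, and host connector; all four names are assumptions for
# demonstration.
#
#   drv = Huawei18000ISCSIDriver(configuration=conf)
#   drv.do_setup(ctx)                # logs in to the array via rest_common
#   drv.check_for_setup_error()      # validates the driver's config file
#   info = drv.create_volume(vol)    # {'provider_location': ..., 'lun_info': ...}
#   conn = drv.initialize_connection(vol, connector)
# ---------------------------------------------------------------------------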
# === morphablegraphs/motion_generator/algorithm_configuration.py (dfki-asr/morphablegraphs @ 02c77aa, MIT) ===
#!/usr/bin/env python
#
# Copyright 2019 DFKI GmbH.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the
# following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
# -*- coding: utf-8 -*-
import collections
OLD_ALGORITHM_CONFIG = {
"smoothing_settings": {
"spatial_smoothing": True,
"time_smoothing": False,
"spatial_smoothing_method": "smoothing",
"spatial_smoothing_window": 20,
"time_smoothing_window": 15,
"apply_foot_alignment": True,
"root_filter_window": 0
},
"trajectory_following_settings": {
"spline_type": 0,
"control_point_filter_threshold": 0,
"dir_constraint_factor": 0.1,
"heuristic_step_length_factor": 1.0,
"position_constraint_factor": 1.0,
"step_length_approx_method": "arc_length",
"transition_pose_constraint_factor": 0.6,
"closest_point_search_accuracy": 0.001,
"closest_point_search_max_iterations": 5000,
"look_ahead_distance": 100,
"end_step_length_factor": 1.0,
"max_distance_to_path": 500,
"arc_length_granularity": 1000,
"use_transition_constraint" : False,
"spline_super_sampling_factor": 20,
"constrain_start_orientation": True,
"constrain_transition_orientation": True,
"generate_half_step_constraint": True,
"generate_foot_plant_constraints": False
},
"local_optimization_settings": {
"start_error_threshold": 0.0,
"error_scale_factor": 1.0,
"spatial_epsilon": 0.0,
"quality_scale_factor": 1.0,
"tolerance": 0.05,
"method": "leastsq",
"max_iterations": 500,
"verbose": False
},
"global_spatial_optimization_settings": {
"max_steps": 3,
"start_error_threshold": 4.0,
"error_scale_factor": 1.0,
"quality_scale_factor": 100.0,
"tolerance": 0.05,
"method": "leastsq",
"max_iterations": 500,
"position_weight": 1000.0,
"orientation_weight": 1000.0,
"verbose": False
},
"global_time_optimization_settings": {
"error_scale_factor": 1.0,
"quality_scale_factor": 0.0001,
"tolerance": 0.05,
"method": "L-BFGS-B",
"max_iterations": 500,
"optimized_actions": 2,
"verbose": False
},
"inverse_kinematics_settings":{
"tolerance": 0.05,
"optimization_method": "L-BFGS-B",
"max_iterations": 1000,
"interpolation_window": 120,
"transition_window": 60,
"use_euler_representation": False,
"solving_method": "unconstrained",
"activate_look_at": True,
"max_retries": 5,
"success_threshold": 5.0,
"optimize_orientation": True,
"elementary_action_max_iterations": 5,
"elementary_action_optimization_eps": 1.0,
"adapt_hands_during_carry_both": True,
"constrain_place_orientation": False
},
"motion_grounding_settings":{
"activate_blending": True,
"generate_foot_plant_constraints": True,
"foot_lift_search_window": 40,
"foot_lift_tolerance": 3.0,
"graph_walk_grounding_window": 4,
"contact_tolerance": 1.0,
"constraint_range": 10,
"smoothing_constraints_window": 8
},
"n_random_samples": 100,
"average_elementary_action_error_threshold": 500,
"constrained_sampling_mode": "cluster_tree_search",
"activate_inverse_kinematics": True,
"activate_motion_grounding": True,
"n_cluster_search_candidates": 4,
"use_transition_model": False,
"local_optimization_mode": "all",
"activate_parameter_check": False,
"use_global_time_optimization": True,
"global_spatial_optimization_mode": "trajectory_end",
"collision_avoidance_constraints_mode": "direct_connection",
"optimize_collision_avoidance_constraints_extra": False,
"use_constrained_gmm": False,
"use_constraints": True,
"use_local_coordinates": True,
"use_semantic_annotation_with_mgrd": False,
"activate_time_variation": True,
"debug_max_step": -1,
"verbose": False
}
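# Illustrative only, not part of the original module: the two dicts in this
# file differ in a handful of defaults (e.g. apply_foot_alignment,
# dir_constraint_factor, generate_half_step_constraint,
# activate_motion_grounding, global_spatial_optimization_mode, and the added
# diff_eps entries). The helper below, whose name is hypothetical, lists such
# differences.
def _diff_configs(old, new, prefix=""):
    """Recursively print config keys whose values differ between two dicts."""
    for key in sorted(set(old) | set(new)):
        path = prefix + key
        a, b = old.get(key), new.get(key)
        if isinstance(a, dict) and isinstance(b, dict):
            _diff_configs(a, b, path + ".")
        elif a != b:
            print("%s: %r -> %r" % (path, a, b))
# Usage (once both dicts in this file are defined):
#   _diff_configs(OLD_ALGORITHM_CONFIG, DEFAULT_ALGORITHM_CONFIG)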
DEFAULT_ALGORITHM_CONFIG = collections.OrderedDict({
"smoothing_settings": {
"spatial_smoothing" : True,
"time_smoothing" : False,
"spatial_smoothing_method": "smoothing",
"spatial_smoothing_window": 20,
"time_smoothing_window": 15,
"apply_foot_alignment": False,
"root_filter_window": 0
},
"trajectory_following_settings": {
"spline_type": 0,
"control_point_filter_threshold": 0,
"dir_constraint_factor": 0.8,
"heuristic_step_length_factor": 1.0,
"position_constraint_factor": 1.0,
"step_length_approx_method": "arc_length",
"transition_pose_constraint_factor": 0.6,
"closest_point_search_accuracy": 0.001,
"closest_point_search_max_iterations": 5000,
"look_ahead_distance": 100,
"end_step_length_factor": 1.0,
"max_distance_to_path": 500,
"arc_length_granularity": 1000,
"use_transition_constraint": False,
"spline_super_sampling_factor": 20,
"constrain_start_orientation": True,
"constrain_transition_orientation": True,
"generate_half_step_constraint": False,
"generate_foot_plant_constraints": False
},
"local_optimization_settings": {
"start_error_threshold": 0.0,
"error_scale_factor": 1.0,
"spatial_epsilon": 0.0,
"quality_scale_factor": 0.1,
"tolerance": 0.05,
"method": "leastsq",#"L-BFGS-B",#
"max_iterations": 500,
"verbose": False,
"diff_eps": 1.0
},
"global_spatial_optimization_settings": {
"max_steps": 3,
"start_error_threshold": 4.0,
"error_scale_factor": 1.0,
"quality_scale_factor": 100.0,
"tolerance": 0.05,
"method": "leastsq",
"max_iterations": 500,
"position_weight": 1000.0,
"orientation_weight": 1000.0,
"verbose": False,
"diff_eps": 2.0
},
"global_time_optimization_settings": {
"error_scale_factor": 1.0,
"quality_scale_factor": 0.0001,
"tolerance": 0.05,
"method": "L-BFGS-B",
"max_iterations": 500,
"optimized_actions": 2,
"verbose": False,
"diff_eps": 1.0
},
"inverse_kinematics_settings":{
"tolerance": 0.05,
"optimization_method": "L-BFGS-B",
"max_iterations": 1000,
"interpolation_window": 120,
"transition_window": 60,
"use_euler_representation": False,
"solving_method": "unconstrained",
"activate_look_at": True,
"max_retries": 5,
"success_threshold": 5.0,
"optimize_orientation": True,
"elementary_action_max_iterations": 5,
"elementary_action_optimization_eps": 1.0,
"adapt_hands_during_carry_both": True,
"constrain_place_orientation": False,
"activate_blending": True
},
"motion_grounding_settings":{
"activate_blending": True,
"generate_foot_plant_constraints": True,
"foot_lift_search_window": 40,
"foot_lift_tolerance": 3.0,
"graph_walk_grounding_window": 4,
"contact_tolerance": 1.0,
"constraint_range": 10,
"smoothing_constraints_window": 8,
"damp_angle": 0.01,
"damp_factor": 1.0
},
"n_random_samples": 100,
"average_elementary_action_error_threshold": 500,
"constrained_sampling_mode": "cluster_tree_search",
"activate_inverse_kinematics": True,
"activate_motion_grounding": False,
"n_cluster_search_candidates": 4,
"use_transition_model": False,
"local_optimization_mode": "all",
"activate_parameter_check": False,
"use_global_time_optimization": True,
"global_spatial_optimization_mode": "none",
"collision_avoidance_constraints_mode": "direct_connection",
"optimize_collision_avoidance_constraints_extra": False,
"use_constrained_gmm": False,
"use_constraints": True,
"use_local_coordinates": True,
"use_semantic_annotation_with_mgrd": False,
"activate_time_variation": True,
"debug_max_step": -1,
"verbose": False
})
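# Illustrative only, not part of the original module: one way to apply user
# overrides on top of DEFAULT_ALGORITHM_CONFIG without mutating the defaults.
# The helper name and the example values are assumptions for demonstration.
def merge_algorithm_config(overrides, base=None):
    """Return a new config with `overrides` merged over `base`, updating the
    nested settings groups one level deep (matching the structure above)."""
    import copy
    merged = copy.deepcopy(base if base is not None else DEFAULT_ALGORITHM_CONFIG)
    for key, value in overrides.items():
        if isinstance(value, dict) and isinstance(merged.get(key), dict):
            merged[key].update(value)
        else:
            merged[key] = value
    return merged
# Example: tighten the local optimizer and disable time variation.
#   config = merge_algorithm_config({
#       "local_optimization_settings": {"tolerance": 0.01},
#       "activate_time_variation": False,
#   })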
# === tests/test_demo_python_project_backstage.py (lucasmelin/demo-python-project-backstage @ fe67c2c, MIT) ===
"""Tests for `demo_python_project_backstage` package."""
import pytest
import demo_python_project_backstage
def test_version():
"""Verify the package version."""
assert demo_python_project_backstage.__version__ == "0.1.0"
# === src/logic/azext_logic/vendored_sdks/logic/operations/_integration_account_operations.py (Mannan2812/azure-cli-extensions @ e2b34ef, MIT) ===
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import datetime
from typing import Any, Callable, Dict, Generic, List, Optional, TypeVar, Union
import warnings
from azure.core.exceptions import HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from .. import models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class IntegrationAccountOperations(object):
"""IntegrationAccountOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~logic_management_client.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list_by_subscription(
self,
top=None, # type: Optional[int]
**kwargs # type: Any
):
# type: (...) -> "models.IntegrationAccountListResult"
"""Gets a list of integration accounts by subscription.
:param top: The number of items to be included in the result.
:type top: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: IntegrationAccountListResult or the result of cls(response)
:rtype: ~logic_management_client.models.IntegrationAccountListResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationAccountListResult"]
error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
api_version = "2019-05-01"
def prepare_request(next_link=None):
if not next_link:
# Construct URL
url = self.list_by_subscription.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
else:
url = next_link
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query("top", top, 'int')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('IntegrationAccountListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_subscription.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Logic/integrationAccounts'}
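    # Illustrative only, not part of the generated module: prepare_request,
    # extract_data, and get_next above implement the azure.core paging
    # protocol, so callers simply iterate the returned ItemPaged. The client
    # construction is a hedged sketch; `credential` and `subscription_id` are
    # assumptions standing in for however this operations class gets wired
    # into a service client.
    #
    #   client = LogicManagementClient(credential, subscription_id)
    #   for account in client.integration_account.list_by_subscription(top=10):
    #       print(account.name)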
def list_by_resource_group(
self,
resource_group_name, # type: str
top=None, # type: Optional[int]
**kwargs # type: Any
):
# type: (...) -> "models.IntegrationAccountListResult"
"""Gets a list of integration accounts by resource group.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param top: The number of items to be included in the result.
:type top: int
:keyword callable cls: A custom type or function that will be passed the direct response
:return: IntegrationAccountListResult or the result of cls(response)
:rtype: ~logic_management_client.models.IntegrationAccountListResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationAccountListResult"]
error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
api_version = "2019-05-01"
def prepare_request(next_link=None):
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
else:
url = next_link
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if top is not None:
query_parameters['$top'] = self._serialize.query("top", top, 'int')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('IntegrationAccountListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize(models.ErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/integrationAccounts'}
def get(
self,
resource_group_name, # type: str
integration_account_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "models.IntegrationAccount"
"""Gets an integration account.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param integration_account_name: The integration account name.
:type integration_account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: IntegrationAccount or the result of cls(response)
:rtype: ~logic_management_client.models.IntegrationAccount
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationAccount"]
error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
api_version = "2019-05-01"
# Construct URL
url = self.get.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'integrationAccountName': self._serialize.url("integration_account_name", integration_account_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = 'application/json'
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('IntegrationAccount', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/integrationAccounts/{integrationAccountName}'}
def create_or_update(
self,
resource_group_name, # type: str
integration_account_name, # type: str
location=None, # type: Optional[str]
tags=None, # type: Optional[Dict[str, str]]
sku=None, # type: Optional["models.IntegrationAccountSku"]
integration_service_environment=None, # type: Optional["models.IntegrationServiceEnvironment"]
state=None, # type: Optional[Union[str, "models.WorkflowState"]]
**kwargs # type: Any
):
# type: (...) -> "models.IntegrationAccount"
"""Creates or updates an integration account.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param integration_account_name: The integration account name.
:type integration_account_name: str
:param location: The resource location.
:type location: str
:param tags: The resource tags.
:type tags: dict[str, str]
:param sku: The sku.
:type sku: ~logic_management_client.models.IntegrationAccountSku
:param integration_service_environment: The integration service environment.
:type integration_service_environment: ~logic_management_client.models.IntegrationServiceEnvironment
:param state: The workflow state.
:type state: str or ~logic_management_client.models.WorkflowState
:keyword callable cls: A custom type or function that will be passed the direct response
:return: IntegrationAccount or the result of cls(response)
:rtype: ~logic_management_client.models.IntegrationAccount
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationAccount"]
error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
_integration_account = models.IntegrationAccount(location=location, tags=tags, sku=sku, integration_service_environment=integration_service_environment, state=state)
api_version = "2019-05-01"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self.create_or_update.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'integrationAccountName': self._serialize.url("integration_account_name", integration_account_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = 'application/json'
# Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(_integration_account, 'IntegrationAccount')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('IntegrationAccount', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('IntegrationAccount', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/integrationAccounts/{integrationAccountName}'}
def update(
self,
resource_group_name, # type: str
integration_account_name, # type: str
location=None, # type: Optional[str]
tags=None, # type: Optional[Dict[str, str]]
sku=None, # type: Optional["models.IntegrationAccountSku"]
integration_service_environment=None, # type: Optional["models.IntegrationServiceEnvironment"]
state=None, # type: Optional[Union[str, "models.WorkflowState"]]
**kwargs # type: Any
):
# type: (...) -> "models.IntegrationAccount"
"""Updates an integration account.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param integration_account_name: The integration account name.
:type integration_account_name: str
:param location: The resource location.
:type location: str
:param tags: The resource tags.
:type tags: dict[str, str]
:param sku: The sku.
:type sku: ~logic_management_client.models.IntegrationAccountSku
:param integration_service_environment: The integration service environment.
:type integration_service_environment: ~logic_management_client.models.IntegrationServiceEnvironment
:param state: The workflow state.
:type state: str or ~logic_management_client.models.WorkflowState
:keyword callable cls: A custom type or function that will be passed the direct response
:return: IntegrationAccount or the result of cls(response)
:rtype: ~logic_management_client.models.IntegrationAccount
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationAccount"]
error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
_integration_account = models.IntegrationAccount(location=location, tags=tags, sku=sku, integration_service_environment=integration_service_environment, state=state)
api_version = "2019-05-01"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self.update.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'integrationAccountName': self._serialize.url("integration_account_name", integration_account_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = 'application/json'
# Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(_integration_account, 'IntegrationAccount')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('IntegrationAccount', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/integrationAccounts/{integrationAccountName}'}
def delete(
self,
resource_group_name, # type: str
integration_account_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
"""Deletes an integration account.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param integration_account_name: The integration account name.
:type integration_account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
api_version = "2019-05-01"
# Construct URL
url = self.delete.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'integrationAccountName': self._serialize.url("integration_account_name", integration_account_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/integrationAccounts/{integrationAccountName}'}
def list_callback_url(
self,
resource_group_name, # type: str
integration_account_name, # type: str
not_after=None, # type: Optional[datetime.datetime]
key_type=None, # type: Optional[Union[str, "models.KeyType"]]
**kwargs # type: Any
):
# type: (...) -> "models.CallbackUrl"
"""Gets the integration account callback URL.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param integration_account_name: The integration account name.
:type integration_account_name: str
:param not_after: The expiry time.
:type not_after: ~datetime.datetime
:param key_type: The key type.
:type key_type: str or ~logic_management_client.models.KeyType
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CallbackUrl or the result of cls(response)
:rtype: ~logic_management_client.models.CallbackUrl
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.CallbackUrl"]
error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
_parameters = models.GetCallbackUrlParameters(not_after=not_after, key_type=key_type)
api_version = "2019-05-01"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self.list_callback_url.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'integrationAccountName': self._serialize.url("integration_account_name", integration_account_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = 'application/json'
# Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(_parameters, 'GetCallbackUrlParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('CallbackUrl', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_callback_url.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/integrationAccounts/{integrationAccountName}/listCallbackUrl'}
def list_key_vault_key(
self,
resource_group_name, # type: str
integration_account_name, # type: str
key_vault, # type: "models.KeyVaultReference"
skip_token=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> "models.KeyVaultKeyCollection"
"""Gets the integration account's Key Vault keys.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param integration_account_name: The integration account name.
:type integration_account_name: str
:param key_vault: The key vault reference.
:type key_vault: ~logic_management_client.models.KeyVaultReference
:param skip_token: The skip token.
:type skip_token: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: KeyVaultKeyCollection or the result of cls(response)
:rtype: ~logic_management_client.models.KeyVaultKeyCollection
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.KeyVaultKeyCollection"]
error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
_list_key_vault_keys = models.ListKeyVaultKeysDefinition(key_vault=key_vault, skip_token=skip_token)
api_version = "2019-05-01"
content_type = kwargs.pop("content_type", "application/json")
def prepare_request(next_link=None):
if not next_link:
# Construct URL
url = self.list_key_vault_key.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'integrationAccountName': self._serialize.url("integration_account_name", integration_account_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
else:
url = next_link
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = 'application/json'
# Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(_list_key_vault_keys, 'ListKeyVaultKeysDefinition')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('KeyVaultKeyCollection', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_key_vault_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/integrationAccounts/{integrationAccountName}/listKeyVaultKeys'}
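# Note: unlike the other operations in this class, `list_key_vault_key`
# returns an ItemPaged iterator, so `prepare_request`/`extract_data`/`get_next`
# run lazily as the caller iterates. An illustrative call (argument values
# are placeholders, not from this module):
#
#     for key in client.integration_accounts.list_key_vault_key(
#             "my-rg", "my-account", key_vault=my_key_vault_ref):
#         print(key)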
def log_tracking_event(
self,
resource_group_name, # type: str
integration_account_name, # type: str
source_type, # type: str
events, # type: List["models.TrackingEvent"]
track_events_options=None, # type: Optional[Union[str, "models.TrackEventsOperationOptions"]]
**kwargs # type: Any
):
# type: (...) -> None
"""Logs the integration account's tracking events.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param integration_account_name: The integration account name.
:type integration_account_name: str
:param source_type: The source type.
:type source_type: str
:param events: The events.
:type events: list[~logic_management_client.models.TrackingEvent]
:param track_events_options: The track events options.
:type track_events_options: str or ~logic_management_client.models.TrackEventsOperationOptions
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
_log_tracking_events = models.TrackingEventsDefinition(source_type=source_type, track_events_options=track_events_options, events=events)
api_version = "2019-05-01"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self.log_tracking_event.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'integrationAccountName': self._serialize.url("integration_account_name", integration_account_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
# Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(_log_tracking_events, 'TrackingEventsDefinition')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error)
if cls:
return cls(pipeline_response, None, {})
log_tracking_event.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/integrationAccounts/{integrationAccountName}/logTrackingEvents'}
def regenerate_access_key(
self,
resource_group_name, # type: str
integration_account_name, # type: str
key_type=None, # type: Optional[Union[str, "models.KeyType"]]
**kwargs # type: Any
):
# type: (...) -> "models.IntegrationAccount"
"""Regenerates the integration account access key.
:param resource_group_name: The resource group name.
:type resource_group_name: str
:param integration_account_name: The integration account name.
:type integration_account_name: str
:param key_type: The key type.
:type key_type: str or ~logic_management_client.models.KeyType
:keyword callable cls: A custom type or function that will be passed the direct response
:return: IntegrationAccount or the result of cls(response)
:rtype: ~logic_management_client.models.IntegrationAccount
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["models.IntegrationAccount"]
error_map = kwargs.pop('error_map', {404: ResourceNotFoundError, 409: ResourceExistsError})
_regenerate_access_key = models.RegenerateActionParameter(key_type=key_type)
api_version = "2019-05-01"
content_type = kwargs.pop("content_type", "application/json")
# Construct URL
url = self.regenerate_access_key.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'integrationAccountName': self._serialize.url("integration_account_name", integration_account_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = 'application/json'
# Construct and send request
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(_regenerate_access_key, 'RegenerateActionParameter')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize(models.ErrorResponse, response)
raise HttpResponseError(response=response, model=error)
deserialized = self._deserialize('IntegrationAccount', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
regenerate_access_key.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Logic/integrationAccounts/{integrationAccountName}/regenerateAccessKey'}
[dataset columns omitted: per-file quality-signal values for the code above]
--- next file: tests/models/test_models.py | reputage/didery | Apache-2.0 | 18,129 bytes | Python ---
import arrow
try:
import simplejson as json
except ImportError:
import json
import didery.crypto.eddsa
from didery.models.models import ValidatedHistoryModel, ValidatedEventsModel, BasicHistoryModel, DataModel
from didery.help import helping as h
SK = b"\xb3\xd0\xbdL]\xcc\x08\x90\xa5\xbd\xc6\xa1 '\x82\x9c\x18\xecf\xa6x\xe2]Ux\xa5c\x0f\xe2\x86*\xa04\xe7\xfaf\x08o\x18\xd6\xc5s\xfc+\xdc \xb4\xb4\xa6G\xcfZ\x96\x01\x1e%\x0f\x96\x8c\xfa-3J<"
VK = b"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="
DID = "did:dad:NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="
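# Shared fixtures for the tests below: SK is the raw signing key passed to
# didery.crypto.eddsa.signResource, VK is the matching base64url-encoded
# verification key, and DID is the identifier built by prefixing VK with
# "did:dad:".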
def testDataModel():
data = {
"id": DID,
"signer": 0,
"changed": "2000-01-01T00:00:01+00:00",
"signers": [
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw=",
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="
]
}
test_model = DataModel(data)
assert test_model.data == data
def testDataModelToJson():
data = {
"id": DID,
"signer": 0,
"changed": "2000-01-01T00:00:01+00:00",
"signers": [
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw=",
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="
]
}
test_model = DataModel(data)
assert test_model.toJson() == json.dumps(data)
def testDataModelToBytes():
data = {
"id": DID,
"signer": 0,
"changed": "2000-01-01T00:00:01+00:00",
"signers": [
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw=",
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="
]
}
test_model = DataModel(data)
assert test_model.toBytes() == json.dumps(data).encode()
def testDataModelFromJson():
data = {
"id": DID,
"signer": 0,
"changed": "2000-01-01T00:00:01+00:00",
"signers": [
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw=",
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="
]
}
test_model = DataModel({})
assert test_model.data == {}
test_model.fromJson(json.dumps(data))
assert test_model.data == data
def testDataModelFromBytes():
data = {
"id": DID,
"signer": 0,
"changed": "2000-01-01T00:00:01+00:00",
"signers": [
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw=",
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="
]
}
test_model = DataModel({})
assert test_model.data == {}
test_model.fromBytes(json.dumps(data).encode())
assert test_model.data == data
def testBasicHistoryModel():
data = {
"id": DID,
"signer": 0,
"changed": "2000-01-01T00:00:01+00:00",
"signers": [
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw=",
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="
]
}
test_model = BasicHistoryModel(data)
assert test_model.data == data
assert test_model.history == data
assert test_model.id == DID
assert test_model.changed == data['changed']
assert test_model.parsedChanged == arrow.get("2000-01-01T00:00:01+00:00")
assert test_model.signer == 0
assert test_model.signers == ["NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw=",
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="]
def testValidatedHistoryModel():
data = {
"id": DID,
"signer": 0,
"changed": "2000-01-01T00:00:01+00:00",
"signers": [
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw=",
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="
]
}
sigs = [didery.crypto.eddsa.signResource(json.dumps(data).encode(), SK)]
history = {
"history": data,
"signatures": sigs,
}
test_data = [
history
]
test_model = ValidatedHistoryModel(test_data, "NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw=")
assert test_model.mode == "method"
assert test_model.vk == "NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="
assert test_model.index == 0
assert test_model.id == DID
assert test_model.changed == data['changed']
assert test_model.parsedChanged == arrow.get("2000-01-01T00:00:01+00:00")
assert test_model.signer == 0
assert test_model.signers == ["NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw=",
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="]
assert test_model.history == data
assert test_model.signatures == sigs
assert test_model.selected == history
def testValidatedHistoryModelSetSelected():
data = {
"id": DID,
"signer": 0,
"changed": "2000-01-01T00:00:01+00:00",
"signers": [
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw=",
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="
]
}
sigs = [didery.crypto.eddsa.signResource(json.dumps(data).encode(), SK)]
history = {
"history": data,
"signatures": sigs,
}
seed = b'\x92[\xcb\xf4\xee5+\xcf\xd4b*%/\xabw8\xd4d\xa2\xf8\xad\xa7U\x19,\xcfS\x12\xa6l\xba"'
vk, sk, did, body = didery.crypto.eddsa.genDidHistory(seed, signer=0, numSigners=2)
vk = h.bytesToStr64u(vk)
data2 = json.loads(body)
data2["id"] = DID
sigs2 = [didery.crypto.eddsa.signResource(json.dumps(data2).encode(), sk)]
history2 = {
"history": data2,
"signatures": sigs2
}
test_data = [
history,
history2
]
test_model = ValidatedHistoryModel(test_data, vk)
assert test_model.vk == vk
assert test_model.index == 1
def testEmptyValidatedHistoryModel():
test_model = ValidatedHistoryModel(None)
assert test_model.vk is None
vk = "NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="
test_model.selected = vk
assert test_model.vk == vk
assert test_model.index is None
def testValidateHistoryModelUpdate():
data = {
"id": DID,
"signer": 0,
"changed": "2000-01-01T00:00:01+00:00",
"signers": [
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw=",
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="
]
}
sigs = [didery.crypto.eddsa.signResource(json.dumps(data).encode(), SK)]
history = {
"history": data,
"signatures": sigs,
}
test_data = [
history
]
test_model = ValidatedHistoryModel(test_data)
seed = b'\x92[\xcb\xf4\xee5+\xcf\xd4b*%/\xabw8\xd4d\xa2\xf8\xad\xa7U\x19,\xcfS\x12\xa6l\xba"'
vk, sk, did, body = didery.crypto.eddsa.genDidHistory(seed, signer=0, numSigners=2)
vk = h.bytesToStr64u(vk)
data2 = json.loads(body)
data2["id"] = DID
sigs2 = [didery.crypto.eddsa.signResource(json.dumps(data2).encode(), sk)]
history2 = {
"history": data2,
"signatures": sigs2
}
test_data = [
history2
]
test_model.update(0, history2)
assert test_model.data == test_data
def testValidateHistoryModelUpdateBadIndex():
data = {
"id": DID,
"signer": 0,
"changed": "2000-01-01T00:00:01+00:00",
"signers": [
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw=",
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="
]
}
sigs = [didery.crypto.eddsa.signResource(json.dumps(data).encode(), SK)]
history = {
"history": data,
"signatures": sigs,
}
test_data = [
history
]
test_model = ValidatedHistoryModel(test_data)
history2 = {}
outOfRange = 1
test_model.update(outOfRange, history2)
assert test_model.data == test_data
def testValidateHistoryModelUpdateNone():
data = {
"id": DID,
"signer": 0,
"changed": "2000-01-01T00:00:01+00:00",
"signers": [
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw=",
"NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="
]
}
sigs = [didery.crypto.eddsa.signResource(json.dumps(data).encode(), SK)]
history = {
"history": data,
"signatures": sigs,
}
test_data = [
history
]
test_model = ValidatedHistoryModel(test_data, "NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw=")
history2 = None
test_model.update(0, history2)
assert test_model.data == [None]
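# The history-model tests above all rebuild the same signed-history fixture
# inline. A small helper along these lines (a sketch, not part of the test
# module) would remove the duplication:
#
#     def make_signed_history(data, sk=SK):
#         sigs = [didery.crypto.eddsa.signResource(json.dumps(data).encode(), sk)]
#         return {"history": data, "signatures": sigs}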
class TestEventsModel:
def test_model_creation(self):
vk = "NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="
data = {
"id": DID,
"signer": 0,
"changed": "2000-01-01T00:00:01+00:00",
"signers": [
vk,
vk
]
}
sigs = [didery.crypto.eddsa.signResource(json.dumps(data).encode(), SK)]
event_data = {
"event": data,
"signatures": sigs,
}
test_data = [
[
event_data,
event_data
],
[
event_data
]
]
test_model = ValidatedEventsModel(test_data)
for events in test_model.data:
for event in events:
assert event.data == event_data
def test_find(self):
vk1 = "NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="
data1 = {
"id": DID,
"signer": 0,
"changed": "2000-01-01T00:00:01+00:00",
"signers": [
vk1,
vk1
]
}
vk2 = "45NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zS="
data2 = {
"id": DID,
"signer": 0,
"changed": "2000-01-01T00:00:01+00:00",
"signers": [
vk2,
vk2
]
}
sigs1 = [didery.crypto.eddsa.signResource(json.dumps(data1).encode(), SK)]
sigs2 = [didery.crypto.eddsa.signResource(json.dumps(data2).encode(), SK)]
event1 = {
"event": data1,
"signatures": sigs1,
}
event2 = {
"event": data2,
"signatures": sigs2,
}
test_data = [
[
event1,
event1
],
[
event2
]
]
test_model = ValidatedEventsModel(test_data)
index = test_model.find(vk1)
assert index == 0
test_model = ValidatedEventsModel(test_data)
index = test_model.find(vk2)
assert index == 1
def test_find_non_existent(self):
vk = "NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="
data = {
"id": DID,
"signer": 0,
"changed": "2000-01-01T00:00:01+00:00",
"signers": [
vk,
vk
]
}
sigs = [didery.crypto.eddsa.signResource(json.dumps(data).encode(), SK)]
event = {
"event": data,
"signatures": sigs,
}
bad_vk = "45NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zS="
test_data = [
[
event,
event
]
]
test_model = ValidatedEventsModel(test_data)
index = test_model.find(bad_vk)
assert index is None
def test_find_empty_data(self):
test_data = []
vk = "NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="
test_model = ValidatedEventsModel(test_data)
index = test_model.find(vk)
assert index is None
test_model = ValidatedEventsModel(None)
index = test_model.find(vk)
assert index is None
def test_to_dict(self):
vk1 = "NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="
data1 = {
"id": DID,
"signer": 0,
"changed": "2000-01-01T00:00:01+00:00",
"signers": [
vk1,
vk1
]
}
vk2 = "45NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zS="
data2 = {
"id": DID,
"signer": 0,
"changed": "2000-01-01T00:00:01+00:00",
"signers": [
vk2,
vk2
]
}
sigs1 = [didery.crypto.eddsa.signResource(json.dumps(data1).encode(), SK)]
sigs2 = [didery.crypto.eddsa.signResource(json.dumps(data2).encode(), SK)]
event1 = {
"event": data1,
"signatures": sigs1,
}
event2 = {
"event": data2,
"signatures": sigs2,
}
test_data = [
[
event1,
event1
],
[
event2
]
]
test_model = ValidatedEventsModel(test_data)
data = test_model.to_dict()
assert data == test_data
def test_to_list(self):
vk1 = "NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="
data1 = {
"id": DID,
"signer": 0,
"changed": "2000-01-01T00:00:01+00:00",
"signers": [
vk1,
vk1
]
}
vk2 = "45NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zS="
data2 = {
"id": DID,
"signer": 0,
"changed": "2000-01-01T00:00:01+00:00",
"signers": [
vk2,
vk2
]
}
sigs1 = [didery.crypto.eddsa.signResource(json.dumps(data1).encode(), SK)]
sigs2 = [didery.crypto.eddsa.signResource(json.dumps(data2).encode(), SK)]
event1 = {
"event": data1,
"signatures": sigs1,
}
event2 = {
"event": data2,
"signatures": sigs2,
}
test_data = [
[
event1,
event1
],
[
event2
]
]
test_model = ValidatedEventsModel(test_data)
data = test_model.to_list()
assert data == test_data
def test_to_json(self):
vk1 = "NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="
data1 = {
"id": DID,
"signer": 0,
"changed": "2000-01-01T00:00:01+00:00",
"signers": [
vk1,
vk1
]
}
vk2 = "45NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zS="
data2 = {
"id": DID,
"signer": 0,
"changed": "2000-01-01T00:00:01+00:00",
"signers": [
vk2,
vk2
]
}
sigs1 = [didery.crypto.eddsa.signResource(json.dumps(data1).encode(), SK)]
sigs2 = [didery.crypto.eddsa.signResource(json.dumps(data2).encode(), SK)]
event1 = {
"event": data1,
"signatures": sigs1,
}
event2 = {
"event": data2,
"signatures": sigs2,
}
test_data = [
[
event1,
event1
],
[
event2
]
]
test_model = ValidatedEventsModel(test_data)
data = test_model.toJson()
assert data == json.dumps(test_data)
def test_to_bytes(self):
vk1 = "NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="
data1 = {
"id": DID,
"signer": 0,
"changed": "2000-01-01T00:00:01+00:00",
"signers": [
vk1,
vk1
]
}
vk2 = "45NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zS="
data2 = {
"id": DID,
"signer": 0,
"changed": "2000-01-01T00:00:01+00:00",
"signers": [
vk2,
vk2
]
}
sigs1 = [didery.crypto.eddsa.signResource(json.dumps(data1).encode(), SK)]
sigs2 = [didery.crypto.eddsa.signResource(json.dumps(data2).encode(), SK)]
event1 = {
"event": data1,
"signatures": sigs1,
}
event2 = {
"event": data2,
"signatures": sigs2,
}
test_data = [
[
event1,
event1
],
[
event2
]
]
test_model = ValidatedEventsModel(test_data)
data = test_model.toBytes()
assert data == json.dumps(test_data).encode()
def test_from_json(self):
vk1 = "NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="
data1 = {
"id": DID,
"signer": 0,
"changed": "2000-01-01T00:00:01+00:00",
"signers": [
vk1,
vk1
]
}
sigs1 = [didery.crypto.eddsa.signResource(json.dumps(data1).encode(), SK)]
event_data = {
"event": data1,
"signatures": sigs1,
}
test_data = [
[
event_data,
event_data
],
[
event_data
]
]
test_model = ValidatedEventsModel(None)
test_model.fromJson(json.dumps(test_data))
for events in test_model.data:
for event in events:
assert event.data == event_data
def test_from_bytes(self):
vk1 = "NOf6ZghvGNbFc_wr3CC0tKZHz1qWAR4lD5aM-i0zSjw="
data1 = {
"id": DID,
"signer": 0,
"changed": "2000-01-01T00:00:01+00:00",
"signers": [
vk1,
vk1
]
}
sigs1 = [didery.crypto.eddsa.signResource(json.dumps(data1).encode(), SK)]
event_data = {
"event": data1,
"signatures": sigs1,
}
test_data = [
[
event_data,
event_data
],
[
event_data
]
]
test_model = ValidatedEventsModel(None)
test_model.fromBytes(json.dumps(test_data).encode())
for events in test_model.data:
for event in events:
assert event.data == event_data
[dataset columns omitted: per-file quality-signal values for the code above]
--- next file: tests/test_starlette_middleware_logging.py | Colin-b/layab | MIT | 13,854 bytes | Python ---
import logging
import pytest
from starlette.requests import Request
from starlette.applications import Starlette
from starlette.endpoints import HTTPEndpoint
from starlette.middleware import Middleware
from starlette.responses import PlainTextResponse
from starlette.testclient import TestClient
import layab.starlette
@pytest.fixture
def client():
app = Starlette(
middleware=[
Middleware(layab.starlette.LoggingMiddleware, skip_paths=["/skipped"])
]
)
@app.route("/logging")
class Logging(HTTPEndpoint):
def get(self, request: Request):
return PlainTextResponse("")
def post(self, request: Request):
return PlainTextResponse("")
def put(self, request: Request):
return PlainTextResponse("")
def delete(self, request: Request):
return PlainTextResponse("")
@app.route("/logging_failure")
class LoggingFailure(HTTPEndpoint):
def get(self, request: Request):
raise Exception("Error message")
def post(self, request: Request):
raise Exception("Error message")
def put(self, request: Request):
raise Exception("Error message")
def delete(self, request: Request):
raise Exception("Error message")
@app.route("/skipped")
class Skipped(HTTPEndpoint):
def get(self, request: Request):
return PlainTextResponse("")
def post(self, request: Request):
return PlainTextResponse("")
def put(self, request: Request):
return PlainTextResponse("")
def delete(self, request: Request):
return PlainTextResponse("")
return TestClient(app, raise_server_exceptions=False)
@pytest.fixture
def mock_uuid(monkeypatch):
class UUIDMock:
@staticmethod
def uuid4():
return "1-2-3-4-5"
monkeypatch.setattr(layab.starlette, "uuid", UUIDMock)
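# The mock_uuid fixture pins layab.starlette's uuid4 to "1-2-3-4-5" so the
# request_id field asserted in the logging tests below is deterministic.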
def test_log_get_request_details(client, caplog, mock_uuid):
caplog.set_level(logging.INFO)
response = client.get("/logging")
assert response.status_code == 200
assert response.text == ""
assert len(caplog.messages) == 2
assert eval(caplog.messages[0]) == {
"request_headers.accept": "*/*",
"request_headers.accept-encoding": "gzip, deflate",
"request_headers.connection": "keep-alive",
"request_headers.host": "testserver",
"request_headers.user-agent": "testclient",
"request_id": "1-2-3-4-5",
"request_method": "GET",
"request_status": "start",
"request_url.path": "/logging",
}
end_message = eval(caplog.messages[1])
end_message.pop("request_processing_time")
assert end_message == {
"request_headers.accept": "*/*",
"request_headers.accept-encoding": "gzip, deflate",
"request_headers.connection": "keep-alive",
"request_headers.host": "testserver",
"request_headers.user-agent": "testclient",
"request_id": "1-2-3-4-5",
"request_method": "GET",
"request_status": "success",
"request_status_code": 200,
"request_url.path": "/logging",
}
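# Pattern used by all the request-logging tests: the middleware emits one
# "start" record and one "success"/"error" record per request, each logged
# as a dict repr (hence eval() to parse it back), and nondeterministic
# fields (request_processing_time, error.traceback) are popped before the
# equality comparison.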
def test_log_delete_request_details(client, caplog, mock_uuid):
caplog.set_level(logging.INFO)
response = client.delete("/logging")
assert response.status_code == 200
assert response.text == ""
assert len(caplog.messages) == 2
assert eval(caplog.messages[0]) == {
"request_headers.accept": "*/*",
"request_headers.accept-encoding": "gzip, deflate",
"request_headers.connection": "keep-alive",
"request_headers.content-length": "0",
"request_headers.host": "testserver",
"request_headers.user-agent": "testclient",
"request_id": "1-2-3-4-5",
"request_method": "DELETE",
"request_status": "start",
"request_url.path": "/logging",
}
end_message = eval(caplog.messages[1])
end_message.pop("request_processing_time")
assert end_message == {
"request_headers.accept": "*/*",
"request_headers.accept-encoding": "gzip, deflate",
"request_headers.connection": "keep-alive",
"request_headers.content-length": "0",
"request_headers.host": "testserver",
"request_headers.user-agent": "testclient",
"request_id": "1-2-3-4-5",
"request_method": "DELETE",
"request_status": "success",
"request_status_code": 200,
"request_url.path": "/logging",
}
def test_log_post_request_details(client, caplog, mock_uuid):
caplog.set_level(logging.INFO)
response = client.post("/logging")
assert response.status_code == 200
assert response.text == ""
assert len(caplog.messages) == 2
assert eval(caplog.messages[0]) == {
"request_headers.accept": "*/*",
"request_headers.accept-encoding": "gzip, deflate",
"request_headers.connection": "keep-alive",
"request_headers.content-length": "0",
"request_headers.host": "testserver",
"request_headers.user-agent": "testclient",
"request_id": "1-2-3-4-5",
"request_method": "POST",
"request_status": "start",
"request_url.path": "/logging",
}
end_message = eval(caplog.messages[1])
end_message.pop("request_processing_time")
assert end_message == {
"request_headers.accept": "*/*",
"request_headers.accept-encoding": "gzip, deflate",
"request_headers.connection": "keep-alive",
"request_headers.content-length": "0",
"request_headers.host": "testserver",
"request_headers.user-agent": "testclient",
"request_id": "1-2-3-4-5",
"request_method": "POST",
"request_status": "success",
"request_status_code": 200,
"request_url.path": "/logging",
}
def test_log_put_request_details(client, caplog, mock_uuid):
caplog.set_level(logging.INFO)
response = client.put("/logging")
assert response.status_code == 200
assert response.text == ""
assert len(caplog.messages) == 2
assert eval(caplog.messages[0]) == {
"request_headers.accept": "*/*",
"request_headers.accept-encoding": "gzip, deflate",
"request_headers.connection": "keep-alive",
"request_headers.content-length": "0",
"request_headers.host": "testserver",
"request_headers.user-agent": "testclient",
"request_id": "1-2-3-4-5",
"request_method": "PUT",
"request_status": "start",
"request_url.path": "/logging",
}
end_message = eval(caplog.messages[1])
end_message.pop("request_processing_time")
assert end_message == {
"request_headers.accept": "*/*",
"request_headers.accept-encoding": "gzip, deflate",
"request_headers.connection": "keep-alive",
"request_headers.content-length": "0",
"request_headers.host": "testserver",
"request_headers.user-agent": "testclient",
"request_id": "1-2-3-4-5",
"request_method": "PUT",
"request_status": "success",
"request_status_code": 200,
"request_url.path": "/logging",
}
def test_log_get_request_details_on_failure(client, caplog, mock_uuid):
caplog.set_level(logging.INFO)
response = client.get("/logging_failure")
assert response.status_code == 500
assert response.text == "Internal Server Error"
assert len(caplog.messages) == 2
assert eval(caplog.messages[0]) == {
"request_headers.accept": "*/*",
"request_headers.accept-encoding": "gzip, deflate",
"request_headers.connection": "keep-alive",
"request_headers.host": "testserver",
"request_headers.user-agent": "testclient",
"request_id": "1-2-3-4-5",
"request_method": "GET",
"request_status": "start",
"request_url.path": "/logging_failure",
}
end_message = eval(caplog.messages[1])
end_message.pop("error.traceback")
assert end_message == {
"error.class": "Exception",
"error.msg": "Error message",
"request.data": b"",
"request_headers.accept": "*/*",
"request_headers.accept-encoding": "gzip, deflate",
"request_headers.connection": "keep-alive",
"request_headers.host": "testserver",
"request_headers.user-agent": "testclient",
"request_id": "1-2-3-4-5",
"request_method": "GET",
"request_status": "error",
"request_url.path": "/logging_failure",
}
def test_log_delete_request_details_on_failure(client, caplog, mock_uuid):
caplog.set_level(logging.INFO)
response = client.delete("/logging_failure")
assert response.status_code == 500
assert response.text == "Internal Server Error"
assert len(caplog.messages) == 2
assert eval(caplog.messages[0]) == {
"request_headers.accept": "*/*",
"request_headers.accept-encoding": "gzip, deflate",
"request_headers.connection": "keep-alive",
"request_headers.content-length": "0",
"request_headers.host": "testserver",
"request_headers.user-agent": "testclient",
"request_id": "1-2-3-4-5",
"request_method": "DELETE",
"request_status": "start",
"request_url.path": "/logging_failure",
}
end_message = eval(caplog.messages[1])
end_message.pop("error.traceback")
assert end_message == {
"error.class": "Exception",
"error.msg": "Error message",
"request.data": b"",
"request_headers.accept": "*/*",
"request_headers.accept-encoding": "gzip, deflate",
"request_headers.connection": "keep-alive",
"request_headers.content-length": "0",
"request_headers.host": "testserver",
"request_headers.user-agent": "testclient",
"request_id": "1-2-3-4-5",
"request_method": "DELETE",
"request_status": "error",
"request_url.path": "/logging_failure",
}
def test_log_post_request_details_on_failure(client, caplog, mock_uuid):
caplog.set_level(logging.INFO)
response = client.post("/logging_failure")
assert response.status_code == 500
assert response.text == "Internal Server Error"
assert len(caplog.messages) == 2
assert eval(caplog.messages[0]) == {
"request_headers.accept": "*/*",
"request_headers.accept-encoding": "gzip, deflate",
"request_headers.connection": "keep-alive",
"request_headers.content-length": "0",
"request_headers.host": "testserver",
"request_headers.user-agent": "testclient",
"request_id": "1-2-3-4-5",
"request_method": "POST",
"request_status": "start",
"request_url.path": "/logging_failure",
}
end_message = eval(caplog.messages[1])
end_message.pop("error.traceback")
assert end_message == {
"error.class": "Exception",
"error.msg": "Error message",
"request.data": b"",
"request_headers.accept": "*/*",
"request_headers.accept-encoding": "gzip, deflate",
"request_headers.connection": "keep-alive",
"request_headers.content-length": "0",
"request_headers.host": "testserver",
"request_headers.user-agent": "testclient",
"request_id": "1-2-3-4-5",
"request_method": "POST",
"request_status": "error",
"request_url.path": "/logging_failure",
}
def test_log_put_request_details_on_failure(client, caplog, mock_uuid):
caplog.set_level(logging.INFO)
response = client.put("/logging_failure")
assert response.status_code == 500
assert response.text == "Internal Server Error"
assert len(caplog.messages) == 2
assert eval(caplog.messages[0]) == {
"request_headers.accept": "*/*",
"request_headers.accept-encoding": "gzip, deflate",
"request_headers.connection": "keep-alive",
"request_headers.content-length": "0",
"request_headers.host": "testserver",
"request_headers.user-agent": "testclient",
"request_id": "1-2-3-4-5",
"request_method": "PUT",
"request_status": "start",
"request_url.path": "/logging_failure",
}
end_message = eval(caplog.messages[1])
end_message.pop("error.traceback")
assert end_message == {
"error.class": "Exception",
"error.msg": "Error message",
"request.data": b"",
"request_headers.accept": "*/*",
"request_headers.accept-encoding": "gzip, deflate",
"request_headers.connection": "keep-alive",
"request_headers.content-length": "0",
"request_headers.host": "testserver",
"request_headers.user-agent": "testclient",
"request_id": "1-2-3-4-5",
"request_method": "PUT",
"request_status": "error",
"request_url.path": "/logging_failure",
}
def test_skip_log_get_request(client, caplog):
caplog.set_level(logging.INFO)
response = client.get("/skipped")
assert response.status_code == 200
assert response.text == ""
assert len(caplog.messages) == 0
def test_skip_log_delete_request(client, caplog):
caplog.set_level(logging.INFO)
response = client.delete("/skipped")
assert response.status_code == 200
assert response.text == ""
assert len(caplog.messages) == 0
def test_skip_log_post_request(client, caplog):
caplog.set_level(logging.INFO)
response = client.post("/skipped")
assert response.status_code == 200
assert response.text == ""
assert len(caplog.messages) == 0
def test_skip_log_put_request(client, caplog):
caplog.set_level(logging.INFO)
response = client.put("/skipped")
assert response.status_code == 200
assert response.text == ""
assert len(caplog.messages) == 0
[dataset columns omitted: per-file quality-signal values for the code above]
--- next file: tests/test_adder.py | Kingorgg/logisimpy | MIT | 11,528 bytes | Python ---
import unittest
from logisimpy.adder import HalfAdder, FullAdder, Adder4Bit, Adder8Bit
halfadder0 = HalfAdder()
class TestHalfAdder(unittest.TestCase):
def test_halfadder_0_0(self):
halfadder0.a.drive(0)
halfadder0.b.drive(0)
assert halfadder0.sum.state == 0
assert halfadder0.carry.state == 0
def test_halfadder_0_1(self):
halfadder0.a.drive(0)
halfadder0.b.drive(1)
assert halfadder0.sum.state == 1
assert halfadder0.carry.state == 0
def test_halfadder_1_0(self):
halfadder0.a.drive(1)
halfadder0.b.drive(0)
assert halfadder0.sum.state == 1
assert halfadder0.carry.state == 0
def test_halfadder_1_1(self):
halfadder0.a.drive(1)
halfadder0.b.drive(1)
assert halfadder0.sum.state == 0
assert halfadder0.carry.state == 1
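# Reference semantics the assertions above encode: for a half adder,
# sum = a XOR b and carry = a AND b, so 1 + 1 yields sum 0 with carry 1.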
fulladder0 = FullAdder()
class TestFullAdder(unittest.TestCase):
def test_fulladder_0_0_0(self):
fulladder0.a.drive(0)
fulladder0.b.drive(0)
fulladder0.carry_in.drive(0)
assert fulladder0.sum.state == 0
assert fulladder0.carry.state == 0
def test_fulladder_0_0_1(self):
fulladder0.a.drive(0)
fulladder0.b.drive(0)
fulladder0.carry_in.drive(1)
assert fulladder0.sum.state == 1
assert fulladder0.carry.state == 0
def test_fulladder_1_0_0(self):
fulladder0.a.drive(1)
fulladder0.b.drive(0)
fulladder0.carry_in.drive(0)
assert fulladder0.sum.state == 1
assert fulladder0.carry.state == 0
def test_fulladder_0_1_0(self):
fulladder0.a.drive(0)
fulladder0.b.drive(1)
fulladder0.carry_in.drive(0)
assert fulladder0.sum.state == 1
assert fulladder0.carry.state == 0
def test_fulladder_1_1_0(self):
fulladder0.a.drive(1)
fulladder0.b.drive(1)
fulladder0.carry_in.drive(0)
assert fulladder0.sum.state == 0
assert fulladder0.carry.state == 1
def test_fulladder_1_1_1(self):
fulladder0.a.drive(1)
fulladder0.b.drive(1)
fulladder0.carry_in.drive(1)
assert fulladder0.sum.state == 1
assert fulladder0.carry.state == 1
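# Full-adder reference semantics: sum = a XOR b XOR carry_in and
# carry = (a AND b) OR (carry_in AND (a XOR b)); the 1+1+1 case above
# therefore produces sum 1 with carry 1.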
adder4bit = Adder4Bit()
class TestAdder4Bit(unittest.TestCase):
def test_adder4bit_0_plus_0(self):
adder4bit.a0.drive(0)
adder4bit.a1.drive(0)
adder4bit.a2.drive(0)
adder4bit.a3.drive(0)
adder4bit.b0.drive(0)
adder4bit.b1.drive(0)
adder4bit.b2.drive(0)
adder4bit.b3.drive(0)
adder4bit.carry_in.drive(0)
assert adder4bit.sum0.state == 0
assert adder4bit.sum1.state == 0
assert adder4bit.sum2.state == 0
assert adder4bit.sum3.state == 0
assert adder4bit.carry.state == 0
def test_adder4bit_1_plus_1(self):
adder4bit.a0.drive(1)
adder4bit.a1.drive(0)
adder4bit.a2.drive(0)
adder4bit.a3.drive(0)
adder4bit.b0.drive(1)
adder4bit.b1.drive(0)
adder4bit.b2.drive(0)
adder4bit.b3.drive(0)
adder4bit.carry_in.drive(0)
assert adder4bit.sum0.state == 0
assert adder4bit.sum1.state == 1
assert adder4bit.sum2.state == 0
assert adder4bit.sum3.state == 0
assert adder4bit.carry.state == 0
def test_adder4bit_5_plus_1(self):
adder4bit.a0.drive(1)
adder4bit.a1.drive(0)
adder4bit.a2.drive(1)
adder4bit.a3.drive(0)
adder4bit.b0.drive(1)
adder4bit.b1.drive(0)
adder4bit.b2.drive(0)
adder4bit.b3.drive(0)
adder4bit.carry_in.drive(0)
assert adder4bit.sum0.state == 0
assert adder4bit.sum1.state == 1
assert adder4bit.sum2.state == 1
assert adder4bit.sum3.state == 0
assert adder4bit.carry.state == 0
def test_adder4bit_7_plus_1(self):
adder4bit.a0.drive(1)
adder4bit.a1.drive(1)
adder4bit.a2.drive(1)
adder4bit.a3.drive(0)
adder4bit.b0.drive(1)
adder4bit.b1.drive(0)
adder4bit.b2.drive(0)
adder4bit.b3.drive(0)
adder4bit.carry_in.drive(0)
assert adder4bit.sum0.state == 0
assert adder4bit.sum1.state == 0
assert adder4bit.sum2.state == 0
assert adder4bit.sum3.state == 1
assert adder4bit.carry.state == 0
def test_adder4bit_15_plus_1(self):
adder4bit.a0.drive(1)
adder4bit.a1.drive(1)
adder4bit.a2.drive(1)
adder4bit.a3.drive(1)
adder4bit.b0.drive(1)
adder4bit.b1.drive(0)
adder4bit.b2.drive(0)
adder4bit.b3.drive(0)
adder4bit.carry_in.drive(0)
assert adder4bit.sum0.state == 0
assert adder4bit.sum1.state == 0
assert adder4bit.sum2.state == 0
assert adder4bit.sum3.state == 0
assert adder4bit.carry.state == 1
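# Bit ordering in these multi-bit tests: a0/b0/sum0 are the least
# significant bits, so e.g. "5" is driven as a0=1, a1=0, a2=1, a3=0.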
adder8bit = Adder8Bit()
class TestAdder8Bit(unittest.TestCase):
def test_adder8bit_0_plus_0(self):
adder8bit.a0.drive(0)
adder8bit.a1.drive(0)
adder8bit.a2.drive(0)
adder8bit.a3.drive(0)
adder8bit.a4.drive(0)
adder8bit.a5.drive(0)
adder8bit.a6.drive(0)
adder8bit.a7.drive(0)
adder8bit.b0.drive(0)
adder8bit.b1.drive(0)
adder8bit.b2.drive(0)
adder8bit.b3.drive(0)
adder8bit.b4.drive(0)
adder8bit.b5.drive(0)
adder8bit.b6.drive(0)
adder8bit.b7.drive(0)
adder8bit.carry_in.drive(0)
assert adder8bit.sum0.state == 0
assert adder8bit.sum1.state == 0
assert adder8bit.sum2.state == 0
assert adder8bit.sum3.state == 0
assert adder8bit.sum4.state == 0
assert adder8bit.sum5.state == 0
assert adder8bit.sum6.state == 0
assert adder8bit.sum7.state == 0
assert adder8bit.carry.state == 0
def test_adder8bit_1_plus_1(self):
adder8bit.a0.drive(1)
adder8bit.a1.drive(0)
adder8bit.a2.drive(0)
adder8bit.a3.drive(0)
adder8bit.a4.drive(0)
adder8bit.a5.drive(0)
adder8bit.a6.drive(0)
adder8bit.a7.drive(0)
adder8bit.b0.drive(1)
adder8bit.b1.drive(0)
adder8bit.b2.drive(0)
adder8bit.b3.drive(0)
adder8bit.b4.drive(0)
adder8bit.b5.drive(0)
adder8bit.b6.drive(0)
adder8bit.b7.drive(0)
adder8bit.carry_in.drive(0)
assert adder8bit.sum0.state == 0
assert adder8bit.sum1.state == 1
assert adder8bit.sum2.state == 0
assert adder8bit.sum3.state == 0
assert adder8bit.sum4.state == 0
assert adder8bit.sum5.state == 0
assert adder8bit.sum6.state == 0
assert adder8bit.sum7.state == 0
assert adder8bit.carry.state == 0
def test_adder8bit_5_plus_1(self):
adder8bit.a0.drive(1)
adder8bit.a1.drive(0)
adder8bit.a2.drive(1)
adder8bit.a3.drive(0)
adder8bit.a4.drive(0)
adder8bit.a5.drive(0)
adder8bit.a6.drive(0)
adder8bit.a7.drive(0)
adder8bit.b0.drive(1)
adder8bit.b1.drive(0)
adder8bit.b2.drive(0)
adder8bit.b3.drive(0)
adder8bit.b4.drive(0)
adder8bit.b5.drive(0)
adder8bit.b6.drive(0)
adder8bit.b7.drive(0)
adder8bit.carry_in.drive(0)
assert adder8bit.sum0.state == 0
assert adder8bit.sum1.state == 1
assert adder8bit.sum2.state == 1
assert adder8bit.sum3.state == 0
assert adder8bit.sum4.state == 0
assert adder8bit.sum5.state == 0
assert adder8bit.sum6.state == 0
assert adder8bit.sum7.state == 0
assert adder8bit.carry.state == 0
def test_adder8bit_7_plus_1(self):
adder8bit.a0.drive(1)
adder8bit.a1.drive(1)
adder8bit.a2.drive(1)
adder8bit.a3.drive(0)
adder8bit.a4.drive(0)
adder8bit.a5.drive(0)
adder8bit.a6.drive(0)
adder8bit.a7.drive(0)
adder8bit.b0.drive(1)
adder8bit.b1.drive(0)
adder8bit.b2.drive(0)
adder8bit.b3.drive(0)
adder8bit.b4.drive(0)
adder8bit.b5.drive(0)
adder8bit.b6.drive(0)
adder8bit.b7.drive(0)
adder8bit.carry_in.drive(0)
assert adder8bit.sum0.state == 0
assert adder8bit.sum1.state == 0
assert adder8bit.sum2.state == 0
assert adder8bit.sum3.state == 1
assert adder8bit.sum4.state == 0
assert adder8bit.sum5.state == 0
assert adder8bit.sum6.state == 0
assert adder8bit.sum7.state == 0
assert adder8bit.carry.state == 0
def test_adder8bit_15_plus_1(self):
adder8bit.a0.drive(1)
adder8bit.a1.drive(1)
adder8bit.a2.drive(1)
adder8bit.a3.drive(1)
adder8bit.a4.drive(0)
adder8bit.a5.drive(0)
adder8bit.a6.drive(0)
adder8bit.a7.drive(0)
adder8bit.b0.drive(1)
adder8bit.b1.drive(0)
adder8bit.b2.drive(0)
adder8bit.b3.drive(0)
adder8bit.b4.drive(0)
adder8bit.b5.drive(0)
adder8bit.b6.drive(0)
adder8bit.b7.drive(0)
adder8bit.carry_in.drive(0)
assert adder8bit.sum0.state == 0
assert adder8bit.sum1.state == 0
assert adder8bit.sum2.state == 0
assert adder8bit.sum3.state == 0
assert adder8bit.sum4.state == 1
assert adder8bit.sum5.state == 0
assert adder8bit.sum6.state == 0
assert adder8bit.sum7.state == 0
assert adder8bit.carry.state == 0
def test_adder8bit_16_plus_16(self):
adder8bit.a0.drive(0)
adder8bit.a1.drive(0)
adder8bit.a2.drive(0)
adder8bit.a3.drive(0)
adder8bit.a4.drive(1)
adder8bit.a5.drive(0)
adder8bit.a6.drive(0)
adder8bit.a7.drive(0)
adder8bit.b0.drive(0)
adder8bit.b1.drive(0)
adder8bit.b2.drive(0)
adder8bit.b3.drive(0)
adder8bit.b4.drive(1)
adder8bit.b5.drive(0)
adder8bit.b6.drive(0)
adder8bit.b7.drive(0)
adder8bit.carry_in.drive(0)
assert adder8bit.sum0.state == 0
assert adder8bit.sum1.state == 0
assert adder8bit.sum2.state == 0
assert adder8bit.sum3.state == 0
assert adder8bit.sum4.state == 0
assert adder8bit.sum5.state == 1
assert adder8bit.sum6.state == 0
assert adder8bit.sum7.state == 0
assert adder8bit.carry.state == 0
def test_adder8bit_255_plus_1(self):
adder8bit.a0.drive(1)
adder8bit.a1.drive(1)
adder8bit.a2.drive(1)
adder8bit.a3.drive(1)
adder8bit.a4.drive(1)
adder8bit.a5.drive(1)
adder8bit.a6.drive(1)
adder8bit.a7.drive(1)
adder8bit.b0.drive(1)
adder8bit.b1.drive(0)
adder8bit.b2.drive(0)
adder8bit.b3.drive(0)
adder8bit.b4.drive(0)
adder8bit.b5.drive(0)
adder8bit.b6.drive(0)
adder8bit.b7.drive(0)
adder8bit.carry_in.drive(0)
assert adder8bit.sum0.state == 0
assert adder8bit.sum1.state == 0
assert adder8bit.sum2.state == 0
assert adder8bit.sum3.state == 0
assert adder8bit.sum4.state == 0
assert adder8bit.sum5.state == 0
assert adder8bit.sum6.state == 0
assert adder8bit.sum7.state == 0
assert adder8bit.carry.state == 1
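# The per-bit drive/assert pattern above repeats for every case; a
# table-driven helper keeps each new case to a single call. Minimal sketch
# assuming the drive()/state pin API shown above; `check_add` is a
# hypothetical name, not part of the original suite.
def check_add(adder, a, b, carry_in=0):
    for i in range(8):
        getattr(adder, 'a%d' % i).drive((a >> i) & 1)   # drive operand A, bit i
        getattr(adder, 'b%d' % i).drive((b >> i) & 1)   # drive operand B, bit i
    adder.carry_in.drive(carry_in)
    expected = a + b + carry_in                          # 9-bit reference result
    for i in range(8):
        assert getattr(adder, 'sum%d' % i).state == (expected >> i) & 1
    assert adder.carry.state == (expected >> 8) & 1      # carry-out is bit 8
# e.g. check_add(adder8bit, 255, 1) covers the same case as the last test above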
| 27.845411
| 70
| 0.606176
| 1,502
| 11,528
| 4.584554
| 0.039947
| 0.121115
| 0.189515
| 0.152483
| 0.936828
| 0.930439
| 0.929858
| 0.927244
| 0.896602
| 0.87235
| 0
| 0.108964
| 0.281922
| 11,528
| 413
| 71
| 27.912833
| 0.72288
| 0
| 0
| 0.854545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.327273
| 1
| 0.066667
| false
| 0
| 0.006061
| 0
| 0.084848
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| 6f26727936cd3bab357452b8e8a8643e32a070b6
| 74
| py
| Python
| Chapter 01/Chap01_Example1.175.py
| Anancha/Programming-Techniques-using-Python
| e80c329d2a27383909d358741a5cab03cb22fd8b
| ["MIT"] | null | null | null
| Chapter 01/Chap01_Example1.175.py
| Anancha/Programming-Techniques-using-Python
| e80c329d2a27383909d358741a5cab03cb22fd8b
| ["MIT"] | null | null | null
| Chapter 01/Chap01_Example1.175.py
| Anancha/Programming-Techniques-using-Python
| e80c329d2a27383909d358741a5cab03cb22fd8b
| ["MIT"] | null | null | null |
print(bool(0)) # -- B1 : False, since zero is the only falsy integer
print(bool(100)) # -- B2 : True, since any non-zero integer is truthy
print(bool(-100)) # -- B3 : True, since -100 is non-zero (the sign does not matter)
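# For completeness, bool() applies the same truthiness rules to any object;
# this sketch (not part of the original example) lists the other built-in
# falsy values -- each line prints False:
for value in (0, 0.0, '', [], {}, set(), None):
    print(bool(value))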
| 18.5
| 25
| 0.540541
| 12
| 74
| 3.333333
| 0.583333
| 0.675
| 0.6
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 0.162162
| 74
| 3
| 26
| 24.666667
| 0.483871
| 0.22973
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 7
| 6f65e2765f1da8dd34a293c362c62d5aa6d14e24
| 15,508
| py
| Python
| SBaaS_MFA/stage02_isotopomer_fittedFluxSplits_query.py
| dmccloskey/SBaaS_MFA
| 005e1d34c2ace7e28c53dffcab3e9cb8c7e7ce18
| ["MIT"] | null | null | null
| SBaaS_MFA/stage02_isotopomer_fittedFluxSplits_query.py
| dmccloskey/SBaaS_MFA
| 005e1d34c2ace7e28c53dffcab3e9cb8c7e7ce18
| ["MIT"] | null | null | null
| SBaaS_MFA/stage02_isotopomer_fittedFluxSplits_query.py
| dmccloskey/SBaaS_MFA
| 005e1d34c2ace7e28c53dffcab3e9cb8c7e7ce18
| ["MIT"] | null | null | null |
#SBaaS
from .stage02_isotopomer_fittedFluxSplits_postgresql_models import *
#SQLAlchemy (SQLAlchemyError is caught throughout the query methods below)
from sqlalchemy.exc import SQLAlchemyError
from SBaaS_base.sbaas_base import sbaas_base
from SBaaS_base.sbaas_base_query_update import sbaas_base_query_update
from SBaaS_base.sbaas_base_query_drop import sbaas_base_query_drop
from SBaaS_base.sbaas_base_query_initialize import sbaas_base_query_initialize
from SBaaS_base.sbaas_base_query_insert import sbaas_base_query_insert
from SBaaS_base.sbaas_base_query_select import sbaas_base_query_select
from SBaaS_base.sbaas_base_query_delete import sbaas_base_query_delete
from SBaaS_base.sbaas_template_query import sbaas_template_query
class stage02_isotopomer_fittedFluxSplits_query(sbaas_template_query):
def initialize_supportedTables(self):
'''Set the supported tables dict for ...
'''
tables_supported = {'data_stage02_isotopomer_fittedFluxSplits':data_stage02_isotopomer_fittedFluxSplits,
};
self.set_supportedTables(tables_supported);
## Query from data_stage02_isotopomer_fittedFluxSplits
# query simulation_dateAndTimes from data_stage02_isotopomer_fittedFluxSplits
def get_simulationDateAndTimes_simulationID_dataStage02IsotopomerfittedFluxSplits(self,simulation_id_I):
'''Query simulation_dateAndTimes that are used from the fitted splits'''
try:
data = self.session.query(data_stage02_isotopomer_fittedFluxSplits.simulation_dateAndTime).filter(
data_stage02_isotopomer_fittedFluxSplits.simulation_id.like(simulation_id_I),
data_stage02_isotopomer_fittedFluxSplits.used_.is_(True)).group_by(
data_stage02_isotopomer_fittedFluxSplits.simulation_dateAndTime).order_by(
data_stage02_isotopomer_fittedFluxSplits.simulation_dateAndTime.asc()).all();
simulation_dateAndTimes_O = [];
if data:
for d in data:
simulation_dateAndTimes_O.append(d.simulation_dateAndTime);
return simulation_dateAndTimes_O;
except SQLAlchemyError as e:
print(e);
# query split_ids from data_stage02_isotopomer_fittedFluxSplits
def get_splitIDs_simulationIDAndSimulationDateAndTime_dataStage02IsotopomerfittedFluxSplits(self,simulation_id_I,simulation_dateAndTime_I):
'''Query split ids that are used from the fitted splits'''
try:
data = self.session.query(data_stage02_isotopomer_fittedFluxSplits.split_id).filter(
data_stage02_isotopomer_fittedFluxSplits.simulation_id.like(simulation_id_I),
data_stage02_isotopomer_fittedFluxSplits.simulation_dateAndTime==simulation_dateAndTime_I,
data_stage02_isotopomer_fittedFluxSplits.used_.is_(True)).group_by(
data_stage02_isotopomer_fittedFluxSplits.split_id).order_by(
data_stage02_isotopomer_fittedFluxSplits.split_id.asc()).all();
split_ids_O = [];
if data:
for d in data:
split_ids_O.append(d.split_id);
return split_ids_O;
except SQLAlchemyError as e:
print(e);
# query split_units from data_stage02_isotopomer_fittedFluxSplits
def get_splitUnits_simulationIDAndSimulationDateAndTime_dataStage02IsotopomerfittedFluxSplits(self,simulation_id_I,simulation_dateAndTime_I):
'''Query split_units that are used from data_stage02_isotopomer_fittedFluxSplits'''
try:
data = self.session.query(data_stage02_isotopomer_fittedFluxSplits.split_units).filter(
data_stage02_isotopomer_fittedFluxSplits.simulation_id.like(simulation_id_I),
data_stage02_isotopomer_fittedFluxSplits.simulation_dateAndTime==simulation_dateAndTime_I,
data_stage02_isotopomer_fittedFluxSplits.used_.is_(True)).group_by(
data_stage02_isotopomer_fittedFluxSplits.split_units).order_by(
data_stage02_isotopomer_fittedFluxSplits.split_units.asc()).all();
split_units_O = [];
if data:
for d in data:
split_units_O.append(d.split_units);
return split_units_O;
except SQLAlchemyError as e:
print(e);
# query split_average, split_stdev, split_lb, and split_ub from data_stage02_isotopomer_fittedFluxSplits
def get_split_simulationIDAndSplitID_dataStage02IsotopomerfittedFluxSplits(self,simulation_id_I,split_id_I):
'''query split_average, split_stdev, split_lb, and split_ub from data_stage02_isotopomer_fittedFluxSplits'''
try:
data = self.session.query(data_stage02_isotopomer_fittedFluxSplits).filter(
data_stage02_isotopomer_fittedFluxSplits.simulation_id.like(simulation_id_I),
data_stage02_isotopomer_fittedFluxSplits.split_id.like(split_id_I),
data_stage02_isotopomer_fittedFluxSplits.used_.is_(True)).order_by(
data_stage02_isotopomer_fittedFluxSplits.split_rxn_id.asc()).all();
split_rxn_id_O,split_O,split_stdev_O,split_lb_O,split_ub_O,split_units_O=[],[],[],[],[],[];
if data:
for d in data:
split_rxn_id_O.append(d.split_rxn_id);
split_O.append(d.split);
split_stdev_O.append(d.split_stdev);
split_lb_O.append(d.split_lb);
split_ub_O.append(d.split_ub);
split_units_O.append(d.split_units);
return split_rxn_id_O,split_O,split_stdev_O,split_lb_O,split_ub_O,split_units_O;
except SQLAlchemyError as e:
print(e);
def get_split_simulationIDAndSimulationDateAndTimeAndSplitID_dataStage02IsotopomerfittedFluxSplits(self,simulation_id_I,simulation_dateAndTime_I,split_id_I):
'''query split_average, split_stdev, split_lb, and split_ub from data_stage02_isotopomer_fittedFluxSplits'''
try:
data = self.session.query(data_stage02_isotopomer_fittedFluxSplits).filter(
data_stage02_isotopomer_fittedFluxSplits.simulation_id.like(simulation_id_I),
data_stage02_isotopomer_fittedFluxSplits.simulation_dateAndTime==simulation_dateAndTime_I,
data_stage02_isotopomer_fittedFluxSplits.split_id.like(split_id_I),
data_stage02_isotopomer_fittedFluxSplits.used_.is_(True)).order_by(
data_stage02_isotopomer_fittedFluxSplits.split_rxn_id.asc()).all();
split_rxn_id_O,split_O,split_stdev_O,split_lb_O,split_ub_O,split_units_O=[],[],[],[],[],[];
if data:
for d in data:
split_rxn_id_O.append(d.split_rxn_id);
split_O.append(d.split);
split_stdev_O.append(d.split_stdev);
split_lb_O.append(d.split_lb);
split_ub_O.append(d.split_ub);
split_units_O.append(d.split_units);
return split_rxn_id_O,split_O,split_stdev_O,split_lb_O,split_ub_O,split_units_O;
except SQLAlchemyError as e:
print(e);
def get_split_simulationIDAndSimulationDateAndTimeAndFluxUnitsAndSplitID_dataStage02IsotopomerfittedFluxSplits(self,simulation_id_I,simulation_dateAndTime_I,split_units_I,split_id_I):
'''query split_average, split_stdev, split_lb, and split_ub from data_stage02_isotopomer_fittedFluxSplits'''
try:
data = self.session.query(data_stage02_isotopomer_fittedFluxSplits).filter(
data_stage02_isotopomer_fittedFluxSplits.simulation_id.like(simulation_id_I),
data_stage02_isotopomer_fittedFluxSplits.split_units.like(split_units_I),
data_stage02_isotopomer_fittedFluxSplits.simulation_dateAndTime==simulation_dateAndTime_I,
data_stage02_isotopomer_fittedFluxSplits.split_id.like(split_id_I),
data_stage02_isotopomer_fittedFluxSplits.used_.is_(True)).order_by(
data_stage02_isotopomer_fittedFluxSplits.split_rxn_id.asc()).all();
split_rxn_id_O,split_O,split_stdev_O,split_lb_O,split_ub_O,split_units_O=[],[],[],[],[],[];
if data:
for d in data:
split_rxn_id_O.append(d.split_rxn_id);
split_O.append(d.split);
split_stdev_O.append(d.split_stdev);
split_lb_O.append(d.split_lb);
split_ub_O.append(d.split_ub);
split_units_O.append(d.split_units);
return split_rxn_id_O,split_O,split_stdev_O,split_lb_O,split_ub_O,split_units_O;
except SQLAlchemyError as e:
print(e);
def get_split_simulationIDAndSimulationDateAndTimeAndFluxUnitsAndSplitIDAndSplitRxnId_dataStage02IsotopomerfittedFluxSplits(self,simulation_id_I,simulation_dateAndTime_I,split_units_I,split_id_I,split_rxn_id_I):
'''query split_average, split_stdev, split_lb, and split_ub from data_stage02_isotopomer_fittedFluxSplits'''
try:
data = self.session.query(data_stage02_isotopomer_fittedFluxSplits).filter(
data_stage02_isotopomer_fittedFluxSplits.simulation_id.like(simulation_id_I),
data_stage02_isotopomer_fittedFluxSplits.split_units.like(split_units_I),
data_stage02_isotopomer_fittedFluxSplits.split_rxn_id.like(split_rxn_id_I),
data_stage02_isotopomer_fittedFluxSplits.simulation_dateAndTime==simulation_dateAndTime_I,
data_stage02_isotopomer_fittedFluxSplits.split_id.like(split_id_I),
data_stage02_isotopomer_fittedFluxSplits.used_.is_(True)).all();
split_O,split_stdev_O,split_lb_O,split_ub_O,split_units_O=0.0,0.0,0.0,0.0,'';
if len(data)>1:
print('more than 1 row found')
return;
if data:
for d in data:
split_O,split_stdev_O,split_lb_O,split_ub_O,split_units_O = d.split,d.split_stdev,d.split_lb,d.split_ub,d.split_units;
return split_O,split_stdev_O,split_lb_O,split_ub_O,split_units_O;
except SQLAlchemyError as e:
print(e);
# query rows from data_stage02_isotopomer_fittedFluxSplits
def get_rows_simulationID_dataStage02IsotopomerfittedFluxSplits(self,simulation_id_I):
'''Query used rows from data_stage02_isotopomer_fittedFluxSplits'''
try:
data = self.session.query(data_stage02_isotopomer_fittedFluxSplits).filter(
data_stage02_isotopomer_fittedFluxSplits.simulation_id.like(simulation_id_I),
data_stage02_isotopomer_fittedFluxSplits.used_.is_(True)).all();
rows_O = [];
if data:
for d in data:
data_tmp = {'id':d.id,
'simulation_id':d.simulation_id,
'simulation_dateAndTime':d.simulation_dateAndTime,
'split_id':d.split_id,
'split_rxn_id':d.split_rxn_id,
'split':d.split,
'split_stdev':d.split_stdev,
'split_units':d.split_units,
'split_lb':d.split_lb,
'split_ub':d.split_ub,
'used_':d.used_,
'comment_':d.comment_};
rows_O.append(data_tmp);
return rows_O;
except SQLAlchemyError as e:
print(e);
def get_rowsDict_simulationID_dataStage02IsotopomerfittedFluxSplits(self,simulation_id_I):
'''Query used rows from data_stage02_isotopomer_fittedFluxSplits, keyed by split_id'''
try:
data = self.session.query(data_stage02_isotopomer_fittedFluxSplits).filter(
data_stage02_isotopomer_fittedFluxSplits.simulation_id.like(simulation_id_I),
data_stage02_isotopomer_fittedFluxSplits.used_.is_(True)).all();
rows_O = {};
if data:
for d in data:
if d.split_id in rows_O:
print('duplicate split_id found!');
else:
rows_O[d.split_id]={
'split':d.split,
'split_stdev':d.split_stdev,
'split_units':d.split_units,
'split_lb':d.split_lb,
'split_ub':d.split_ub};
return rows_O;
except SQLAlchemyError as e:
print(e);
def get_rowsEscherFluxLbUb_simulationID_dataStage02IsotopomerfittedFluxSplits(self,simulation_id_I):
'''Query used rows from data_stage02_isotopomer_fittedFluxSplits as [lb_dict, ub_dict]'''
try:
data = self.session.query(data_stage02_isotopomer_fittedFluxSplits).filter(
data_stage02_isotopomer_fittedFluxSplits.simulation_id.like(simulation_id_I),
data_stage02_isotopomer_fittedFluxSplits.used_.is_(True)).all();
rows_O = [None,None];
rows_O[0] = {};
rows_O[1] = {}
if data:
for d in data:
rows_O[0][d.split_id]=d.split_lb;
rows_O[1][d.split_id]=d.split_ub;
return rows_O;
except SQLAlchemyError as e:
print(e);
def get_rowsEscherFlux_simulationID_dataStage02IsotopomerfittedFluxSplits(self,simulation_id_I):
'''Query used rows from data_stage02_isotopomer_fittedFluxSplits
output: dict, split_id:split'''
try:
data = self.session.query(data_stage02_isotopomer_fittedFluxSplits).filter(
data_stage02_isotopomer_fittedFluxSplits.simulation_id.like(simulation_id_I),
data_stage02_isotopomer_fittedFluxSplits.used_.is_(True)).all();
rows_O = {}
if data:
for d in data:
rows_O[d.split_id]=d.split;
return rows_O;
except SQLAlchemyError as e:
print(e);
def initialize_dataStage02_isotopomer_fittedFluxSplits(self):
try:
data_stage02_isotopomer_fittedFluxSplits.__table__.create(self.engine,checkfirst=True);
except SQLAlchemyError as e:
print(e);
def drop_dataStage02_isotopomer_fittedFluxSplits(self):
try:
data_stage02_isotopomer_fittedFluxSplits.__table__.drop(self.engine,checkfirst=True);
except SQLAlchemyError as e:
print(e);
def reset_dataStage02_isotopomer_fittedFluxSplits(self,simulation_id_I = None,simulation_dateAndTime_I=None):
try:
if simulation_id_I and simulation_dateAndTime_I:
reset = self.session.query(data_stage02_isotopomer_fittedFluxSplits).filter(
data_stage02_isotopomer_fittedFluxSplits.simulation_id.like(simulation_id_I),
data_stage02_isotopomer_fittedFluxSplits.simulation_dateAndTime==self.convert_string2datetime(simulation_dateAndTime_I)).delete(synchronize_session=False);
else:
reset = self.session.query(data_stage02_isotopomer_fittedFluxSplits).filter(data_stage02_isotopomer_fittedFluxSplits.simulation_id.like(simulation_id_I)).delete(synchronize_session=False);
self.session.commit();
except SQLAlchemyError as e:
print(e);
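# Usage sketch (hypothetical): assumes `query` is an initialized
# stage02_isotopomer_fittedFluxSplits_query with a live session, as set up by
# the SBaaS_base machinery (construction is not shown in this file):
#
#   for dt in query.get_simulationDateAndTimes_simulationID_dataStage02IsotopomerfittedFluxSplits('sim01'):
#       ids = query.get_splitIDs_simulationIDAndSimulationDateAndTime_dataStage02IsotopomerfittedFluxSplits('sim01', dt)
#       print(dt, ids)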
| 60.815686
| 215
| 0.668687
| 1,740
| 15,508
| 5.510345
| 0.061494
| 0.214226
| 0.261577
| 0.285565
| 0.828744
| 0.802879
| 0.779412
| 0.72695
| 0.721944
| 0.702545
| 0
| 0.016986
| 0.259737
| 15,508
| 254
| 216
| 61.055118
| 0.818206
| 0.08357
| 0
| 0.646288
| 0
| 0
| 0.017124
| 0.004387
| 0
| 0
| 0
| 0
| 0
| 1
| 0.065502
| false
| 0
| 0.039301
| 0
| 0.157205
| 0.069869
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 9
| 6f6f2a615fb9e0949fafd396317251f2b2ef6f77
| 2,103
| py
| Python
| Python/notebooks/blind_contour/cld_mst_.py
| kylemcdonald/BlindSelfPortrait
| 702afbe98c01a6f0051ba81cfe85bd3f5b2ce6cd
| ["MIT"] | 18
| 2015-12-07T13:46:18.000Z
| 2021-12-08T23:24:41.000Z
| Python/notebooks/blind_contour/cld_mst_.py
| kylemcdonald/BlindSelfPortrait
| 702afbe98c01a6f0051ba81cfe85bd3f5b2ce6cd
| ["MIT"] | 3
| 2021-01-28T04:27:42.000Z
| 2022-02-26T06:34:48.000Z
| Python/notebooks/blind_contour/cld_mst_.py
| kylemcdonald/BlindSelfPortrait
| 702afbe98c01a6f0051ba81cfe85bd3f5b2ce6cd
| ["MIT"] | 3
| 2016-06-09T04:20:00.000Z
| 2022-02-17T11:16:27.000Z |
# Comes from cld edges version-mst-pypy.ipynb, cell
import json
import glob
import subprocess
import cv2
import numpy as np
from matplotlib import pyplot as plt
from shapely import geometry as geom
from math import floor
| 17.822034
| 52
| 0.774608
| 333
| 2,103
| 4.891892
| 0.075075
| 0.049724
| 0.066298
| 0.093923
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0.005376
| 0.203994
| 2,103
| 118
| 53
| 17.822034
| 0.967742
| 0.213505
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 10
| 48b2cdfc4d940311f7e73660344a117eceea53e3
| 10,435
| py
| Python
| encoding/functions/syncbn.py
| Womcos/SCARF
| b90251bc23410cb810a7082ca75147a7aae21dec
| ["MIT"] | 1
| 2021-04-06T11:29:04.000Z
| 2021-04-06T11:29:04.000Z
| encoding/functions/syncbn.py
| Womcos/SCARF
| b90251bc23410cb810a7082ca75147a7aae21dec
| ["MIT"] | null | null | null
| encoding/functions/syncbn.py
| Womcos/SCARF
| b90251bc23410cb810a7082ca75147a7aae21dec
| ["MIT"] | 1
| 2021-04-06T08:41:12.000Z
| 2021-04-06T08:41:12.000Z |
"""Synchronized Cross-GPU Batch Normalization functions"""
import torch
import torch.cuda.comm as comm
from torch.autograd import Function
from torch.autograd.function import once_differentiable
from encoding import cpu
if torch.cuda.device_count() > 0:
from encoding import gpu
__all__ = ['moments', 'syncbatchnorm', 'inp_syncbatchnorm']
class moments_(Function):
@staticmethod
def forward(ctx, x):
if x.is_cuda:
ex, ex2 = gpu.expectation_forward(x)
else:
raise NotImplementedError  # NotImplemented is not an exception; CPU path unsupported
ctx.save_for_backward(x)
return ex, ex2
@staticmethod
def backward(ctx, dex, dex2):
x, = ctx.saved_tensors
if dex.is_cuda:
dx = gpu.expectation_backward(x, dex, dex2)
else:
raise NotImplementedError
return dx
class syncbatchnorm_(Function):
@classmethod
def forward(cls, ctx, x, gamma, beta, running_mean, running_var,
extra, sync=True, training=True, momentum=0.1, eps=1e-05,
activation="none", slope=0.01):
# save context
cls._parse_extra(ctx, extra)
ctx.sync = sync
ctx.training = training
ctx.momentum = momentum
ctx.eps = eps
ctx.activation = activation
ctx.slope = slope
assert activation == 'none'
# make the inputs contiguous
x = x.contiguous()
gamma = gamma.contiguous()
beta = beta.contiguous()
if ctx.training:
if x.is_cuda:
_ex, _exs = gpu.expectation_forward(x)
else:
raise NotImplementedError
if ctx.sync:
if ctx.is_master:
_ex, _exs = [_ex.unsqueeze(0)], [_exs.unsqueeze(0)]
for _ in range(ctx.master_queue.maxsize):
_ex_w, _exs_w = ctx.master_queue.get()
ctx.master_queue.task_done()
_ex.append(_ex_w.unsqueeze(0))
_exs.append(_exs_w.unsqueeze(0))
_ex = comm.gather(_ex).mean(0)
_exs = comm.gather(_exs).mean(0)
tensors = comm.broadcast_coalesced((_ex, _exs), [_ex.get_device()] + ctx.worker_ids)
for ts, queue in zip(tensors[1:], ctx.worker_queues):
queue.put(ts)
else:
ctx.master_queue.put((_ex, _exs))
_ex, _exs = ctx.worker_queue.get()
ctx.worker_queue.task_done()
# Update running stats
_var = _exs - _ex ** 2
running_mean.mul_((1 - ctx.momentum)).add_(ctx.momentum * _ex)
running_var.mul_((1 - ctx.momentum)).add_(ctx.momentum * _var)
# Mark in-place modified tensors
ctx.mark_dirty(running_mean, running_var)
else:
_ex, _var = running_mean.contiguous(), running_var.contiguous()
_exs = _var + _ex ** 2
# BN forward + activation
if x.is_cuda:
y = gpu.batchnorm_forward(x, _ex, _exs, gamma, beta, ctx.eps)
else:
y = cpu.batchnorm_forward(x, _ex, _exs, gamma, beta, ctx.eps)
# Output
ctx.save_for_backward(x, _ex, _exs, gamma, beta)
return y
@staticmethod
@once_differentiable
def backward(ctx, dz):
x, _ex, _exs, gamma, beta = ctx.saved_tensors
dz = dz.contiguous()
# BN backward
if dz.is_cuda:
dx, _dex, _dexs, dgamma, dbeta = \
gpu.batchnorm_backward(dz, x, _ex, _exs, gamma, beta, ctx.eps)
else:
raise NotImplementedError
if ctx.training:
if ctx.sync:
if ctx.is_master:
_dex, _dexs = [_dex.unsqueeze(0)], [_dexs.unsqueeze(0)]
for _ in range(ctx.master_queue.maxsize):
_dex_w, _dexs_w = ctx.master_queue.get()
ctx.master_queue.task_done()
_dex.append(_dex_w.unsqueeze(0))
_dexs.append(_dexs_w.unsqueeze(0))
_dex = comm.gather(_dex).mean(0)
_dexs = comm.gather(_dexs).mean(0)
tensors = comm.broadcast_coalesced((_dex, _dexs), [_dex.get_device()] + ctx.worker_ids)
for ts, queue in zip(tensors[1:], ctx.worker_queues):
queue.put(ts)
else:
ctx.master_queue.put((_dex, _dexs))
_dex, _dexs = ctx.worker_queue.get()
ctx.worker_queue.task_done()
if x.is_cuda:
dx_ = gpu.expectation_backward(x, _dex, _dexs)
else:
raise NotImplementedError
dx = dx + dx_
return dx, dgamma, dbeta, None, None, None, None, None, None, None, None, None
@staticmethod
def _parse_extra(ctx, extra):
ctx.is_master = extra["is_master"]
if ctx.is_master:
ctx.master_queue = extra["master_queue"]
ctx.worker_queues = extra["worker_queues"]
ctx.worker_ids = extra["worker_ids"]
else:
ctx.master_queue = extra["master_queue"]
ctx.worker_queue = extra["worker_queue"]
def _act_forward(ctx, x):
if ctx.activation.lower() == "leaky_relu":
if x.is_cuda:
gpu.leaky_relu_forward(x, ctx.slope)
else:
raise NotImplementedError
else:
assert ctx.activation == 'none'
def _act_backward(ctx, x, dx):
if ctx.activation.lower() == "leaky_relu":
if x.is_cuda:
gpu.leaky_relu_backward(x, dx, ctx.slope)
else:
raise NotImplementedError
else:
assert ctx.activation == 'none'
class inp_syncbatchnorm_(Function):
@classmethod
def forward(cls, ctx, x, gamma, beta, running_mean, running_var,
extra, sync=True, training=True, momentum=0.1, eps=1e-05,
activation="none", slope=0.01):
# save context
cls._parse_extra(ctx, extra)
ctx.sync = sync
ctx.training = training
ctx.momentum = momentum
ctx.eps = eps
ctx.activation = activation
ctx.slope = slope
# make the inputs contiguous
x = x.contiguous()
gamma = gamma.contiguous()
beta = beta.contiguous()
if ctx.training:
if x.is_cuda:
_ex, _exs = gpu.expectation_forward(x)
else:
raise NotImplementedError
if ctx.sync:
if ctx.is_master:
_ex, _exs = [_ex.unsqueeze(0)], [_exs.unsqueeze(0)]
for _ in range(ctx.master_queue.maxsize):
_ex_w, _exs_w = ctx.master_queue.get()
ctx.master_queue.task_done()
_ex.append(_ex_w.unsqueeze(0))
_exs.append(_exs_w.unsqueeze(0))
_ex = comm.gather(_ex).mean(0)
_exs = comm.gather(_exs).mean(0)
tensors = comm.broadcast_coalesced((_ex, _exs), [_ex.get_device()] + ctx.worker_ids)
for ts, queue in zip(tensors[1:], ctx.worker_queues):
queue.put(ts)
else:
ctx.master_queue.put((_ex, _exs))
_ex, _exs = ctx.worker_queue.get()
ctx.worker_queue.task_done()
# Update running stats
_var = _exs - _ex ** 2
running_mean.mul_((1 - ctx.momentum)).add_(ctx.momentum * _ex)
running_var.mul_((1 - ctx.momentum)).add_(ctx.momentum * _var)
# Mark in-place modified tensors
ctx.mark_dirty(x, running_mean, running_var)
else:
_ex, _var = running_mean.contiguous(), running_var.contiguous()
_exs = _var + _ex ** 2
ctx.mark_dirty(x)
# BN forward + activation
if x.is_cuda:
gpu.batchnorm_inp_forward(x, _ex, _exs, gamma, beta, ctx.eps)
else:
raise NotImplementedError
_act_forward(ctx, x)
# Output
ctx.save_for_backward(x, _ex, _exs, gamma, beta)
return x
@staticmethod
@once_differentiable
def backward(ctx, dz):
z, _ex, _exs, gamma, beta = ctx.saved_tensors
dz = dz.contiguous()
# Undo activation
_act_backward(ctx, z, dz)
# BN backward
if dz.is_cuda:
dx, _dex, _dexs, dgamma, dbeta = \
gpu.batchnorm_inp_backward(dz, z, _ex, _exs, gamma, beta, ctx.eps)
else:
raise NotImplementedError
if ctx.training:
if ctx.sync:
if ctx.is_master:
_dex, _dexs = [_dex.unsqueeze(0)], [_dexs.unsqueeze(0)]
for _ in range(ctx.master_queue.maxsize):
_dex_w, _dexs_w = ctx.master_queue.get()
ctx.master_queue.task_done()
_dex.append(_dex_w.unsqueeze(0))
_dexs.append(_dexs_w.unsqueeze(0))
_dex = comm.gather(_dex).mean(0)
_dexs = comm.gather(_dexs).mean(0)
tensors = comm.broadcast_coalesced((_dex, _dexs), [_dex.get_device()] + ctx.worker_ids)
for ts, queue in zip(tensors[1:], ctx.worker_queues):
queue.put(ts)
else:
ctx.master_queue.put((_dex, _dexs))
_dex, _dexs = ctx.worker_queue.get()
ctx.worker_queue.task_done()
if z.is_cuda:
gpu.expectation_inp_backward(dx, z, _dex, _dexs, _ex, _exs, gamma, beta, ctx.eps)
else:
raise NotImplementedError
return dx, dgamma, dbeta, None, None, None, None, None, None, None, None, None
@staticmethod
def _parse_extra(ctx, extra):
ctx.is_master = extra["is_master"]
if ctx.is_master:
ctx.master_queue = extra["master_queue"]
ctx.worker_queues = extra["worker_queues"]
ctx.worker_ids = extra["worker_ids"]
else:
ctx.master_queue = extra["master_queue"]
ctx.worker_queue = extra["worker_queue"]
moments = moments_.apply
syncbatchnorm = syncbatchnorm_.apply
inp_syncbatchnorm = inp_syncbatchnorm_.apply
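# A minimal single-process sketch of the functional entry points above
# (assumes the compiled `encoding` GPU extension and a CUDA device; in real
# use an nn.Module wrapper supplies the `extra` queue bookkeeping that
# `syncbatchnorm` needs for cross-GPU synchronization):
if torch.cuda.is_available():
    _x = torch.randn(8, 16, 32, 32, device='cuda', requires_grad=True)
    _ex, _ex2 = moments(_x)      # E[x] and E[x^2] from the extension kernels
    _var = _ex2 - _ex ** 2       # variance recovered as E[x^2] - E[x]^2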
| 35.016779
| 107
| 0.537039
| 1,195
| 10,435
| 4.406695
| 0.099582
| 0.050133
| 0.053171
| 0.03646
| 0.860046
| 0.847892
| 0.847892
| 0.809913
| 0.797
| 0.751804
| 0
| 0.008551
| 0.361188
| 10,435
| 297
| 108
| 35.13468
| 0.781428
| 0.03057
| 0
| 0.808511
| 0
| 0
| 0.0211
| 0
| 0
| 0
| 0
| 0
| 0.012766
| 1
| 0.042553
| false
| 0
| 0.025532
| 0
| 0.106383
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| d2de3da62fa02fd70a717a6e36e81ee906259f48
| 160
| py
| Python
| kmeans/kmeans/globals.py
| fpeterek/parallel-algorithms
| 9557c73bd66d58283c3b03a03a4ea024ccdcbd3a
| ["MIT"] | null | null | null
| kmeans/kmeans/globals.py
| fpeterek/parallel-algorithms
| 9557c73bd66d58283c3b03a03a4ea024ccdcbd3a
| ["MIT"] | null | null | null
| kmeans/kmeans/globals.py
| fpeterek/parallel-algorithms
| 9557c73bd66d58283c3b03a03a4ea024ccdcbd3a
| ["MIT"] | null | null | null |
from typing import Optional
import multiprocessing as mp
# Module-level globals: populated at runtime before the k-means workers use them
data: Optional[list] = None
in_queue: Optional[mp.Queue] = None
out_queue: Optional[mp.Queue] = None
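# A hedged sketch of how such globals are typically populated: each worker in
# a multiprocessing pool rebinds them once via an initializer. The
# `init_worker` name and wiring are assumptions, not taken from this repo.
def init_worker(shared_data: list, inq: mp.Queue, outq: mp.Queue) -> None:
    # Runs once per worker process; rebinds the module-level globals so
    # worker functions can reach the shared state without extra arguments.
    global data, in_queue, out_queue
    data, in_queue, out_queue = shared_data, inq, outq
# e.g. mp.Pool(4, initializer=init_worker, initargs=(points, mp.Queue(), mp.Queue()))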
| 20
| 36
| 0.775
| 24
| 160
| 5.083333
| 0.541667
| 0.213115
| 0.245902
| 0.327869
| 0.393443
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1375
| 160
| 7
| 37
| 22.857143
| 0.884058
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 7
| 96112240436e98460c77686165297c4ad683af87
| 351,996
| py
| Python
| sdk/python/pulumi_gcp/container/outputs.py
| pulumi/pulumi-gcp
| e6f415b20430d0efcc07fccb6047c30349d25e23
| ["ECL-2.0", "Apache-2.0"] | 121
| 2018-06-18T19:16:42.000Z
| 2022-03-31T06:06:48.000Z
| sdk/python/pulumi_gcp/container/outputs.py
| pulumi/pulumi-gcp
| e6f415b20430d0efcc07fccb6047c30349d25e23
| ["ECL-2.0", "Apache-2.0"] | 492
| 2018-06-22T19:41:03.000Z
| 2022-03-31T15:33:53.000Z
| sdk/python/pulumi_gcp/container/outputs.py
| pulumi/pulumi-gcp
| e6f415b20430d0efcc07fccb6047c30349d25e23
| ["ECL-2.0", "Apache-2.0"] | 43
| 2018-06-19T01:43:13.000Z
| 2022-03-23T22:43:37.000Z |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'AwsClusterAuthorization',
'AwsClusterAuthorizationAdminUser',
'AwsClusterControlPlane',
'AwsClusterControlPlaneAwsServicesAuthentication',
'AwsClusterControlPlaneConfigEncryption',
'AwsClusterControlPlaneDatabaseEncryption',
'AwsClusterControlPlaneMainVolume',
'AwsClusterControlPlaneProxyConfig',
'AwsClusterControlPlaneRootVolume',
'AwsClusterControlPlaneSshConfig',
'AwsClusterFleet',
'AwsClusterNetworking',
'AwsClusterWorkloadIdentityConfig',
'AwsNodePoolAutoscaling',
'AwsNodePoolConfig',
'AwsNodePoolConfigConfigEncryption',
'AwsNodePoolConfigRootVolume',
'AwsNodePoolConfigSshConfig',
'AwsNodePoolConfigTaint',
'AwsNodePoolMaxPodsConstraint',
'AzureClusterAuthorization',
'AzureClusterAuthorizationAdminUser',
'AzureClusterControlPlane',
'AzureClusterControlPlaneDatabaseEncryption',
'AzureClusterControlPlaneMainVolume',
'AzureClusterControlPlaneProxyConfig',
'AzureClusterControlPlaneReplicaPlacement',
'AzureClusterControlPlaneRootVolume',
'AzureClusterControlPlaneSshConfig',
'AzureClusterFleet',
'AzureClusterNetworking',
'AzureClusterWorkloadIdentityConfig',
'AzureNodePoolAutoscaling',
'AzureNodePoolConfig',
'AzureNodePoolConfigRootVolume',
'AzureNodePoolConfigSshConfig',
'AzureNodePoolMaxPodsConstraint',
'ClusterAddonsConfig',
'ClusterAddonsConfigCloudrunConfig',
'ClusterAddonsConfigConfigConnectorConfig',
'ClusterAddonsConfigDnsCacheConfig',
'ClusterAddonsConfigGcePersistentDiskCsiDriverConfig',
'ClusterAddonsConfigHorizontalPodAutoscaling',
'ClusterAddonsConfigHttpLoadBalancing',
'ClusterAddonsConfigIstioConfig',
'ClusterAddonsConfigKalmConfig',
'ClusterAddonsConfigNetworkPolicyConfig',
'ClusterAuthenticatorGroupsConfig',
'ClusterClusterAutoscaling',
'ClusterClusterAutoscalingAutoProvisioningDefaults',
'ClusterClusterAutoscalingResourceLimit',
'ClusterClusterTelemetry',
'ClusterConfidentialNodes',
'ClusterDatabaseEncryption',
'ClusterDefaultSnatStatus',
'ClusterDnsConfig',
'ClusterIpAllocationPolicy',
'ClusterLoggingConfig',
'ClusterMaintenancePolicy',
'ClusterMaintenancePolicyDailyMaintenanceWindow',
'ClusterMaintenancePolicyMaintenanceExclusion',
'ClusterMaintenancePolicyRecurringWindow',
'ClusterMasterAuth',
'ClusterMasterAuthClientCertificateConfig',
'ClusterMasterAuthorizedNetworksConfig',
'ClusterMasterAuthorizedNetworksConfigCidrBlock',
'ClusterMonitoringConfig',
'ClusterNetworkPolicy',
'ClusterNodeConfig',
'ClusterNodeConfigEphemeralStorageConfig',
'ClusterNodeConfigGcfsConfig',
'ClusterNodeConfigGuestAccelerator',
'ClusterNodeConfigKubeletConfig',
'ClusterNodeConfigLinuxNodeConfig',
'ClusterNodeConfigSandboxConfig',
'ClusterNodeConfigShieldedInstanceConfig',
'ClusterNodeConfigTaint',
'ClusterNodeConfigWorkloadMetadataConfig',
'ClusterNodePool',
'ClusterNodePoolAutoscaling',
'ClusterNodePoolManagement',
'ClusterNodePoolNetworkConfig',
'ClusterNodePoolNodeConfig',
'ClusterNodePoolNodeConfigEphemeralStorageConfig',
'ClusterNodePoolNodeConfigGcfsConfig',
'ClusterNodePoolNodeConfigGuestAccelerator',
'ClusterNodePoolNodeConfigKubeletConfig',
'ClusterNodePoolNodeConfigLinuxNodeConfig',
'ClusterNodePoolNodeConfigSandboxConfig',
'ClusterNodePoolNodeConfigShieldedInstanceConfig',
'ClusterNodePoolNodeConfigTaint',
'ClusterNodePoolNodeConfigWorkloadMetadataConfig',
'ClusterNodePoolUpgradeSettings',
'ClusterNotificationConfig',
'ClusterNotificationConfigPubsub',
'ClusterPodSecurityPolicyConfig',
'ClusterPrivateClusterConfig',
'ClusterPrivateClusterConfigMasterGlobalAccessConfig',
'ClusterReleaseChannel',
'ClusterResourceUsageExportConfig',
'ClusterResourceUsageExportConfigBigqueryDestination',
'ClusterVerticalPodAutoscaling',
'ClusterWorkloadIdentityConfig',
'NodePoolAutoscaling',
'NodePoolManagement',
'NodePoolNetworkConfig',
'NodePoolNodeConfig',
'NodePoolNodeConfigEphemeralStorageConfig',
'NodePoolNodeConfigGcfsConfig',
'NodePoolNodeConfigGuestAccelerator',
'NodePoolNodeConfigKubeletConfig',
'NodePoolNodeConfigLinuxNodeConfig',
'NodePoolNodeConfigSandboxConfig',
'NodePoolNodeConfigShieldedInstanceConfig',
'NodePoolNodeConfigTaint',
'NodePoolNodeConfigWorkloadMetadataConfig',
'NodePoolUpgradeSettings',
'GetClusterAddonsConfigResult',
'GetClusterAddonsConfigCloudrunConfigResult',
'GetClusterAddonsConfigConfigConnectorConfigResult',
'GetClusterAddonsConfigDnsCacheConfigResult',
'GetClusterAddonsConfigGcePersistentDiskCsiDriverConfigResult',
'GetClusterAddonsConfigHorizontalPodAutoscalingResult',
'GetClusterAddonsConfigHttpLoadBalancingResult',
'GetClusterAddonsConfigIstioConfigResult',
'GetClusterAddonsConfigKalmConfigResult',
'GetClusterAddonsConfigNetworkPolicyConfigResult',
'GetClusterAuthenticatorGroupsConfigResult',
'GetClusterClusterAutoscalingResult',
'GetClusterClusterAutoscalingAutoProvisioningDefaultResult',
'GetClusterClusterAutoscalingResourceLimitResult',
'GetClusterClusterTelemetryResult',
'GetClusterConfidentialNodeResult',
'GetClusterDatabaseEncryptionResult',
'GetClusterDefaultSnatStatusResult',
'GetClusterDnsConfigResult',
'GetClusterIpAllocationPolicyResult',
'GetClusterLoggingConfigResult',
'GetClusterMaintenancePolicyResult',
'GetClusterMaintenancePolicyDailyMaintenanceWindowResult',
'GetClusterMaintenancePolicyMaintenanceExclusionResult',
'GetClusterMaintenancePolicyRecurringWindowResult',
'GetClusterMasterAuthResult',
'GetClusterMasterAuthClientCertificateConfigResult',
'GetClusterMasterAuthorizedNetworksConfigResult',
'GetClusterMasterAuthorizedNetworksConfigCidrBlockResult',
'GetClusterMonitoringConfigResult',
'GetClusterNetworkPolicyResult',
'GetClusterNodeConfigResult',
'GetClusterNodeConfigEphemeralStorageConfigResult',
'GetClusterNodeConfigGcfsConfigResult',
'GetClusterNodeConfigGuestAcceleratorResult',
'GetClusterNodeConfigKubeletConfigResult',
'GetClusterNodeConfigLinuxNodeConfigResult',
'GetClusterNodeConfigSandboxConfigResult',
'GetClusterNodeConfigShieldedInstanceConfigResult',
'GetClusterNodeConfigTaintResult',
'GetClusterNodeConfigWorkloadMetadataConfigResult',
'GetClusterNodePoolResult',
'GetClusterNodePoolAutoscalingResult',
'GetClusterNodePoolManagementResult',
'GetClusterNodePoolNetworkConfigResult',
'GetClusterNodePoolNodeConfigResult',
'GetClusterNodePoolNodeConfigEphemeralStorageConfigResult',
'GetClusterNodePoolNodeConfigGcfsConfigResult',
'GetClusterNodePoolNodeConfigGuestAcceleratorResult',
'GetClusterNodePoolNodeConfigKubeletConfigResult',
'GetClusterNodePoolNodeConfigLinuxNodeConfigResult',
'GetClusterNodePoolNodeConfigSandboxConfigResult',
'GetClusterNodePoolNodeConfigShieldedInstanceConfigResult',
'GetClusterNodePoolNodeConfigTaintResult',
'GetClusterNodePoolNodeConfigWorkloadMetadataConfigResult',
'GetClusterNodePoolUpgradeSettingResult',
'GetClusterNotificationConfigResult',
'GetClusterNotificationConfigPubsubResult',
'GetClusterPodSecurityPolicyConfigResult',
'GetClusterPrivateClusterConfigResult',
'GetClusterPrivateClusterConfigMasterGlobalAccessConfigResult',
'GetClusterReleaseChannelResult',
'GetClusterResourceUsageExportConfigResult',
'GetClusterResourceUsageExportConfigBigqueryDestinationResult',
'GetClusterVerticalPodAutoscalingResult',
'GetClusterWorkloadIdentityConfigResult',
]
@pulumi.output_type
class AwsClusterAuthorization(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "adminUsers":
suggest = "admin_users"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AwsClusterAuthorization. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AwsClusterAuthorization.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AwsClusterAuthorization.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
admin_users: Sequence['outputs.AwsClusterAuthorizationAdminUser']):
"""
:param Sequence['AwsClusterAuthorizationAdminUserArgs'] admin_users: Required. Users to perform operations as a cluster admin. A managed ClusterRoleBinding will be created to grant the `cluster-admin` ClusterRole to the users. At most one user can be specified. For more info on RBAC, see https://kubernetes.io/docs/reference/access-authn-authz/rbac/#user-facing-roles
"""
pulumi.set(__self__, "admin_users", admin_users)
@property
@pulumi.getter(name="adminUsers")
def admin_users(self) -> Sequence['outputs.AwsClusterAuthorizationAdminUser']:
"""
Required. Users to perform operations as a cluster admin. A managed ClusterRoleBinding will be created to grant the `cluster-admin` ClusterRole to the users. At most one user can be specified. For more info on RBAC, see https://kubernetes.io/docs/reference/access-authn-authz/rbac/#user-facing-roles
"""
return pulumi.get(self, "admin_users")
@pulumi.output_type
class AwsClusterAuthorizationAdminUser(dict):
def __init__(__self__, *,
username: str):
"""
:param str username: Required. The name of the user, e.g. `my-gcp-id@gmail.com`.
"""
pulumi.set(__self__, "username", username)
@property
@pulumi.getter
def username(self) -> str:
"""
Required. The name of the user, e.g. `my-gcp-id@gmail.com`.
"""
return pulumi.get(self, "username")
@pulumi.output_type
class AwsClusterControlPlane(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "awsServicesAuthentication":
suggest = "aws_services_authentication"
elif key == "configEncryption":
suggest = "config_encryption"
elif key == "databaseEncryption":
suggest = "database_encryption"
elif key == "iamInstanceProfile":
suggest = "iam_instance_profile"
elif key == "subnetIds":
suggest = "subnet_ids"
elif key == "instanceType":
suggest = "instance_type"
elif key == "mainVolume":
suggest = "main_volume"
elif key == "proxyConfig":
suggest = "proxy_config"
elif key == "rootVolume":
suggest = "root_volume"
elif key == "securityGroupIds":
suggest = "security_group_ids"
elif key == "sshConfig":
suggest = "ssh_config"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AwsClusterControlPlane. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AwsClusterControlPlane.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AwsClusterControlPlane.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
aws_services_authentication: 'outputs.AwsClusterControlPlaneAwsServicesAuthentication',
config_encryption: 'outputs.AwsClusterControlPlaneConfigEncryption',
database_encryption: 'outputs.AwsClusterControlPlaneDatabaseEncryption',
iam_instance_profile: str,
subnet_ids: Sequence[str],
version: str,
instance_type: Optional[str] = None,
main_volume: Optional['outputs.AwsClusterControlPlaneMainVolume'] = None,
proxy_config: Optional['outputs.AwsClusterControlPlaneProxyConfig'] = None,
root_volume: Optional['outputs.AwsClusterControlPlaneRootVolume'] = None,
security_group_ids: Optional[Sequence[str]] = None,
ssh_config: Optional['outputs.AwsClusterControlPlaneSshConfig'] = None,
tags: Optional[Mapping[str, str]] = None):
"""
:param 'AwsClusterControlPlaneAwsServicesAuthenticationArgs' aws_services_authentication: Required. Authentication configuration for management of AWS resources.
:param 'AwsClusterControlPlaneConfigEncryptionArgs' config_encryption: Required. The ARN of the AWS KMS key used to encrypt cluster configuration.
:param 'AwsClusterControlPlaneDatabaseEncryptionArgs' database_encryption: Required. The ARN of the AWS KMS key used to encrypt cluster secrets.
:param str iam_instance_profile: Required. The name of the AWS IAM instance profile to assign to each control plane replica.
:param Sequence[str] subnet_ids: Required. The list of subnets where control plane replicas will run. A replica will be provisioned on each subnet and up to three values can be provided. Each subnet must be in a different AWS Availability Zone (AZ).
:param str version: Required. The Kubernetes version to run on control plane replicas (e.g. `1.19.10-gke.1000`). You can list all supported versions on a given Google Cloud region by calling .
:param str instance_type: Optional. The AWS instance type. When unspecified, it defaults to `t3.medium`.
:param 'AwsClusterControlPlaneMainVolumeArgs' main_volume: Optional. Configuration related to the main volume provisioned for each control plane replica. The main volume is in charge of storing all of the cluster's etcd state. Volumes will be provisioned in the availability zone associated with the corresponding subnet. When unspecified, it defaults to 8 GiB with the GP2 volume type.
:param 'AwsClusterControlPlaneProxyConfigArgs' proxy_config: Proxy configuration for outbound HTTP(S) traffic.
:param 'AwsClusterControlPlaneRootVolumeArgs' root_volume: Optional. Configuration related to the root volume provisioned for each control plane replica. Volumes will be provisioned in the availability zone associated with the corresponding subnet. When unspecified, it defaults to 32 GiB with the GP2 volume type.
:param Sequence[str] security_group_ids: Optional. The IDs of additional security groups to add to control plane replicas. The Anthos Multi-Cloud API will automatically create and manage security groups with the minimum rules needed for a functioning cluster.
:param 'AwsClusterControlPlaneSshConfigArgs' ssh_config: Optional. SSH configuration for how to access the underlying control plane machines.
:param Mapping[str, str] tags: Optional. A set of AWS resource tags to propagate to all underlying managed AWS resources. Specify at most 50 pairs containing alphanumerics, spaces, and symbols (.+-=_:@/). Keys can be up to 127 Unicode characters. Values can be up to 255 Unicode characters.
"""
pulumi.set(__self__, "aws_services_authentication", aws_services_authentication)
pulumi.set(__self__, "config_encryption", config_encryption)
pulumi.set(__self__, "database_encryption", database_encryption)
pulumi.set(__self__, "iam_instance_profile", iam_instance_profile)
pulumi.set(__self__, "subnet_ids", subnet_ids)
pulumi.set(__self__, "version", version)
if instance_type is not None:
pulumi.set(__self__, "instance_type", instance_type)
if main_volume is not None:
pulumi.set(__self__, "main_volume", main_volume)
if proxy_config is not None:
pulumi.set(__self__, "proxy_config", proxy_config)
if root_volume is not None:
pulumi.set(__self__, "root_volume", root_volume)
if security_group_ids is not None:
pulumi.set(__self__, "security_group_ids", security_group_ids)
if ssh_config is not None:
pulumi.set(__self__, "ssh_config", ssh_config)
if tags is not None:
pulumi.set(__self__, "tags", tags)
@property
@pulumi.getter(name="awsServicesAuthentication")
def aws_services_authentication(self) -> 'outputs.AwsClusterControlPlaneAwsServicesAuthentication':
"""
Required. Authentication configuration for management of AWS resources.
"""
return pulumi.get(self, "aws_services_authentication")
@property
@pulumi.getter(name="configEncryption")
def config_encryption(self) -> 'outputs.AwsClusterControlPlaneConfigEncryption':
"""
Required. The ARN of the AWS KMS key used to encrypt cluster configuration.
"""
return pulumi.get(self, "config_encryption")
@property
@pulumi.getter(name="databaseEncryption")
def database_encryption(self) -> 'outputs.AwsClusterControlPlaneDatabaseEncryption':
"""
Required. The ARN of the AWS KMS key used to encrypt cluster secrets.
"""
return pulumi.get(self, "database_encryption")
@property
@pulumi.getter(name="iamInstanceProfile")
def iam_instance_profile(self) -> str:
"""
Required. The name of the AWS IAM instance profile to assign to each control plane replica.
"""
return pulumi.get(self, "iam_instance_profile")
@property
@pulumi.getter(name="subnetIds")
def subnet_ids(self) -> Sequence[str]:
"""
Required. The list of subnets where control plane replicas will run. A replica will be provisioned on each subnet and up to three values can be provided. Each subnet must be in a different AWS Availability Zone (AZ).
"""
return pulumi.get(self, "subnet_ids")
@property
@pulumi.getter
def version(self) -> str:
"""
Required. The Kubernetes version to run on control plane replicas (e.g. `1.19.10-gke.1000`). You can list all supported versions on a given Google Cloud region by calling .
"""
return pulumi.get(self, "version")
@property
@pulumi.getter(name="instanceType")
def instance_type(self) -> Optional[str]:
"""
Optional. The AWS instance type. When unspecified, it defaults to `t3.medium`.
"""
return pulumi.get(self, "instance_type")
@property
@pulumi.getter(name="mainVolume")
def main_volume(self) -> Optional['outputs.AwsClusterControlPlaneMainVolume']:
"""
Optional. Configuration related to the main volume provisioned for each control plane replica. The main volume is in charge of storing all of the cluster's etcd state. Volumes will be provisioned in the availability zone associated with the corresponding subnet. When unspecified, it defaults to 8 GiB with the GP2 volume type.
"""
return pulumi.get(self, "main_volume")
@property
@pulumi.getter(name="proxyConfig")
def proxy_config(self) -> Optional['outputs.AwsClusterControlPlaneProxyConfig']:
"""
Proxy configuration for outbound HTTP(S) traffic.
"""
return pulumi.get(self, "proxy_config")
@property
@pulumi.getter(name="rootVolume")
def root_volume(self) -> Optional['outputs.AwsClusterControlPlaneRootVolume']:
"""
Optional. Configuration related to the root volume provisioned for each control plane replica. Volumes will be provisioned in the availability zone associated with the corresponding subnet. When unspecified, it defaults to 32 GiB with the GP2 volume type.
"""
return pulumi.get(self, "root_volume")
@property
@pulumi.getter(name="securityGroupIds")
def security_group_ids(self) -> Optional[Sequence[str]]:
"""
Optional. The IDs of additional security groups to add to control plane replicas. The Anthos Multi-Cloud API will automatically create and manage security groups with the minimum rules needed for a functioning cluster.
"""
return pulumi.get(self, "security_group_ids")
@property
@pulumi.getter(name="sshConfig")
def ssh_config(self) -> Optional['outputs.AwsClusterControlPlaneSshConfig']:
"""
Optional. SSH configuration for how to access the underlying control plane machines.
"""
return pulumi.get(self, "ssh_config")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Optional. A set of AWS resource tags to propagate to all underlying managed AWS resources. Specify at most 50 pairs containing alphanumerics, spaces, and symbols (.+-=_:@/). Keys can be up to 127 Unicode characters. Values can be up to 255 Unicode characters.
"""
return pulumi.get(self, "tags")
@pulumi.output_type
class AwsClusterControlPlaneAwsServicesAuthentication(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "roleArn":
suggest = "role_arn"
elif key == "roleSessionName":
suggest = "role_session_name"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AwsClusterControlPlaneAwsServicesAuthentication. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AwsClusterControlPlaneAwsServicesAuthentication.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AwsClusterControlPlaneAwsServicesAuthentication.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
role_arn: str,
role_session_name: Optional[str] = None):
"""
:param str role_arn: Required. The Amazon Resource Name (ARN) of the role that the Anthos Multi-Cloud API will assume when managing AWS resources on your account.
:param str role_session_name: Optional. An identifier for the assumed role session. When unspecified, it defaults to `multicloud-service-agent`.
"""
pulumi.set(__self__, "role_arn", role_arn)
if role_session_name is not None:
pulumi.set(__self__, "role_session_name", role_session_name)
@property
@pulumi.getter(name="roleArn")
def role_arn(self) -> str:
"""
Required. The Amazon Resource Name (ARN) of the role that the Anthos Multi-Cloud API will assume when managing AWS resources on your account.
"""
return pulumi.get(self, "role_arn")
@property
@pulumi.getter(name="roleSessionName")
def role_session_name(self) -> Optional[str]:
"""
Optional. An identifier for the assumed role session. When unspecified, it defaults to `multicloud-service-agent`.
"""
return pulumi.get(self, "role_session_name")
@pulumi.output_type
class AwsClusterControlPlaneConfigEncryption(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "kmsKeyArn":
suggest = "kms_key_arn"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AwsClusterControlPlaneConfigEncryption. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AwsClusterControlPlaneConfigEncryption.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AwsClusterControlPlaneConfigEncryption.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
kms_key_arn: str):
"""
:param str kms_key_arn: Optional. The Amazon Resource Name (ARN) of the Customer Managed Key (CMK) used to encrypt AWS EBS volumes. If not specified, the default Amazon managed key associated to the AWS region where this cluster runs will be used.
"""
pulumi.set(__self__, "kms_key_arn", kms_key_arn)
@property
@pulumi.getter(name="kmsKeyArn")
def kms_key_arn(self) -> str:
"""
Optional. The Amazon Resource Name (ARN) of the Customer Managed Key (CMK) used to encrypt AWS EBS volumes. If not specified, the default Amazon managed key associated to the AWS region where this cluster runs will be used.
"""
return pulumi.get(self, "kms_key_arn")
@pulumi.output_type
class AwsClusterControlPlaneDatabaseEncryption(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "kmsKeyArn":
suggest = "kms_key_arn"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AwsClusterControlPlaneDatabaseEncryption. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AwsClusterControlPlaneDatabaseEncryption.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AwsClusterControlPlaneDatabaseEncryption.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
kms_key_arn: str):
"""
:param str kms_key_arn: Optional. The Amazon Resource Name (ARN) of the Customer Managed Key (CMK) used to encrypt AWS EBS volumes. If not specified, the default Amazon managed key associated to the AWS region where this cluster runs will be used.
"""
pulumi.set(__self__, "kms_key_arn", kms_key_arn)
@property
@pulumi.getter(name="kmsKeyArn")
def kms_key_arn(self) -> str:
"""
Optional. The Amazon Resource Name (ARN) of the Customer Managed Key (CMK) used to encrypt AWS EBS volumes. If not specified, the default Amazon managed key associated to the AWS region where this cluster runs will be used.
"""
return pulumi.get(self, "kms_key_arn")
@pulumi.output_type
class AwsClusterControlPlaneMainVolume(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "kmsKeyArn":
suggest = "kms_key_arn"
elif key == "sizeGib":
suggest = "size_gib"
elif key == "volumeType":
suggest = "volume_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AwsClusterControlPlaneMainVolume. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AwsClusterControlPlaneMainVolume.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AwsClusterControlPlaneMainVolume.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
iops: Optional[int] = None,
kms_key_arn: Optional[str] = None,
size_gib: Optional[int] = None,
volume_type: Optional[str] = None):
"""
:param int iops: Optional. The number of I/O operations per second (IOPS) to provision for GP3 volume.
:param str kms_key_arn: Optional. The Amazon Resource Name (ARN) of the Customer Managed Key (CMK) used to encrypt AWS EBS volumes. If not specified, the default Amazon managed key associated to the AWS region where this cluster runs will be used.
:param int size_gib: Optional. The size of the volume, in GiBs. When unspecified, a default value is provided. See the specific reference in the parent resource.
:param str volume_type: Optional. Type of the EBS volume. When unspecified, it defaults to GP2 volume. Possible values: VOLUME_TYPE_UNSPECIFIED, GP2, GP3
"""
if iops is not None:
pulumi.set(__self__, "iops", iops)
if kms_key_arn is not None:
pulumi.set(__self__, "kms_key_arn", kms_key_arn)
if size_gib is not None:
pulumi.set(__self__, "size_gib", size_gib)
if volume_type is not None:
pulumi.set(__self__, "volume_type", volume_type)
@property
@pulumi.getter
def iops(self) -> Optional[int]:
"""
Optional. The number of I/O operations per second (IOPS) to provision for GP3 volume.
"""
return pulumi.get(self, "iops")
@property
@pulumi.getter(name="kmsKeyArn")
def kms_key_arn(self) -> Optional[str]:
"""
Optional. The Amazon Resource Name (ARN) of the Customer Managed Key (CMK) used to encrypt AWS EBS volumes. If not specified, the default Amazon managed key associated to the AWS region where this cluster runs will be used.
"""
return pulumi.get(self, "kms_key_arn")
@property
@pulumi.getter(name="sizeGib")
def size_gib(self) -> Optional[int]:
"""
Optional. The size of the volume, in GiBs. When unspecified, a default value is provided. See the specific reference in the parent resource.
"""
return pulumi.get(self, "size_gib")
@property
@pulumi.getter(name="volumeType")
def volume_type(self) -> Optional[str]:
"""
Optional. Type of the EBS volume. When unspecified, it defaults to GP2 volume. Possible values: VOLUME_TYPE_UNSPECIFIED, GP2, GP3
"""
return pulumi.get(self, "volume_type")
@pulumi.output_type
class AwsClusterControlPlaneProxyConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "secretArn":
suggest = "secret_arn"
elif key == "secretVersion":
suggest = "secret_version"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AwsClusterControlPlaneProxyConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AwsClusterControlPlaneProxyConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AwsClusterControlPlaneProxyConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
secret_arn: str,
secret_version: str):
"""
:param str secret_arn: The ARN of the AWS Secret Manager secret that contains the HTTP(S) proxy configuration.
:param str secret_version: The version string of the AWS Secret Manager secret that contains the HTTP(S) proxy configuration.
"""
pulumi.set(__self__, "secret_arn", secret_arn)
pulumi.set(__self__, "secret_version", secret_version)
@property
@pulumi.getter(name="secretArn")
def secret_arn(self) -> str:
"""
The ARN of the AWS Secret Manager secret that contains the HTTP(S) proxy configuration.
"""
return pulumi.get(self, "secret_arn")
@property
@pulumi.getter(name="secretVersion")
def secret_version(self) -> str:
"""
The version string of the AWS Secret Manager secret that contains the HTTP(S) proxy configuration.
"""
return pulumi.get(self, "secret_version")
@pulumi.output_type
class AwsClusterControlPlaneRootVolume(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "kmsKeyArn":
suggest = "kms_key_arn"
elif key == "sizeGib":
suggest = "size_gib"
elif key == "volumeType":
suggest = "volume_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AwsClusterControlPlaneRootVolume. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AwsClusterControlPlaneRootVolume.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AwsClusterControlPlaneRootVolume.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
iops: Optional[int] = None,
kms_key_arn: Optional[str] = None,
size_gib: Optional[int] = None,
volume_type: Optional[str] = None):
"""
:param int iops: Optional. The number of I/O operations per second (IOPS) to provision for GP3 volume.
:param str kms_key_arn: Optional. The Amazon Resource Name (ARN) of the Customer Managed Key (CMK) used to encrypt AWS EBS volumes. If not specified, the default Amazon managed key associated to the AWS region where this cluster runs will be used.
:param int size_gib: Optional. The size of the volume, in GiBs. When unspecified, a default value is provided. See the specific reference in the parent resource.
:param str volume_type: Optional. Type of the EBS volume. When unspecified, it defaults to GP2 volume. Possible values: VOLUME_TYPE_UNSPECIFIED, GP2, GP3
"""
if iops is not None:
pulumi.set(__self__, "iops", iops)
if kms_key_arn is not None:
pulumi.set(__self__, "kms_key_arn", kms_key_arn)
if size_gib is not None:
pulumi.set(__self__, "size_gib", size_gib)
if volume_type is not None:
pulumi.set(__self__, "volume_type", volume_type)
@property
@pulumi.getter
def iops(self) -> Optional[int]:
"""
Optional. The number of I/O operations per second (IOPS) to provision for GP3 volume.
"""
return pulumi.get(self, "iops")
@property
@pulumi.getter(name="kmsKeyArn")
def kms_key_arn(self) -> Optional[str]:
"""
Optional. The Amazon Resource Name (ARN) of the Customer Managed Key (CMK) used to encrypt AWS EBS volumes. If not specified, the default Amazon managed key associated with the AWS region where this cluster runs will be used.
"""
return pulumi.get(self, "kms_key_arn")
@property
@pulumi.getter(name="sizeGib")
def size_gib(self) -> Optional[int]:
"""
Optional. The size of the volume, in GiBs. When unspecified, a default value is provided. See the specific reference in the parent resource.
"""
return pulumi.get(self, "size_gib")
@property
@pulumi.getter(name="volumeType")
def volume_type(self) -> Optional[str]:
"""
Optional. Type of the EBS volume. When unspecified, it defaults to GP2 volume. Possible values: VOLUME_TYPE_UNSPECIFIED, GP2, GP3
"""
return pulumi.get(self, "volume_type")
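# Example (illustrative comment only): a control plane root volume overriding
# every optional field. Per the docstrings above, `iops` is only meaningful
# for GP3 volumes; all values below are hypothetical placeholders.
#
#   root_volume = AwsClusterControlPlaneRootVolume(
#       volume_type="GP3",
#       size_gib=64,
#       iops=3000,
#       kms_key_arn="arn:aws:kms:us-west-2:111122223333:key/example",  # placeholder
#   )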
@pulumi.output_type
class AwsClusterControlPlaneSshConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "ec2KeyPair":
suggest = "ec2_key_pair"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AwsClusterControlPlaneSshConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AwsClusterControlPlaneSshConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AwsClusterControlPlaneSshConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
ec2_key_pair: str):
"""
:param str ec2_key_pair: Required. The name of the EC2 key pair used to log in to cluster machines.
"""
pulumi.set(__self__, "ec2_key_pair", ec2_key_pair)
@property
@pulumi.getter(name="ec2KeyPair")
def ec2_key_pair(self) -> str:
"""
Required. The name of the EC2 key pair used to log in to cluster machines.
"""
return pulumi.get(self, "ec2_key_pair")
@pulumi.output_type
class AwsClusterFleet(dict):
def __init__(__self__, *,
membership: Optional[str] = None,
project: Optional[str] = None):
"""
:param str membership: The name of the managed Hub Membership resource associated with this cluster. Membership names are formatted as `projects/<project-number>/locations/global/membership/<cluster-id>`.
:param str project: The project for the resource
"""
if membership is not None:
pulumi.set(__self__, "membership", membership)
if project is not None:
pulumi.set(__self__, "project", project)
@property
@pulumi.getter
def membership(self) -> Optional[str]:
"""
The name of the managed Hub Membership resource associated with this cluster. Membership names are formatted as `projects/<project-number>/locations/global/membership/<cluster-id>`.
"""
return pulumi.get(self, "membership")
@property
@pulumi.getter
def project(self) -> Optional[str]:
"""
The project for the resource
"""
return pulumi.get(self, "project")
@pulumi.output_type
class AwsClusterNetworking(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "podAddressCidrBlocks":
suggest = "pod_address_cidr_blocks"
elif key == "serviceAddressCidrBlocks":
suggest = "service_address_cidr_blocks"
elif key == "vpcId":
suggest = "vpc_id"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AwsClusterNetworking. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AwsClusterNetworking.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AwsClusterNetworking.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
pod_address_cidr_blocks: Sequence[str],
service_address_cidr_blocks: Sequence[str],
vpc_id: str):
"""
:param Sequence[str] pod_address_cidr_blocks: Required. All pods in the cluster are assigned an RFC1918 IPv4 address from these ranges. Only a single range is supported. This field cannot be changed after creation.
:param Sequence[str] service_address_cidr_blocks: Required. All services in the cluster are assigned an RFC1918 IPv4 address from these ranges. Only a single range is supported. This field cannot be changed after creation.
:param str vpc_id: Required. The VPC associated with the cluster. All components in the cluster (i.e. control plane and node pools) run on a single VPC. This field cannot be changed after creation.
"""
pulumi.set(__self__, "pod_address_cidr_blocks", pod_address_cidr_blocks)
pulumi.set(__self__, "service_address_cidr_blocks", service_address_cidr_blocks)
pulumi.set(__self__, "vpc_id", vpc_id)
@property
@pulumi.getter(name="podAddressCidrBlocks")
def pod_address_cidr_blocks(self) -> Sequence[str]:
"""
Required. All pods in the cluster are assigned an RFC1918 IPv4 address from these ranges. Only a single range is supported. This field cannot be changed after creation.
"""
return pulumi.get(self, "pod_address_cidr_blocks")
@property
@pulumi.getter(name="serviceAddressCidrBlocks")
def service_address_cidr_blocks(self) -> Sequence[str]:
"""
Required. All services in the cluster are assigned an RFC1918 IPv4 address from these ranges. Only a single range is supported. This field cannot be changed after creation.
"""
return pulumi.get(self, "service_address_cidr_blocks")
@property
@pulumi.getter(name="vpcId")
def vpc_id(self) -> str:
"""
Required. The VPC associated with the cluster. All components in the cluster (i.e. control plane and node pools) run on a single VPC. This field cannot be changed after creation.
"""
return pulumi.get(self, "vpc_id")
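# Example (illustrative comment only): an AwsClusterNetworking value. Only a
# single CIDR range per list is supported, and none of these fields can be
# changed after cluster creation; the ranges and VPC ID are placeholders.
#
#   networking = AwsClusterNetworking(
#       pod_address_cidr_blocks=["10.2.0.0/16"],
#       service_address_cidr_blocks=["10.1.0.0/16"],
#       vpc_id="vpc-0123456789abcdef0",  # placeholder
#   )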
@pulumi.output_type
class AwsClusterWorkloadIdentityConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "identityProvider":
suggest = "identity_provider"
elif key == "issuerUri":
suggest = "issuer_uri"
elif key == "workloadPool":
suggest = "workload_pool"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AwsClusterWorkloadIdentityConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AwsClusterWorkloadIdentityConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AwsClusterWorkloadIdentityConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
identity_provider: Optional[str] = None,
issuer_uri: Optional[str] = None,
workload_pool: Optional[str] = None):
if identity_provider is not None:
pulumi.set(__self__, "identity_provider", identity_provider)
if issuer_uri is not None:
pulumi.set(__self__, "issuer_uri", issuer_uri)
if workload_pool is not None:
pulumi.set(__self__, "workload_pool", workload_pool)
@property
@pulumi.getter(name="identityProvider")
def identity_provider(self) -> Optional[str]:
return pulumi.get(self, "identity_provider")
@property
@pulumi.getter(name="issuerUri")
def issuer_uri(self) -> Optional[str]:
return pulumi.get(self, "issuer_uri")
@property
@pulumi.getter(name="workloadPool")
def workload_pool(self) -> Optional[str]:
return pulumi.get(self, "workload_pool")
@pulumi.output_type
class AwsNodePoolAutoscaling(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "maxNodeCount":
suggest = "max_node_count"
elif key == "minNodeCount":
suggest = "min_node_count"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AwsNodePoolAutoscaling. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AwsNodePoolAutoscaling.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AwsNodePoolAutoscaling.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
max_node_count: int,
min_node_count: int):
"""
:param int max_node_count: Required. Maximum number of nodes in the NodePool. Must be >= min_node_count.
:param int min_node_count: Required. Minimum number of nodes in the NodePool. Must be >= 1 and <= max_node_count.
"""
pulumi.set(__self__, "max_node_count", max_node_count)
pulumi.set(__self__, "min_node_count", min_node_count)
@property
@pulumi.getter(name="maxNodeCount")
def max_node_count(self) -> int:
"""
Required. Maximum number of nodes in the NodePool. Must be >= min_node_count.
"""
return pulumi.get(self, "max_node_count")
@property
@pulumi.getter(name="minNodeCount")
def min_node_count(self) -> int:
"""
Required. Minimum number of nodes in the NodePool. Must be >= 1 and <= max_node_count.
"""
return pulumi.get(self, "min_node_count")
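# Example (illustrative comment only): AwsNodePoolAutoscaling bounds. The
# docstrings above imply the invariant 1 <= min_node_count <= max_node_count.
#
#   autoscaling = AwsNodePoolAutoscaling(min_node_count=1, max_node_count=5)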
@pulumi.output_type
class AwsNodePoolConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "configEncryption":
suggest = "config_encryption"
elif key == "iamInstanceProfile":
suggest = "iam_instance_profile"
elif key == "instanceType":
suggest = "instance_type"
elif key == "rootVolume":
suggest = "root_volume"
elif key == "securityGroupIds":
suggest = "security_group_ids"
elif key == "sshConfig":
suggest = "ssh_config"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AwsNodePoolConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AwsNodePoolConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AwsNodePoolConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
config_encryption: 'outputs.AwsNodePoolConfigConfigEncryption',
iam_instance_profile: str,
instance_type: Optional[str] = None,
labels: Optional[Mapping[str, str]] = None,
root_volume: Optional['outputs.AwsNodePoolConfigRootVolume'] = None,
security_group_ids: Optional[Sequence[str]] = None,
ssh_config: Optional['outputs.AwsNodePoolConfigSshConfig'] = None,
tags: Optional[Mapping[str, str]] = None,
taints: Optional[Sequence['outputs.AwsNodePoolConfigTaint']] = None):
"""
:param 'AwsNodePoolConfigConfigEncryptionArgs' config_encryption: Required. The ARN of the AWS KMS key used to encrypt node pool configuration.
:param str iam_instance_profile: Required. The name of the AWS IAM role assigned to nodes in the pool.
:param str instance_type: Optional. The AWS instance type. When unspecified, it defaults to `t3.medium`.
:param Mapping[str, str] labels: Optional. The initial labels assigned to nodes of this node pool. An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
:param 'AwsNodePoolConfigRootVolumeArgs' root_volume: Optional. Template for the root volume provisioned for node pool nodes. Volumes will be provisioned in the availability zone assigned to the node pool subnet. When unspecified, it defaults to 32 GiB with the GP2 volume type.
:param Sequence[str] security_group_ids: Optional. The IDs of additional security groups to add to nodes in this pool. The manager will automatically create security groups with minimum rules needed for a functioning cluster.
:param 'AwsNodePoolConfigSshConfigArgs' ssh_config: Optional. The SSH configuration.
:param Mapping[str, str] tags: Optional. Key/value metadata to assign to each underlying AWS resource. Specify at most 50 pairs containing alphanumerics, spaces, and symbols (.+-=_:@/). Keys can be up to 127 Unicode characters. Values can be up to 255 Unicode characters.
:param Sequence['AwsNodePoolConfigTaintArgs'] taints: Optional. The initial taints assigned to nodes of this node pool.
"""
pulumi.set(__self__, "config_encryption", config_encryption)
pulumi.set(__self__, "iam_instance_profile", iam_instance_profile)
if instance_type is not None:
pulumi.set(__self__, "instance_type", instance_type)
if labels is not None:
pulumi.set(__self__, "labels", labels)
if root_volume is not None:
pulumi.set(__self__, "root_volume", root_volume)
if security_group_ids is not None:
pulumi.set(__self__, "security_group_ids", security_group_ids)
if ssh_config is not None:
pulumi.set(__self__, "ssh_config", ssh_config)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if taints is not None:
pulumi.set(__self__, "taints", taints)
@property
@pulumi.getter(name="configEncryption")
def config_encryption(self) -> 'outputs.AwsNodePoolConfigConfigEncryption':
"""
Required. The ARN of the AWS KMS key used to encrypt node pool configuration.
"""
return pulumi.get(self, "config_encryption")
@property
@pulumi.getter(name="iamInstanceProfile")
def iam_instance_profile(self) -> str:
"""
Required. The name of the AWS IAM role assigned to nodes in the pool.
"""
return pulumi.get(self, "iam_instance_profile")
@property
@pulumi.getter(name="instanceType")
def instance_type(self) -> Optional[str]:
"""
Optional. The AWS instance type. When unspecified, it defaults to `t3.medium`.
"""
return pulumi.get(self, "instance_type")
@property
@pulumi.getter
def labels(self) -> Optional[Mapping[str, str]]:
"""
Optional. The initial labels assigned to nodes of this node pool. An object containing a list of "key": value pairs. Example: { "name": "wrench", "mass": "1.3kg", "count": "3" }.
"""
return pulumi.get(self, "labels")
@property
@pulumi.getter(name="rootVolume")
def root_volume(self) -> Optional['outputs.AwsNodePoolConfigRootVolume']:
"""
Optional. Template for the root volume provisioned for node pool nodes. Volumes will be provisioned in the availability zone assigned to the node pool subnet. When unspecified, it defaults to 32 GiB with the GP2 volume type.
"""
return pulumi.get(self, "root_volume")
@property
@pulumi.getter(name="securityGroupIds")
def security_group_ids(self) -> Optional[Sequence[str]]:
"""
Optional. The IDs of additional security groups to add to nodes in this pool. The manager will automatically create security groups with minimum rules needed for a functioning cluster.
"""
return pulumi.get(self, "security_group_ids")
@property
@pulumi.getter(name="sshConfig")
def ssh_config(self) -> Optional['outputs.AwsNodePoolConfigSshConfig']:
"""
Optional. The SSH configuration.
"""
return pulumi.get(self, "ssh_config")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Optional. Key/value metadata to assign to each underlying AWS resource. Specify at most 50 pairs containing alphanumerics, spaces, and symbols (.+-=_:@/). Keys can be up to 127 Unicode characters. Values can be up to 255 Unicode characters.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def taints(self) -> Optional[Sequence['outputs.AwsNodePoolConfigTaint']]:
"""
Optional. The initial taints assigned to nodes of this node pool.
"""
return pulumi.get(self, "taints")
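# Example (illustrative comment only): composing an AwsNodePoolConfig from the
# nested output types defined nearby. Only config_encryption and
# iam_instance_profile are required; the other fields fall back to the
# documented defaults (e.g. `t3.medium`, a 32-GiB GP2 root volume). All
# identifiers below are placeholders.
#
#   node_config = AwsNodePoolConfig(
#       config_encryption=AwsNodePoolConfigConfigEncryption(
#           kms_key_arn="arn:aws:kms:us-west-2:111122223333:key/example",  # placeholder
#       ),
#       iam_instance_profile="example-node-profile",  # placeholder
#       labels={"env": "dev"},
#       taints=[AwsNodePoolConfigTaint(key="dedicated", value="gpu", effect="NO_SCHEDULE")],
#   )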
@pulumi.output_type
class AwsNodePoolConfigConfigEncryption(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "kmsKeyArn":
suggest = "kms_key_arn"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AwsNodePoolConfigConfigEncryption. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AwsNodePoolConfigConfigEncryption.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AwsNodePoolConfigConfigEncryption.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
kms_key_arn: str):
"""
:param str kms_key_arn: Required. The Amazon Resource Name (ARN) of the AWS KMS key used to encrypt node pool configuration.
"""
pulumi.set(__self__, "kms_key_arn", kms_key_arn)
@property
@pulumi.getter(name="kmsKeyArn")
def kms_key_arn(self) -> str:
"""
Required. The Amazon Resource Name (ARN) of the AWS KMS key used to encrypt node pool configuration.
"""
return pulumi.get(self, "kms_key_arn")
@pulumi.output_type
class AwsNodePoolConfigRootVolume(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "kmsKeyArn":
suggest = "kms_key_arn"
elif key == "sizeGib":
suggest = "size_gib"
elif key == "volumeType":
suggest = "volume_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AwsNodePoolConfigRootVolume. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AwsNodePoolConfigRootVolume.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AwsNodePoolConfigRootVolume.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
iops: Optional[int] = None,
kms_key_arn: Optional[str] = None,
size_gib: Optional[int] = None,
volume_type: Optional[str] = None):
"""
:param int iops: Optional. The number of I/O operations per second (IOPS) to provision for GP3 volume.
:param str kms_key_arn: Optional. The Amazon Resource Name (ARN) of the Customer Managed Key (CMK) used to encrypt AWS EBS volumes. If not specified, the default Amazon managed key associated with the AWS region where this cluster runs will be used.
:param int size_gib: Optional. The size of the volume, in GiBs. When unspecified, a default value is provided. See the specific reference in the parent resource.
:param str volume_type: Optional. Type of the EBS volume. When unspecified, it defaults to GP2 volume. Possible values: VOLUME_TYPE_UNSPECIFIED, GP2, GP3
"""
if iops is not None:
pulumi.set(__self__, "iops", iops)
if kms_key_arn is not None:
pulumi.set(__self__, "kms_key_arn", kms_key_arn)
if size_gib is not None:
pulumi.set(__self__, "size_gib", size_gib)
if volume_type is not None:
pulumi.set(__self__, "volume_type", volume_type)
@property
@pulumi.getter
def iops(self) -> Optional[int]:
"""
Optional. The number of I/O operations per second (IOPS) to provision for GP3 volume.
"""
return pulumi.get(self, "iops")
@property
@pulumi.getter(name="kmsKeyArn")
def kms_key_arn(self) -> Optional[str]:
"""
Optional. The Amazon Resource Name (ARN) of the Customer Managed Key (CMK) used to encrypt AWS EBS volumes. If not specified, the default Amazon managed key associated with the AWS region where this cluster runs will be used.
"""
return pulumi.get(self, "kms_key_arn")
@property
@pulumi.getter(name="sizeGib")
def size_gib(self) -> Optional[int]:
"""
Optional. The size of the volume, in GiBs. When unspecified, a default value is provided. See the specific reference in the parent resource.
"""
return pulumi.get(self, "size_gib")
@property
@pulumi.getter(name="volumeType")
def volume_type(self) -> Optional[str]:
"""
Optional. Type of the EBS volume. When unspecified, it defaults to GP2 volume. Possible values: VOLUME_TYPE_UNSPECIFIED, GP2, GP3
"""
return pulumi.get(self, "volume_type")
@pulumi.output_type
class AwsNodePoolConfigSshConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "ec2KeyPair":
suggest = "ec2_key_pair"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AwsNodePoolConfigSshConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AwsNodePoolConfigSshConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AwsNodePoolConfigSshConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
ec2_key_pair: str):
"""
:param str ec2_key_pair: Required. The name of the EC2 key pair used to log in to cluster machines.
"""
pulumi.set(__self__, "ec2_key_pair", ec2_key_pair)
@property
@pulumi.getter(name="ec2KeyPair")
def ec2_key_pair(self) -> str:
"""
Required. The name of the EC2 key pair used to log in to cluster machines.
"""
return pulumi.get(self, "ec2_key_pair")
@pulumi.output_type
class AwsNodePoolConfigTaint(dict):
def __init__(__self__, *,
effect: str,
key: str,
value: str):
"""
:param str effect: Required. The taint effect. Possible values: EFFECT_UNSPECIFIED, NO_SCHEDULE, PREFER_NO_SCHEDULE, NO_EXECUTE
:param str key: Required. Key for the taint.
:param str value: Required. Value for the taint.
"""
pulumi.set(__self__, "effect", effect)
pulumi.set(__self__, "key", key)
pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def effect(self) -> str:
"""
Required. The taint effect. Possible values: EFFECT_UNSPECIFIED, NO_SCHEDULE, PREFER_NO_SCHEDULE, NO_EXECUTE
"""
return pulumi.get(self, "effect")
@property
@pulumi.getter
def key(self) -> str:
"""
Required. Key for the taint.
"""
return pulumi.get(self, "key")
@property
@pulumi.getter
def value(self) -> str:
"""
Required. Value for the taint.
"""
return pulumi.get(self, "value")
@pulumi.output_type
class AwsNodePoolMaxPodsConstraint(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "maxPodsPerNode":
suggest = "max_pods_per_node"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AwsNodePoolMaxPodsConstraint. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AwsNodePoolMaxPodsConstraint.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AwsNodePoolMaxPodsConstraint.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
max_pods_per_node: int):
"""
:param int max_pods_per_node: Required. The maximum number of pods to schedule on a single node.
"""
pulumi.set(__self__, "max_pods_per_node", max_pods_per_node)
@property
@pulumi.getter(name="maxPodsPerNode")
def max_pods_per_node(self) -> int:
"""
Required. The maximum number of pods to schedule on a single node.
"""
return pulumi.get(self, "max_pods_per_node")
@pulumi.output_type
class AzureClusterAuthorization(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "adminUsers":
suggest = "admin_users"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AzureClusterAuthorization. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AzureClusterAuthorization.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AzureClusterAuthorization.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
admin_users: Sequence['outputs.AzureClusterAuthorizationAdminUser']):
"""
:param Sequence['AzureClusterAuthorizationAdminUserArgs'] admin_users: Required. Users that can perform operations as a cluster admin. A new ClusterRoleBinding will be created to grant the cluster-admin ClusterRole to the users. At most one user can be specified. For more info on RBAC, see https://kubernetes.io/docs/reference/access-authn-authz/rbac/#user-facing-roles
"""
pulumi.set(__self__, "admin_users", admin_users)
@property
@pulumi.getter(name="adminUsers")
def admin_users(self) -> Sequence['outputs.AzureClusterAuthorizationAdminUser']:
"""
Required. Users that can perform operations as a cluster admin. A new ClusterRoleBinding will be created to grant the cluster-admin ClusterRole to the users. At most one user can be specified. For more info on RBAC, see https://kubernetes.io/docs/reference/access-authn-authz/rbac/#user-facing-roles
"""
return pulumi.get(self, "admin_users")
@pulumi.output_type
class AzureClusterAuthorizationAdminUser(dict):
def __init__(__self__, *,
username: str):
"""
:param str username: Required. The name of the user, e.g. `my-gcp-id@gmail.com`.
"""
pulumi.set(__self__, "username", username)
@property
@pulumi.getter
def username(self) -> str:
"""
Required. The name of the user, e.g. `my-gcp-id@gmail.com`.
"""
return pulumi.get(self, "username")
@pulumi.output_type
class AzureClusterControlPlane(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "sshConfig":
suggest = "ssh_config"
elif key == "subnetId":
suggest = "subnet_id"
elif key == "databaseEncryption":
suggest = "database_encryption"
elif key == "mainVolume":
suggest = "main_volume"
elif key == "proxyConfig":
suggest = "proxy_config"
elif key == "replicaPlacements":
suggest = "replica_placements"
elif key == "rootVolume":
suggest = "root_volume"
elif key == "vmSize":
suggest = "vm_size"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AzureClusterControlPlane. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AzureClusterControlPlane.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AzureClusterControlPlane.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
ssh_config: 'outputs.AzureClusterControlPlaneSshConfig',
subnet_id: str,
version: str,
database_encryption: Optional['outputs.AzureClusterControlPlaneDatabaseEncryption'] = None,
main_volume: Optional['outputs.AzureClusterControlPlaneMainVolume'] = None,
proxy_config: Optional['outputs.AzureClusterControlPlaneProxyConfig'] = None,
replica_placements: Optional[Sequence['outputs.AzureClusterControlPlaneReplicaPlacement']] = None,
root_volume: Optional['outputs.AzureClusterControlPlaneRootVolume'] = None,
tags: Optional[Mapping[str, str]] = None,
vm_size: Optional[str] = None):
"""
:param 'AzureClusterControlPlaneSshConfigArgs' ssh_config: Required. SSH configuration for how to access the underlying control plane machines.
:param str subnet_id: For a given replica, the ARM ID of the subnet where the control plane VM is deployed. Make sure it's a subnet under the virtual network in the cluster configuration.
:param str version: Required. The Kubernetes version to run on control plane replicas (e.g. `1.19.10-gke.1000`). You can list all supported versions in a given Google Cloud region by calling GetAzureServerConfig.
:param 'AzureClusterControlPlaneDatabaseEncryptionArgs' database_encryption: Optional. Configuration related to application-layer secrets encryption.
:param 'AzureClusterControlPlaneMainVolumeArgs' main_volume: Optional. Configuration related to the main volume provisioned for each control plane replica. The main volume is in charge of storing all of the cluster's etcd state. When unspecified, it defaults to an 8-GiB Azure Disk.
:param 'AzureClusterControlPlaneProxyConfigArgs' proxy_config: Proxy configuration for outbound HTTP(S) traffic.
:param Sequence['AzureClusterControlPlaneReplicaPlacementArgs'] replica_placements: Configuration for where to place the control plane replicas. Up to three replica placement instances can be specified. If replica_placements is set, the replica placement instances will be applied to the three control plane replicas as evenly as possible.
:param 'AzureClusterControlPlaneRootVolumeArgs' root_volume: Optional. Configuration related to the root volume provisioned for each control plane replica. When unspecified, it defaults to a 32-GiB Azure Disk.
:param Mapping[str, str] tags: Optional. A set of tags to apply to all underlying control plane Azure resources.
:param str vm_size: Optional. The Azure VM size name. Example: `Standard_DS2_v2`. For available VM sizes, see https://docs.microsoft.com/en-us/azure/virtual-machines/vm-naming-conventions. When unspecified, it defaults to `Standard_DS2_v2`.
"""
pulumi.set(__self__, "ssh_config", ssh_config)
pulumi.set(__self__, "subnet_id", subnet_id)
pulumi.set(__self__, "version", version)
if database_encryption is not None:
pulumi.set(__self__, "database_encryption", database_encryption)
if main_volume is not None:
pulumi.set(__self__, "main_volume", main_volume)
if proxy_config is not None:
pulumi.set(__self__, "proxy_config", proxy_config)
if replica_placements is not None:
pulumi.set(__self__, "replica_placements", replica_placements)
if root_volume is not None:
pulumi.set(__self__, "root_volume", root_volume)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if vm_size is not None:
pulumi.set(__self__, "vm_size", vm_size)
@property
@pulumi.getter(name="sshConfig")
def ssh_config(self) -> 'outputs.AzureClusterControlPlaneSshConfig':
"""
Required. SSH configuration for how to access the underlying control plane machines.
"""
return pulumi.get(self, "ssh_config")
@property
@pulumi.getter(name="subnetId")
def subnet_id(self) -> str:
"""
For a given replica, the ARM ID of the subnet where the control plane VM is deployed. Make sure it's a subnet under the virtual network in the cluster configuration.
"""
return pulumi.get(self, "subnet_id")
@property
@pulumi.getter
def version(self) -> str:
"""
Required. The Kubernetes version to run on control plane replicas (e.g. `1.19.10-gke.1000`). You can list all supported versions in a given Google Cloud region by calling GetAzureServerConfig.
"""
return pulumi.get(self, "version")
@property
@pulumi.getter(name="databaseEncryption")
def database_encryption(self) -> Optional['outputs.AzureClusterControlPlaneDatabaseEncryption']:
"""
Optional. Configuration related to application-layer secrets encryption.
"""
return pulumi.get(self, "database_encryption")
@property
@pulumi.getter(name="mainVolume")
def main_volume(self) -> Optional['outputs.AzureClusterControlPlaneMainVolume']:
"""
Optional. Configuration related to the main volume provisioned for each control plane replica. The main volume is in charge of storing all of the cluster's etcd state. When unspecified, it defaults to an 8-GiB Azure Disk.
"""
return pulumi.get(self, "main_volume")
@property
@pulumi.getter(name="proxyConfig")
def proxy_config(self) -> Optional['outputs.AzureClusterControlPlaneProxyConfig']:
"""
Proxy configuration for outbound HTTP(S) traffic.
"""
return pulumi.get(self, "proxy_config")
@property
@pulumi.getter(name="replicaPlacements")
def replica_placements(self) -> Optional[Sequence['outputs.AzureClusterControlPlaneReplicaPlacement']]:
"""
Configuration for where to place the control plane replicas. Up to three replica placement instances can be specified. If replica_placements is set, the replica placement instances will be applied to the three control plane replicas as evenly as possible.
"""
return pulumi.get(self, "replica_placements")
@property
@pulumi.getter(name="rootVolume")
def root_volume(self) -> Optional['outputs.AzureClusterControlPlaneRootVolume']:
"""
Optional. Configuration related to the root volume provisioned for each control plane replica. When unspecified, it defaults to a 32-GiB Azure Disk.
"""
return pulumi.get(self, "root_volume")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Optional. A set of tags to apply to all underlying control plane Azure resources.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="vmSize")
def vm_size(self) -> Optional[str]:
"""
Optional. The Azure VM size name. Example: `Standard_DS2_v2`. For available VM sizes, see https://docs.microsoft.com/en-us/azure/virtual-machines/vm-naming-conventions. When unspecified, it defaults to `Standard_DS2_v2`.
"""
return pulumi.get(self, "vm_size")
@pulumi.output_type
class AzureClusterControlPlaneDatabaseEncryption(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "keyId":
suggest = "key_id"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AzureClusterControlPlaneDatabaseEncryption. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AzureClusterControlPlaneDatabaseEncryption.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AzureClusterControlPlaneDatabaseEncryption.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
key_id: str):
"""
:param str key_id: The ARM ID of the Azure Key Vault key to encrypt / decrypt data. For example: `/subscriptions/<subscription-id>/resourceGroups/<resource-group-id>/providers/Microsoft.KeyVault/vaults/<key-vault-id>/keys/<key-name>`. Encryption will always take the latest version of the key, and hence a specific version is not supported.
"""
pulumi.set(__self__, "key_id", key_id)
@property
@pulumi.getter(name="keyId")
def key_id(self) -> str:
"""
The ARM ID of the Azure Key Vault key to encrypt / decrypt data. For example: `/subscriptions/<subscription-id>/resourceGroups/<resource-group-id>/providers/Microsoft.KeyVault/vaults/<key-vault-id>/keys/<key-name>`. Encryption will always take the latest version of the key, and hence a specific version is not supported.
"""
return pulumi.get(self, "key_id")
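# Example (illustrative comment only): the ARM key ID format expected by
# AzureClusterControlPlaneDatabaseEncryption, per the docstring above. The
# subscription, resource group, vault, and key names are placeholders.
#
#   database_encryption = AzureClusterControlPlaneDatabaseEncryption(
#       key_id="/subscriptions/00000000-0000-0000-0000-000000000000"
#              "/resourceGroups/example-rg/providers/Microsoft.KeyVault"
#              "/vaults/example-kv/keys/example-key",
#   )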
@pulumi.output_type
class AzureClusterControlPlaneMainVolume(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "sizeGib":
suggest = "size_gib"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AzureClusterControlPlaneMainVolume. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AzureClusterControlPlaneMainVolume.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AzureClusterControlPlaneMainVolume.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
size_gib: Optional[int] = None):
"""
:param int size_gib: Optional. The size of the disk, in GiBs. When unspecified, a default value is provided. See the specific reference in the parent resource.
"""
if size_gib is not None:
pulumi.set(__self__, "size_gib", size_gib)
@property
@pulumi.getter(name="sizeGib")
def size_gib(self) -> Optional[int]:
"""
Optional. The size of the disk, in GiBs. When unspecified, a default value is provided. See the specific reference in the parent resource.
"""
return pulumi.get(self, "size_gib")
@pulumi.output_type
class AzureClusterControlPlaneProxyConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "resourceGroupId":
suggest = "resource_group_id"
elif key == "secretId":
suggest = "secret_id"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AzureClusterControlPlaneProxyConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AzureClusterControlPlaneProxyConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AzureClusterControlPlaneProxyConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
resource_group_id: str,
secret_id: str):
"""
:param str resource_group_id: The ARM ID of the resource group containing the proxy Key Vault. Resource group IDs are formatted as `/subscriptions/<subscription-id>/resourceGroups/<resource-group-name>`.
:param str secret_id: The URL of the proxy setting secret, including its version. Secret IDs are formatted as `https://<key-vault-name>.vault.azure.net/secrets/<secret-name>/<secret-version>`.
"""
pulumi.set(__self__, "resource_group_id", resource_group_id)
pulumi.set(__self__, "secret_id", secret_id)
@property
@pulumi.getter(name="resourceGroupId")
def resource_group_id(self) -> str:
"""
The ARM ID of the resource group containing the proxy Key Vault. Resource group IDs are formatted as `/subscriptions/<subscription-id>/resourceGroups/<resource-group-name>`.
"""
return pulumi.get(self, "resource_group_id")
@property
@pulumi.getter(name="secretId")
def secret_id(self) -> str:
"""
The URL of the proxy setting secret, including its version. Secret IDs are formatted as `https://<key-vault-name>.vault.azure.net/secrets/<secret-name>/<secret-version>`.
"""
return pulumi.get(self, "secret_id")
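# Example (illustrative comment only): AzureClusterControlPlaneProxyConfig
# values in the formats described above; every identifier is a placeholder.
#
#   proxy_config = AzureClusterControlPlaneProxyConfig(
#       resource_group_id="/subscriptions/00000000-0000-0000-0000-000000000000"
#                         "/resourceGroups/example-rg",
#       secret_id="https://example-kv.vault.azure.net/secrets/proxy-config/0123456789abcdef",
#   )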
@pulumi.output_type
class AzureClusterControlPlaneReplicaPlacement(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "azureAvailabilityZone":
suggest = "azure_availability_zone"
elif key == "subnetId":
suggest = "subnet_id"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AzureClusterControlPlaneReplicaPlacement. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AzureClusterControlPlaneReplicaPlacement.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AzureClusterControlPlaneReplicaPlacement.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
azure_availability_zone: str,
subnet_id: str):
"""
:param str azure_availability_zone: For a given replica, the Azure availability zone in which to provision the control plane VM and the etcd disk.
:param str subnet_id: For a given replica, the ARM ID of the subnet where the control plane VM is deployed. Make sure it's a subnet under the virtual network in the cluster configuration.
"""
pulumi.set(__self__, "azure_availability_zone", azure_availability_zone)
pulumi.set(__self__, "subnet_id", subnet_id)
@property
@pulumi.getter(name="azureAvailabilityZone")
def azure_availability_zone(self) -> str:
"""
For a given replica, the Azure availability zone in which to provision the control plane VM and the etcd disk.
"""
return pulumi.get(self, "azure_availability_zone")
@property
@pulumi.getter(name="subnetId")
def subnet_id(self) -> str:
"""
For a given replica, the ARM ID of the subnet where the control plane VM is deployed. Make sure it's a subnet under the virtual network in the cluster configuration.
"""
return pulumi.get(self, "subnet_id")
@pulumi.output_type
class AzureClusterControlPlaneRootVolume(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "sizeGib":
suggest = "size_gib"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AzureClusterControlPlaneRootVolume. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AzureClusterControlPlaneRootVolume.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AzureClusterControlPlaneRootVolume.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
size_gib: Optional[int] = None):
"""
:param int size_gib: Optional. The size of the disk, in GiBs. When unspecified, a default value is provided. See the specific reference in the parent resource.
"""
if size_gib is not None:
pulumi.set(__self__, "size_gib", size_gib)
@property
@pulumi.getter(name="sizeGib")
def size_gib(self) -> Optional[int]:
"""
Optional. The size of the disk, in GiBs. When unspecified, a default value is provided. See the specific reference in the parent resource.
"""
return pulumi.get(self, "size_gib")
@pulumi.output_type
class AzureClusterControlPlaneSshConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "authorizedKey":
suggest = "authorized_key"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AzureClusterControlPlaneSshConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AzureClusterControlPlaneSshConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AzureClusterControlPlaneSshConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
authorized_key: str):
"""
:param str authorized_key: Required. The SSH public key data for VMs managed by Anthos. This accepts the authorized_keys file format used in OpenSSH according to the sshd(8) manual page.
"""
pulumi.set(__self__, "authorized_key", authorized_key)
@property
@pulumi.getter(name="authorizedKey")
def authorized_key(self) -> str:
"""
Required. The SSH public key data for VMs managed by Anthos. This accepts the authorized_keys file format used in OpenSSH according to the sshd(8) manual page.
"""
return pulumi.get(self, "authorized_key")
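# Example (illustrative comment only): authorized_key takes a single
# OpenSSH authorized_keys-format public key line; the key material below is
# truncated placeholder data.
#
#   ssh_config = AzureClusterControlPlaneSshConfig(
#       authorized_key="ssh-rsa AAAAB3NzaC1yc2E... admin@example",  # placeholder
#   )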
@pulumi.output_type
class AzureClusterFleet(dict):
def __init__(__self__, *,
membership: Optional[str] = None,
project: Optional[str] = None):
"""
:param str membership: The name of the managed Hub Membership resource associated with this cluster. Membership names are formatted as `projects/<project-number>/locations/global/membership/<cluster-id>`.
:param str project: The project for the resource
"""
if membership is not None:
pulumi.set(__self__, "membership", membership)
if project is not None:
pulumi.set(__self__, "project", project)
@property
@pulumi.getter
def membership(self) -> Optional[str]:
"""
The name of the managed Hub Membership resource associated with this cluster. Membership names are formatted as `projects/<project-number>/locations/global/membership/<cluster-id>`.
"""
return pulumi.get(self, "membership")
@property
@pulumi.getter
def project(self) -> Optional[str]:
"""
The project for the resource
"""
return pulumi.get(self, "project")
@pulumi.output_type
class AzureClusterNetworking(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "podAddressCidrBlocks":
suggest = "pod_address_cidr_blocks"
elif key == "serviceAddressCidrBlocks":
suggest = "service_address_cidr_blocks"
elif key == "virtualNetworkId":
suggest = "virtual_network_id"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AzureClusterNetworking. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AzureClusterNetworking.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AzureClusterNetworking.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
pod_address_cidr_blocks: Sequence[str],
service_address_cidr_blocks: Sequence[str],
virtual_network_id: str):
"""
:param Sequence[str] pod_address_cidr_blocks: Required. The IP address range of the pods in this cluster, in CIDR notation (e.g. `10.96.0.0/14`). All pods in the cluster get assigned a unique RFC1918 IPv4 address from these ranges. Only a single range is supported. This field cannot be changed after creation.
:param Sequence[str] service_address_cidr_blocks: Required. The IP address range for services in this cluster, in CIDR notation (e.g. `10.96.0.0/14`). All services in the cluster get assigned a unique RFC1918 IPv4 address from these ranges. Only a single range is supported. This field cannot be changed after creating a cluster.
:param str virtual_network_id: Required. The Azure Resource Manager (ARM) ID of the VNet associated with your cluster. All components in the cluster (i.e. control plane and node pools) run on a single VNet. Example: `/subscriptions/*/resourceGroups/*/providers/Microsoft.Network/virtualNetworks/*` This field cannot be changed after creation.
"""
pulumi.set(__self__, "pod_address_cidr_blocks", pod_address_cidr_blocks)
pulumi.set(__self__, "service_address_cidr_blocks", service_address_cidr_blocks)
pulumi.set(__self__, "virtual_network_id", virtual_network_id)
@property
@pulumi.getter(name="podAddressCidrBlocks")
def pod_address_cidr_blocks(self) -> Sequence[str]:
"""
Required. The IP address range of the pods in this cluster, in CIDR notation (e.g. `10.96.0.0/14`). All pods in the cluster get assigned a unique RFC1918 IPv4 address from these ranges. Only a single range is supported. This field cannot be changed after creation.
"""
return pulumi.get(self, "pod_address_cidr_blocks")
@property
@pulumi.getter(name="serviceAddressCidrBlocks")
def service_address_cidr_blocks(self) -> Sequence[str]:
"""
Required. The IP address range for services in this cluster, in CIDR notation (e.g. `10.96.0.0/14`). All services in the cluster get assigned a unique RFC1918 IPv4 address from these ranges. Only a single range is supported. This field cannot be changed after creating a cluster.
"""
return pulumi.get(self, "service_address_cidr_blocks")
@property
@pulumi.getter(name="virtualNetworkId")
def virtual_network_id(self) -> str:
"""
Required. The Azure Resource Manager (ARM) ID of the VNet associated with your cluster. All components in the cluster (i.e. control plane and node pools) run on a single VNet. Example: `/subscriptions/*/resourceGroups/*/providers/Microsoft.Network/virtualNetworks/*` This field cannot be changed after creation.
"""
return pulumi.get(self, "virtual_network_id")
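# Example (illustrative comment only): AzureClusterNetworking with a single
# pod range, a single service range, and an ARM VNet ID, mirroring the
# docstrings above. All values are placeholders and cannot be changed after
# cluster creation.
#
#   networking = AzureClusterNetworking(
#       pod_address_cidr_blocks=["10.200.0.0/16"],
#       service_address_cidr_blocks=["10.32.0.0/24"],
#       virtual_network_id="/subscriptions/00000000-0000-0000-0000-000000000000"
#                          "/resourceGroups/example-rg/providers/Microsoft.Network"
#                          "/virtualNetworks/example-vnet",
#   )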
@pulumi.output_type
class AzureClusterWorkloadIdentityConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "identityProvider":
suggest = "identity_provider"
elif key == "issuerUri":
suggest = "issuer_uri"
elif key == "workloadPool":
suggest = "workload_pool"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AzureClusterWorkloadIdentityConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AzureClusterWorkloadIdentityConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AzureClusterWorkloadIdentityConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
identity_provider: Optional[str] = None,
issuer_uri: Optional[str] = None,
workload_pool: Optional[str] = None):
if identity_provider is not None:
pulumi.set(__self__, "identity_provider", identity_provider)
if issuer_uri is not None:
pulumi.set(__self__, "issuer_uri", issuer_uri)
if workload_pool is not None:
pulumi.set(__self__, "workload_pool", workload_pool)
@property
@pulumi.getter(name="identityProvider")
def identity_provider(self) -> Optional[str]:
return pulumi.get(self, "identity_provider")
@property
@pulumi.getter(name="issuerUri")
def issuer_uri(self) -> Optional[str]:
return pulumi.get(self, "issuer_uri")
@property
@pulumi.getter(name="workloadPool")
def workload_pool(self) -> Optional[str]:
return pulumi.get(self, "workload_pool")
@pulumi.output_type
class AzureNodePoolAutoscaling(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "maxNodeCount":
suggest = "max_node_count"
elif key == "minNodeCount":
suggest = "min_node_count"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AzureNodePoolAutoscaling. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AzureNodePoolAutoscaling.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AzureNodePoolAutoscaling.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
max_node_count: int,
min_node_count: int):
"""
:param int max_node_count: Required. Maximum number of nodes in the node pool. Must be >= min_node_count.
:param int min_node_count: Required. Minimum number of nodes in the node pool. Must be >= 1 and <= max_node_count.
"""
pulumi.set(__self__, "max_node_count", max_node_count)
pulumi.set(__self__, "min_node_count", min_node_count)
@property
@pulumi.getter(name="maxNodeCount")
def max_node_count(self) -> int:
"""
Required. Maximum number of nodes in the node pool. Must be >= min_node_count.
"""
return pulumi.get(self, "max_node_count")
@property
@pulumi.getter(name="minNodeCount")
def min_node_count(self) -> int:
"""
Required. Minimum number of nodes in the node pool. Must be >= 1 and <= max_node_count.
"""
return pulumi.get(self, "min_node_count")
@pulumi.output_type
class AzureNodePoolConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "sshConfig":
suggest = "ssh_config"
elif key == "rootVolume":
suggest = "root_volume"
elif key == "vmSize":
suggest = "vm_size"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AzureNodePoolConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AzureNodePoolConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AzureNodePoolConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
ssh_config: 'outputs.AzureNodePoolConfigSshConfig',
root_volume: Optional['outputs.AzureNodePoolConfigRootVolume'] = None,
tags: Optional[Mapping[str, str]] = None,
vm_size: Optional[str] = None):
"""
:param 'AzureNodePoolConfigSshConfigArgs' ssh_config: Required. SSH configuration for how to access the node pool machines.
:param 'AzureNodePoolConfigRootVolumeArgs' root_volume: Optional. Configuration related to the root volume provisioned for each node pool machine. When unspecified, it defaults to a 32-GiB Azure Disk.
:param Mapping[str, str] tags: Optional. A set of tags to apply to all underlying Azure resources for this node pool. This currently only includes Virtual Machine Scale Sets. Specify at most 50 pairs containing alphanumerics, spaces, and symbols (.+-=_:@/). Keys can be up to 127 Unicode characters. Values can be up to 255 Unicode characters.
:param str vm_size: Optional. The Azure VM size name. Example: `Standard_DS2_v2`. See https://cloud.google.com/anthos/clusters/docs/azure/reference/supported-vms for options. When unspecified, it defaults to `Standard_DS2_v2`.
"""
pulumi.set(__self__, "ssh_config", ssh_config)
if root_volume is not None:
pulumi.set(__self__, "root_volume", root_volume)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if vm_size is not None:
pulumi.set(__self__, "vm_size", vm_size)
@property
@pulumi.getter(name="sshConfig")
def ssh_config(self) -> 'outputs.AzureNodePoolConfigSshConfig':
"""
Required. SSH configuration for how to access the node pool machines.
"""
return pulumi.get(self, "ssh_config")
@property
@pulumi.getter(name="rootVolume")
def root_volume(self) -> Optional['outputs.AzureNodePoolConfigRootVolume']:
"""
Optional. Configuration related to the root volume provisioned for each node pool machine. When unspecified, it defaults to a 32-GiB Azure Disk.
"""
return pulumi.get(self, "root_volume")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Optional. A set of tags to apply to all underlying Azure resources for this node pool. This currently only includes Virtual Machine Scale Sets. Specify at most 50 pairs containing alphanumerics, spaces, and symbols (.+-=_:@/). Keys can be up to 127 Unicode characters. Values can be up to 255 Unicode characters.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="vmSize")
def vm_size(self) -> Optional[str]:
"""
Optional. The Azure VM size name. Example: `Standard_DS2_v2`. See https://cloud.google.com/anthos/clusters/docs/azure/reference/supported-vms for options. When unspecified, it defaults to `Standard_DS2_v2`.
"""
return pulumi.get(self, "vm_size")
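# Example (illustrative comment only): an AzureNodePoolConfig where only the
# required ssh_config is set explicitly and the optional fields rely on the
# documented defaults (a 32-GiB root disk, `Standard_DS2_v2`). The key data
# below is a placeholder.
#
#   node_config = AzureNodePoolConfig(
#       ssh_config=AzureNodePoolConfigSshConfig(
#           authorized_key="ssh-ed25519 AAAAC3NzaC1lZDI1... admin@example",  # placeholder
#       ),
#       tags={"team": "platform"},
#   )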
@pulumi.output_type
class AzureNodePoolConfigRootVolume(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "sizeGib":
suggest = "size_gib"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AzureNodePoolConfigRootVolume. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AzureNodePoolConfigRootVolume.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AzureNodePoolConfigRootVolume.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
size_gib: Optional[int] = None):
"""
:param int size_gib: Optional. The size of the disk, in GiBs. When unspecified, a default value is provided. See the specific reference in the parent resource.
"""
if size_gib is not None:
pulumi.set(__self__, "size_gib", size_gib)
@property
@pulumi.getter(name="sizeGib")
def size_gib(self) -> Optional[int]:
"""
Optional. The size of the disk, in GiBs. When unspecified, a default value is provided. See the specific reference in the parent resource.
"""
return pulumi.get(self, "size_gib")
@pulumi.output_type
class AzureNodePoolConfigSshConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "authorizedKey":
suggest = "authorized_key"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AzureNodePoolConfigSshConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AzureNodePoolConfigSshConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AzureNodePoolConfigSshConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
authorized_key: str):
"""
:param str authorized_key: Required. The SSH public key data for VMs managed by Anthos. This accepts the authorized_keys file format used in OpenSSH according to the sshd(8) manual page.
"""
pulumi.set(__self__, "authorized_key", authorized_key)
@property
@pulumi.getter(name="authorizedKey")
def authorized_key(self) -> str:
"""
Required. The SSH public key data for VMs managed by Anthos. This accepts the authorized_keys file format used in OpenSSH according to the sshd(8) manual page.
"""
return pulumi.get(self, "authorized_key")
@pulumi.output_type
class AzureNodePoolMaxPodsConstraint(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "maxPodsPerNode":
suggest = "max_pods_per_node"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in AzureNodePoolMaxPodsConstraint. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
AzureNodePoolMaxPodsConstraint.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
AzureNodePoolMaxPodsConstraint.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
max_pods_per_node: int):
"""
:param int max_pods_per_node: Required. The maximum number of pods to schedule on a single node.
"""
pulumi.set(__self__, "max_pods_per_node", max_pods_per_node)
@property
@pulumi.getter(name="maxPodsPerNode")
def max_pods_per_node(self) -> int:
"""
Required. The maximum number of pods to schedule on a single node.
"""
return pulumi.get(self, "max_pods_per_node")
@pulumi.output_type
class ClusterAddonsConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "cloudrunConfig":
suggest = "cloudrun_config"
elif key == "configConnectorConfig":
suggest = "config_connector_config"
elif key == "dnsCacheConfig":
suggest = "dns_cache_config"
elif key == "gcePersistentDiskCsiDriverConfig":
suggest = "gce_persistent_disk_csi_driver_config"
elif key == "horizontalPodAutoscaling":
suggest = "horizontal_pod_autoscaling"
elif key == "httpLoadBalancing":
suggest = "http_load_balancing"
elif key == "istioConfig":
suggest = "istio_config"
elif key == "kalmConfig":
suggest = "kalm_config"
elif key == "networkPolicyConfig":
suggest = "network_policy_config"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterAddonsConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterAddonsConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterAddonsConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
cloudrun_config: Optional['outputs.ClusterAddonsConfigCloudrunConfig'] = None,
config_connector_config: Optional['outputs.ClusterAddonsConfigConfigConnectorConfig'] = None,
dns_cache_config: Optional['outputs.ClusterAddonsConfigDnsCacheConfig'] = None,
gce_persistent_disk_csi_driver_config: Optional['outputs.ClusterAddonsConfigGcePersistentDiskCsiDriverConfig'] = None,
horizontal_pod_autoscaling: Optional['outputs.ClusterAddonsConfigHorizontalPodAutoscaling'] = None,
http_load_balancing: Optional['outputs.ClusterAddonsConfigHttpLoadBalancing'] = None,
istio_config: Optional['outputs.ClusterAddonsConfigIstioConfig'] = None,
kalm_config: Optional['outputs.ClusterAddonsConfigKalmConfig'] = None,
network_policy_config: Optional['outputs.ClusterAddonsConfigNetworkPolicyConfig'] = None):
"""
:param 'ClusterAddonsConfigCloudrunConfigArgs' cloudrun_config: Structure is documented below.
:param 'ClusterAddonsConfigConfigConnectorConfigArgs' config_connector_config: The status of the ConfigConnector addon. It is disabled by default; set `enabled = true` to enable.
:param 'ClusterAddonsConfigDnsCacheConfigArgs' dns_cache_config: The status of the NodeLocal DNSCache addon. It is disabled by default. Set `enabled = true` to enable.
:param 'ClusterAddonsConfigGcePersistentDiskCsiDriverConfigArgs' gce_persistent_disk_csi_driver_config: Whether this cluster should enable the Google Compute Engine Persistent Disk Container Storage Interface (CSI) Driver. Defaults to disabled; set `enabled = true` to enable.
:param 'ClusterAddonsConfigHorizontalPodAutoscalingArgs' horizontal_pod_autoscaling: The status of the Horizontal Pod Autoscaling
addon, which increases or decreases the number of replica pods a replication controller
has based on the resource usage of the existing pods.
It is enabled by default;
set `disabled = true` to disable.
:param 'ClusterAddonsConfigHttpLoadBalancingArgs' http_load_balancing: The status of the HTTP (L7) load balancing
controller addon, which makes it easy to set up HTTP load balancers for services in a
cluster. It is enabled by default; set `disabled = true` to disable.
:param 'ClusterAddonsConfigIstioConfigArgs' istio_config: Structure is documented below.
:param 'ClusterAddonsConfigKalmConfigArgs' kalm_config: Configuration for the KALM addon, which manages the lifecycle of k8s applications. It is disabled by default; set `enabled = true` to enable.
:param 'ClusterAddonsConfigNetworkPolicyConfigArgs' network_policy_config: Whether we should enable the network policy addon
for the master. This must be enabled in order to enable network policy for the nodes.
To enable this, you must also define a `network_policy` block,
otherwise nothing will happen.
It can only be disabled if the nodes already do not have network policies enabled.
Defaults to disabled; set `disabled = false` to enable.
"""
if cloudrun_config is not None:
pulumi.set(__self__, "cloudrun_config", cloudrun_config)
if config_connector_config is not None:
pulumi.set(__self__, "config_connector_config", config_connector_config)
if dns_cache_config is not None:
pulumi.set(__self__, "dns_cache_config", dns_cache_config)
if gce_persistent_disk_csi_driver_config is not None:
pulumi.set(__self__, "gce_persistent_disk_csi_driver_config", gce_persistent_disk_csi_driver_config)
if horizontal_pod_autoscaling is not None:
pulumi.set(__self__, "horizontal_pod_autoscaling", horizontal_pod_autoscaling)
if http_load_balancing is not None:
pulumi.set(__self__, "http_load_balancing", http_load_balancing)
if istio_config is not None:
pulumi.set(__self__, "istio_config", istio_config)
if kalm_config is not None:
pulumi.set(__self__, "kalm_config", kalm_config)
if network_policy_config is not None:
pulumi.set(__self__, "network_policy_config", network_policy_config)
@property
@pulumi.getter(name="cloudrunConfig")
def cloudrun_config(self) -> Optional['outputs.ClusterAddonsConfigCloudrunConfig']:
"""
        The status of the CloudRun addon. It is disabled by default; set `disabled = false` to enable. Structure is documented below.
"""
return pulumi.get(self, "cloudrun_config")
@property
@pulumi.getter(name="configConnectorConfig")
def config_connector_config(self) -> Optional['outputs.ClusterAddonsConfigConfigConnectorConfig']:
"""
        The status of the ConfigConnector addon. It is disabled by default; set `enabled = true` to enable.
"""
return pulumi.get(self, "config_connector_config")
@property
@pulumi.getter(name="dnsCacheConfig")
def dns_cache_config(self) -> Optional['outputs.ClusterAddonsConfigDnsCacheConfig']:
"""
        The status of the NodeLocal DNSCache addon. It is disabled by default.
        Set `enabled = true` to enable.
"""
return pulumi.get(self, "dns_cache_config")
@property
@pulumi.getter(name="gcePersistentDiskCsiDriverConfig")
def gce_persistent_disk_csi_driver_config(self) -> Optional['outputs.ClusterAddonsConfigGcePersistentDiskCsiDriverConfig']:
"""
        Whether this cluster should enable the Google Compute Engine Persistent Disk Container Storage Interface (CSI) Driver. Defaults to disabled; set `enabled = true` to enable.
"""
return pulumi.get(self, "gce_persistent_disk_csi_driver_config")
@property
@pulumi.getter(name="horizontalPodAutoscaling")
def horizontal_pod_autoscaling(self) -> Optional['outputs.ClusterAddonsConfigHorizontalPodAutoscaling']:
"""
The status of the Horizontal Pod Autoscaling
addon, which increases or decreases the number of replica pods a replication controller
has based on the resource usage of the existing pods.
It is enabled by default;
set `disabled = true` to disable.
"""
return pulumi.get(self, "horizontal_pod_autoscaling")
@property
@pulumi.getter(name="httpLoadBalancing")
def http_load_balancing(self) -> Optional['outputs.ClusterAddonsConfigHttpLoadBalancing']:
"""
The status of the HTTP (L7) load balancing
controller addon, which makes it easy to set up HTTP load balancers for services in a
cluster. It is enabled by default; set `disabled = true` to disable.
"""
return pulumi.get(self, "http_load_balancing")
@property
@pulumi.getter(name="istioConfig")
def istio_config(self) -> Optional['outputs.ClusterAddonsConfigIstioConfig']:
"""
        The status of the Istio addon, which makes it easy to set up Istio for services in a
        cluster. It is disabled by default; set `disabled = false` to enable. Structure is documented below.
"""
return pulumi.get(self, "istio_config")
@property
@pulumi.getter(name="kalmConfig")
def kalm_config(self) -> Optional['outputs.ClusterAddonsConfigKalmConfig']:
"""
        Configuration for the KALM addon, which manages the lifecycle of k8s
        applications. It is disabled by default; set `enabled = true` to enable.
"""
return pulumi.get(self, "kalm_config")
@property
@pulumi.getter(name="networkPolicyConfig")
def network_policy_config(self) -> Optional['outputs.ClusterAddonsConfigNetworkPolicyConfig']:
"""
Whether we should enable the network policy addon
for the master. This must be enabled in order to enable network policy for the nodes.
To enable this, you must also define a `network_policy` block,
otherwise nothing will happen.
It can only be disabled if the nodes already do not have network policies enabled.
Defaults to disabled; set `disabled = false` to enable.
"""
return pulumi.get(self, "network_policy_config")
@pulumi.output_type
class ClusterAddonsConfigCloudrunConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "loadBalancerType":
suggest = "load_balancer_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterAddonsConfigCloudrunConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterAddonsConfigCloudrunConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterAddonsConfigCloudrunConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
disabled: bool,
load_balancer_type: Optional[str] = None):
"""
        :param bool disabled: The status of the CloudRun addon. It is disabled by default;
               set `disabled = false` to enable.
        :param str load_balancer_type: The load balancer type of the CloudRun ingress service. It uses an external load balancer by default.
               Set `load_balancer_type=LOAD_BALANCER_TYPE_INTERNAL` to configure an internal load balancer.
"""
pulumi.set(__self__, "disabled", disabled)
if load_balancer_type is not None:
pulumi.set(__self__, "load_balancer_type", load_balancer_type)
@property
@pulumi.getter
def disabled(self) -> bool:
"""
        The status of the CloudRun addon. It is disabled by default;
        set `disabled = false` to enable.
"""
return pulumi.get(self, "disabled")
@property
@pulumi.getter(name="loadBalancerType")
def load_balancer_type(self) -> Optional[str]:
"""
        The load balancer type of the CloudRun ingress service. It uses an external load balancer by default.
        Set `load_balancer_type=LOAD_BALANCER_TYPE_INTERNAL` to configure an internal load balancer.
"""
return pulumi.get(self, "load_balancer_type")
@pulumi.output_type
class ClusterAddonsConfigConfigConnectorConfig(dict):
def __init__(__self__, *,
enabled: bool):
"""
        :param bool enabled: Whether the ConfigConnector addon is enabled for this cluster.
               It is disabled by default; set `enabled = true` to enable.
"""
pulumi.set(__self__, "enabled", enabled)
@property
@pulumi.getter
def enabled(self) -> bool:
"""
        Whether the ConfigConnector addon is enabled for this cluster.
        It is disabled by default; set `enabled = true` to enable.
"""
return pulumi.get(self, "enabled")
@pulumi.output_type
class ClusterAddonsConfigDnsCacheConfig(dict):
def __init__(__self__, *,
enabled: bool):
"""
        :param bool enabled: Whether the NodeLocal DNSCache addon is enabled for this cluster.
               It is disabled by default; set `enabled = true` to enable.
"""
pulumi.set(__self__, "enabled", enabled)
@property
@pulumi.getter
def enabled(self) -> bool:
"""
        Whether the NodeLocal DNSCache addon is enabled for this cluster.
        It is disabled by default; set `enabled = true` to enable.
"""
return pulumi.get(self, "enabled")
@pulumi.output_type
class ClusterAddonsConfigGcePersistentDiskCsiDriverConfig(dict):
def __init__(__self__, *,
enabled: bool):
"""
        :param bool enabled: Whether the Google Compute Engine Persistent Disk CSI Driver is enabled for this cluster.
               Defaults to disabled; set `enabled = true` to enable.
"""
pulumi.set(__self__, "enabled", enabled)
@property
@pulumi.getter
def enabled(self) -> bool:
"""
        Whether the Google Compute Engine Persistent Disk CSI Driver is enabled for this cluster.
        Defaults to disabled; set `enabled = true` to enable.
"""
return pulumi.get(self, "enabled")
@pulumi.output_type
class ClusterAddonsConfigHorizontalPodAutoscaling(dict):
def __init__(__self__, *,
disabled: bool):
"""
        :param bool disabled: The status of the Horizontal Pod Autoscaling addon. It is enabled by default;
               set `disabled = true` to disable.
"""
pulumi.set(__self__, "disabled", disabled)
@property
@pulumi.getter
def disabled(self) -> bool:
"""
        The status of the Horizontal Pod Autoscaling addon. It is enabled by default;
        set `disabled = true` to disable.
"""
return pulumi.get(self, "disabled")
@pulumi.output_type
class ClusterAddonsConfigHttpLoadBalancing(dict):
def __init__(__self__, *,
disabled: bool):
"""
        :param bool disabled: The status of the HTTP (L7) load balancing controller addon. It is enabled by default;
               set `disabled = true` to disable.
"""
pulumi.set(__self__, "disabled", disabled)
@property
@pulumi.getter
def disabled(self) -> bool:
"""
        The status of the HTTP (L7) load balancing controller addon. It is enabled by default;
        set `disabled = true` to disable.
"""
return pulumi.get(self, "disabled")
@pulumi.output_type
class ClusterAddonsConfigIstioConfig(dict):
def __init__(__self__, *,
disabled: bool,
auth: Optional[str] = None):
"""
:param bool disabled: The status of the Istio addon, which makes it easy to set up Istio for services in a
cluster. It is disabled by default. Set `disabled = false` to enable.
:param str auth: The authentication type between services in Istio. Available options include `AUTH_MUTUAL_TLS`.
"""
pulumi.set(__self__, "disabled", disabled)
if auth is not None:
pulumi.set(__self__, "auth", auth)
@property
@pulumi.getter
def disabled(self) -> bool:
"""
The status of the Istio addon, which makes it easy to set up Istio for services in a
cluster. It is disabled by default. Set `disabled = false` to enable.
"""
return pulumi.get(self, "disabled")
@property
@pulumi.getter
def auth(self) -> Optional[str]:
"""
The authentication type between services in Istio. Available options include `AUTH_MUTUAL_TLS`.
"""
return pulumi.get(self, "auth")
@pulumi.output_type
class ClusterAddonsConfigKalmConfig(dict):
def __init__(__self__, *,
enabled: bool):
"""
        :param bool enabled: Whether the KALM addon is enabled for this cluster.
               It is disabled by default; set `enabled = true` to enable.
"""
pulumi.set(__self__, "enabled", enabled)
@property
@pulumi.getter
def enabled(self) -> bool:
"""
        Whether the KALM addon is enabled for this cluster.
        It is disabled by default; set `enabled = true` to enable.
"""
return pulumi.get(self, "enabled")
@pulumi.output_type
class ClusterAddonsConfigNetworkPolicyConfig(dict):
def __init__(__self__, *,
disabled: bool):
"""
        :param bool disabled: Whether the network policy addon for the master is disabled.
               Defaults to disabled; set `disabled = false` to enable.
"""
pulumi.set(__self__, "disabled", disabled)
@property
@pulumi.getter
def disabled(self) -> bool:
"""
        Whether the network policy addon for the master is disabled.
        Defaults to disabled; set `disabled = false` to enable.
"""
return pulumi.get(self, "disabled")
@pulumi.output_type
class ClusterAuthenticatorGroupsConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "securityGroup":
suggest = "security_group"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterAuthenticatorGroupsConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterAuthenticatorGroupsConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterAuthenticatorGroupsConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
security_group: str):
"""
:param str security_group: The name of the RBAC security group for use with Google security groups in Kubernetes RBAC. Group name must be in format `gke-security-groups@yourdomain.com`.
"""
pulumi.set(__self__, "security_group", security_group)
@property
@pulumi.getter(name="securityGroup")
def security_group(self) -> str:
"""
The name of the RBAC security group for use with Google security groups in Kubernetes RBAC. Group name must be in format `gke-security-groups@yourdomain.com`.
"""
return pulumi.get(self, "security_group")
@pulumi.output_type
class ClusterClusterAutoscaling(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "autoProvisioningDefaults":
suggest = "auto_provisioning_defaults"
elif key == "autoscalingProfile":
suggest = "autoscaling_profile"
elif key == "resourceLimits":
suggest = "resource_limits"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterClusterAutoscaling. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterClusterAutoscaling.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterClusterAutoscaling.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
enabled: bool,
auto_provisioning_defaults: Optional['outputs.ClusterClusterAutoscalingAutoProvisioningDefaults'] = None,
autoscaling_profile: Optional[str] = None,
resource_limits: Optional[Sequence['outputs.ClusterClusterAutoscalingResourceLimit']] = None):
"""
        :param bool enabled: Whether node auto-provisioning is enabled. Resource
               limits for `cpu` and `memory` must be defined to enable node auto-provisioning.
:param 'ClusterClusterAutoscalingAutoProvisioningDefaultsArgs' auto_provisioning_defaults: Contains defaults for a node pool created by NAP.
Structure is documented below.
        :param str autoscaling_profile: Configuration
options for the [Autoscaling profile](https://cloud.google.com/kubernetes-engine/docs/concepts/cluster-autoscaler#autoscaling_profiles)
feature, which lets you choose whether the cluster autoscaler should optimize for resource utilization or resource availability
when deciding to remove nodes from a cluster. Can be `BALANCED` or `OPTIMIZE_UTILIZATION`. Defaults to `BALANCED`.
:param Sequence['ClusterClusterAutoscalingResourceLimitArgs'] resource_limits: Global constraints for machine resources in the
cluster. Configuring the `cpu` and `memory` types is required if node
auto-provisioning is enabled. These limits will apply to node pool autoscaling
in addition to node auto-provisioning. Structure is documented below.
"""
pulumi.set(__self__, "enabled", enabled)
if auto_provisioning_defaults is not None:
pulumi.set(__self__, "auto_provisioning_defaults", auto_provisioning_defaults)
if autoscaling_profile is not None:
pulumi.set(__self__, "autoscaling_profile", autoscaling_profile)
if resource_limits is not None:
pulumi.set(__self__, "resource_limits", resource_limits)
@property
@pulumi.getter
def enabled(self) -> bool:
"""
        Whether node auto-provisioning is enabled. Resource
        limits for `cpu` and `memory` must be defined to enable node auto-provisioning.
"""
return pulumi.get(self, "enabled")
@property
@pulumi.getter(name="autoProvisioningDefaults")
def auto_provisioning_defaults(self) -> Optional['outputs.ClusterClusterAutoscalingAutoProvisioningDefaults']:
"""
Contains defaults for a node pool created by NAP.
Structure is documented below.
"""
return pulumi.get(self, "auto_provisioning_defaults")
@property
@pulumi.getter(name="autoscalingProfile")
def autoscaling_profile(self) -> Optional[str]:
"""
        Configuration
options for the [Autoscaling profile](https://cloud.google.com/kubernetes-engine/docs/concepts/cluster-autoscaler#autoscaling_profiles)
feature, which lets you choose whether the cluster autoscaler should optimize for resource utilization or resource availability
when deciding to remove nodes from a cluster. Can be `BALANCED` or `OPTIMIZE_UTILIZATION`. Defaults to `BALANCED`.
"""
return pulumi.get(self, "autoscaling_profile")
@property
@pulumi.getter(name="resourceLimits")
def resource_limits(self) -> Optional[Sequence['outputs.ClusterClusterAutoscalingResourceLimit']]:
"""
Global constraints for machine resources in the
cluster. Configuring the `cpu` and `memory` types is required if node
auto-provisioning is enabled. These limits will apply to node pool autoscaling
in addition to node auto-provisioning. Structure is documented below.
"""
return pulumi.get(self, "resource_limits")
@pulumi.output_type
class ClusterClusterAutoscalingAutoProvisioningDefaults(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "minCpuPlatform":
suggest = "min_cpu_platform"
elif key == "oauthScopes":
suggest = "oauth_scopes"
elif key == "serviceAccount":
suggest = "service_account"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterClusterAutoscalingAutoProvisioningDefaults. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterClusterAutoscalingAutoProvisioningDefaults.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterClusterAutoscalingAutoProvisioningDefaults.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
min_cpu_platform: Optional[str] = None,
oauth_scopes: Optional[Sequence[str]] = None,
service_account: Optional[str] = None):
"""
:param str min_cpu_platform: Minimum CPU platform to be used by this instance.
The instance may be scheduled on the specified or newer CPU platform. Applicable
values are the friendly names of CPU platforms, such as `Intel Haswell`. See the
[official documentation](https://cloud.google.com/compute/docs/instances/specify-min-cpu-platform)
for more information.
:param Sequence[str] oauth_scopes: The set of Google API scopes to be made available
on all of the node VMs under the "default" service account.
Use the "https://www.googleapis.com/auth/cloud-platform" scope to grant access to all APIs. It is recommended that you set `service_account` to a non-default service account and grant IAM roles to that service account for only the resources that it needs.
:param str service_account: The service account to be used by the Node VMs.
If not specified, the "default" service account is used.
"""
if min_cpu_platform is not None:
pulumi.set(__self__, "min_cpu_platform", min_cpu_platform)
if oauth_scopes is not None:
pulumi.set(__self__, "oauth_scopes", oauth_scopes)
if service_account is not None:
pulumi.set(__self__, "service_account", service_account)
@property
@pulumi.getter(name="minCpuPlatform")
def min_cpu_platform(self) -> Optional[str]:
"""
Minimum CPU platform to be used by this instance.
The instance may be scheduled on the specified or newer CPU platform. Applicable
values are the friendly names of CPU platforms, such as `Intel Haswell`. See the
[official documentation](https://cloud.google.com/compute/docs/instances/specify-min-cpu-platform)
for more information.
"""
return pulumi.get(self, "min_cpu_platform")
@property
@pulumi.getter(name="oauthScopes")
def oauth_scopes(self) -> Optional[Sequence[str]]:
"""
The set of Google API scopes to be made available
on all of the node VMs under the "default" service account.
Use the "https://www.googleapis.com/auth/cloud-platform" scope to grant access to all APIs. It is recommended that you set `service_account` to a non-default service account and grant IAM roles to that service account for only the resources that it needs.
"""
return pulumi.get(self, "oauth_scopes")
@property
@pulumi.getter(name="serviceAccount")
def service_account(self) -> Optional[str]:
"""
The service account to be used by the Node VMs.
If not specified, the "default" service account is used.
"""
return pulumi.get(self, "service_account")
@pulumi.output_type
class ClusterClusterAutoscalingResourceLimit(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "resourceType":
suggest = "resource_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterClusterAutoscalingResourceLimit. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterClusterAutoscalingResourceLimit.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterClusterAutoscalingResourceLimit.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
resource_type: str,
maximum: Optional[int] = None,
minimum: Optional[int] = None):
"""
:param str resource_type: The type of the resource. For example, `cpu` and
`memory`. See the [guide to using Node Auto-Provisioning](https://cloud.google.com/kubernetes-engine/docs/how-to/node-auto-provisioning)
for a list of types.
:param int maximum: Maximum amount of the resource in the cluster.
:param int minimum: Minimum amount of the resource in the cluster.
"""
pulumi.set(__self__, "resource_type", resource_type)
if maximum is not None:
pulumi.set(__self__, "maximum", maximum)
if minimum is not None:
pulumi.set(__self__, "minimum", minimum)
@property
@pulumi.getter(name="resourceType")
def resource_type(self) -> str:
"""
The type of the resource. For example, `cpu` and
`memory`. See the [guide to using Node Auto-Provisioning](https://cloud.google.com/kubernetes-engine/docs/how-to/node-auto-provisioning)
for a list of types.
"""
return pulumi.get(self, "resource_type")
@property
@pulumi.getter
def maximum(self) -> Optional[int]:
"""
Maximum amount of the resource in the cluster.
"""
return pulumi.get(self, "maximum")
@property
@pulumi.getter
def minimum(self) -> Optional[int]:
"""
Minimum amount of the resource in the cluster.
"""
return pulumi.get(self, "minimum")
@pulumi.output_type
class ClusterClusterTelemetry(dict):
def __init__(__self__, *,
type: str):
"""
        :param str type: Telemetry integration for the cluster. Supported values are `ENABLED`, `DISABLED`, and `SYSTEM_ONLY` (beta).
"""
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def type(self) -> str:
"""
        Telemetry integration for the cluster. Supported values are `ENABLED`, `DISABLED`, and `SYSTEM_ONLY` (beta).
"""
return pulumi.get(self, "type")
@pulumi.output_type
class ClusterConfidentialNodes(dict):
def __init__(__self__, *,
enabled: bool):
"""
        :param bool enabled: Enable Confidential Nodes for this cluster.
"""
pulumi.set(__self__, "enabled", enabled)
@property
@pulumi.getter
def enabled(self) -> bool:
"""
        Enable Confidential Nodes for this cluster.
"""
return pulumi.get(self, "enabled")
@pulumi.output_type
class ClusterDatabaseEncryption(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "keyName":
suggest = "key_name"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterDatabaseEncryption. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterDatabaseEncryption.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterDatabaseEncryption.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
state: str,
key_name: Optional[str] = None):
"""
:param str state: `ENCRYPTED` or `DECRYPTED`
:param str key_name: the key to use to encrypt/decrypt secrets. See the [DatabaseEncryption definition](https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1beta1/projects.locations.clusters#Cluster.DatabaseEncryption) for more information.
"""
pulumi.set(__self__, "state", state)
if key_name is not None:
pulumi.set(__self__, "key_name", key_name)
@property
@pulumi.getter
def state(self) -> str:
"""
`ENCRYPTED` or `DECRYPTED`
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="keyName")
def key_name(self) -> Optional[str]:
"""
the key to use to encrypt/decrypt secrets. See the [DatabaseEncryption definition](https://cloud.google.com/kubernetes-engine/docs/reference/rest/v1beta1/projects.locations.clusters#Cluster.DatabaseEncryption) for more information.
"""
return pulumi.get(self, "key_name")
@pulumi.output_type
class ClusterDefaultSnatStatus(dict):
def __init__(__self__, *,
disabled: bool):
"""
        :param bool disabled: Whether the cluster's default source NAT (sNAT) rules should be disabled.
"""
pulumi.set(__self__, "disabled", disabled)
@property
@pulumi.getter
def disabled(self) -> bool:
"""
        Whether the cluster's default source NAT (sNAT) rules should be disabled.
"""
return pulumi.get(self, "disabled")
@pulumi.output_type
class ClusterDnsConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "clusterDns":
suggest = "cluster_dns"
elif key == "clusterDnsDomain":
suggest = "cluster_dns_domain"
elif key == "clusterDnsScope":
suggest = "cluster_dns_scope"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterDnsConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterDnsConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterDnsConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
cluster_dns: Optional[str] = None,
cluster_dns_domain: Optional[str] = None,
cluster_dns_scope: Optional[str] = None):
"""
:param str cluster_dns: Which in-cluster DNS provider should be used. `PROVIDER_UNSPECIFIED` (default) or `PLATFORM_DEFAULT` or `CLOUD_DNS`.
:param str cluster_dns_domain: The suffix used for all cluster service records.
:param str cluster_dns_scope: The scope of access to cluster DNS records. `DNS_SCOPE_UNSPECIFIED` (default) or `CLUSTER_SCOPE` or `VPC_SCOPE`.
"""
if cluster_dns is not None:
pulumi.set(__self__, "cluster_dns", cluster_dns)
if cluster_dns_domain is not None:
pulumi.set(__self__, "cluster_dns_domain", cluster_dns_domain)
if cluster_dns_scope is not None:
pulumi.set(__self__, "cluster_dns_scope", cluster_dns_scope)
@property
@pulumi.getter(name="clusterDns")
def cluster_dns(self) -> Optional[str]:
"""
Which in-cluster DNS provider should be used. `PROVIDER_UNSPECIFIED` (default) or `PLATFORM_DEFAULT` or `CLOUD_DNS`.
"""
return pulumi.get(self, "cluster_dns")
@property
@pulumi.getter(name="clusterDnsDomain")
def cluster_dns_domain(self) -> Optional[str]:
"""
The suffix used for all cluster service records.
"""
return pulumi.get(self, "cluster_dns_domain")
@property
@pulumi.getter(name="clusterDnsScope")
def cluster_dns_scope(self) -> Optional[str]:
"""
The scope of access to cluster DNS records. `DNS_SCOPE_UNSPECIFIED` (default) or `CLUSTER_SCOPE` or `VPC_SCOPE`.
"""
return pulumi.get(self, "cluster_dns_scope")
@pulumi.output_type
class ClusterIpAllocationPolicy(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "clusterIpv4CidrBlock":
suggest = "cluster_ipv4_cidr_block"
elif key == "clusterSecondaryRangeName":
suggest = "cluster_secondary_range_name"
elif key == "servicesIpv4CidrBlock":
suggest = "services_ipv4_cidr_block"
elif key == "servicesSecondaryRangeName":
suggest = "services_secondary_range_name"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterIpAllocationPolicy. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterIpAllocationPolicy.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterIpAllocationPolicy.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
cluster_ipv4_cidr_block: Optional[str] = None,
cluster_secondary_range_name: Optional[str] = None,
services_ipv4_cidr_block: Optional[str] = None,
services_secondary_range_name: Optional[str] = None):
"""
:param str cluster_ipv4_cidr_block: The IP address range for the cluster pod IPs.
Set to blank to have a range chosen with the default size. Set to /netmask (e.g. /14)
to have a range chosen with a specific netmask. Set to a CIDR notation (e.g. 10.96.0.0/14)
from the RFC-1918 private networks (e.g. 10.0.0.0/8, 172.16.0.0/12, 192.168.0.0/16) to
pick a specific range to use.
:param str cluster_secondary_range_name: The name of the existing secondary
range in the cluster's subnetwork to use for pod IP addresses. Alternatively,
`cluster_ipv4_cidr_block` can be used to automatically create a GKE-managed one.
:param str services_ipv4_cidr_block: The IP address range of the services IPs in this cluster.
Set to blank to have a range chosen with the default size. Set to /netmask (e.g. /14)
to have a range chosen with a specific netmask. Set to a CIDR notation (e.g. 10.96.0.0/14)
from the RFC-1918 private networks (e.g. 10.0.0.0/8, 172.16.0.0/12, 192.168.0.0/16) to
pick a specific range to use.
:param str services_secondary_range_name: The name of the existing
secondary range in the cluster's subnetwork to use for service `ClusterIP`s.
Alternatively, `services_ipv4_cidr_block` can be used to automatically create a
GKE-managed one.
"""
if cluster_ipv4_cidr_block is not None:
pulumi.set(__self__, "cluster_ipv4_cidr_block", cluster_ipv4_cidr_block)
if cluster_secondary_range_name is not None:
pulumi.set(__self__, "cluster_secondary_range_name", cluster_secondary_range_name)
if services_ipv4_cidr_block is not None:
pulumi.set(__self__, "services_ipv4_cidr_block", services_ipv4_cidr_block)
if services_secondary_range_name is not None:
pulumi.set(__self__, "services_secondary_range_name", services_secondary_range_name)
@property
@pulumi.getter(name="clusterIpv4CidrBlock")
def cluster_ipv4_cidr_block(self) -> Optional[str]:
"""
The IP address range for the cluster pod IPs.
Set to blank to have a range chosen with the default size. Set to /netmask (e.g. /14)
to have a range chosen with a specific netmask. Set to a CIDR notation (e.g. 10.96.0.0/14)
from the RFC-1918 private networks (e.g. 10.0.0.0/8, 172.16.0.0/12, 192.168.0.0/16) to
pick a specific range to use.
"""
return pulumi.get(self, "cluster_ipv4_cidr_block")
@property
@pulumi.getter(name="clusterSecondaryRangeName")
def cluster_secondary_range_name(self) -> Optional[str]:
"""
The name of the existing secondary
range in the cluster's subnetwork to use for pod IP addresses. Alternatively,
`cluster_ipv4_cidr_block` can be used to automatically create a GKE-managed one.
"""
return pulumi.get(self, "cluster_secondary_range_name")
@property
@pulumi.getter(name="servicesIpv4CidrBlock")
def services_ipv4_cidr_block(self) -> Optional[str]:
"""
The IP address range of the services IPs in this cluster.
Set to blank to have a range chosen with the default size. Set to /netmask (e.g. /14)
to have a range chosen with a specific netmask. Set to a CIDR notation (e.g. 10.96.0.0/14)
from the RFC-1918 private networks (e.g. 10.0.0.0/8, 172.16.0.0/12, 192.168.0.0/16) to
pick a specific range to use.
"""
return pulumi.get(self, "services_ipv4_cidr_block")
@property
@pulumi.getter(name="servicesSecondaryRangeName")
def services_secondary_range_name(self) -> Optional[str]:
"""
The name of the existing
secondary range in the cluster's subnetwork to use for service `ClusterIP`s.
Alternatively, `services_ipv4_cidr_block` can be used to automatically create a
GKE-managed one.
"""
return pulumi.get(self, "services_secondary_range_name")
@pulumi.output_type
class ClusterLoggingConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "enableComponents":
suggest = "enable_components"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterLoggingConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterLoggingConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterLoggingConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
enable_components: Sequence[str]):
"""
        :param Sequence[str] enable_components: The GKE components exposing logs. In the GA provider only `SYSTEM_COMPONENTS` is supported; in the beta provider both `SYSTEM_COMPONENTS` and `WORKLOADS` are supported.
"""
pulumi.set(__self__, "enable_components", enable_components)
@property
@pulumi.getter(name="enableComponents")
def enable_components(self) -> Sequence[str]:
"""
        The GKE components exposing logs. In the GA provider only `SYSTEM_COMPONENTS` is supported; in the beta provider both `SYSTEM_COMPONENTS` and `WORKLOADS` are supported.
"""
return pulumi.get(self, "enable_components")
@pulumi.output_type
class ClusterMaintenancePolicy(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "dailyMaintenanceWindow":
suggest = "daily_maintenance_window"
elif key == "maintenanceExclusions":
suggest = "maintenance_exclusions"
elif key == "recurringWindow":
suggest = "recurring_window"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterMaintenancePolicy. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterMaintenancePolicy.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterMaintenancePolicy.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
daily_maintenance_window: Optional['outputs.ClusterMaintenancePolicyDailyMaintenanceWindow'] = None,
maintenance_exclusions: Optional[Sequence['outputs.ClusterMaintenancePolicyMaintenanceExclusion']] = None,
recurring_window: Optional['outputs.ClusterMaintenancePolicyRecurringWindow'] = None):
"""
:param 'ClusterMaintenancePolicyDailyMaintenanceWindowArgs' daily_maintenance_window: Time window specified for daily maintenance operations.
               Specify `start_time` in [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) format "HH:MM",
               where HH : \[00-23\] and MM : \[00-59\] GMT.
:param Sequence['ClusterMaintenancePolicyMaintenanceExclusionArgs'] maintenance_exclusions: Exceptions to maintenance window. Non-emergency maintenance should not occur in these windows. A cluster can have up to three maintenance exclusions at a time [Maintenance Window and Exclusions](https://cloud.google.com/kubernetes-engine/docs/concepts/maintenance-windows-and-exclusions)
:param 'ClusterMaintenancePolicyRecurringWindowArgs' recurring_window: Time window for recurring maintenance operations.
"""
if daily_maintenance_window is not None:
pulumi.set(__self__, "daily_maintenance_window", daily_maintenance_window)
if maintenance_exclusions is not None:
pulumi.set(__self__, "maintenance_exclusions", maintenance_exclusions)
if recurring_window is not None:
pulumi.set(__self__, "recurring_window", recurring_window)
@property
@pulumi.getter(name="dailyMaintenanceWindow")
def daily_maintenance_window(self) -> Optional['outputs.ClusterMaintenancePolicyDailyMaintenanceWindow']:
"""
Time window specified for daily maintenance operations.
        Specify `start_time` in [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) format "HH:MM",
        where HH : \[00-23\] and MM : \[00-59\] GMT.
"""
return pulumi.get(self, "daily_maintenance_window")
@property
@pulumi.getter(name="maintenanceExclusions")
def maintenance_exclusions(self) -> Optional[Sequence['outputs.ClusterMaintenancePolicyMaintenanceExclusion']]:
"""
Exceptions to maintenance window. Non-emergency maintenance should not occur in these windows. A cluster can have up to three maintenance exclusions at a time [Maintenance Window and Exclusions](https://cloud.google.com/kubernetes-engine/docs/concepts/maintenance-windows-and-exclusions)
"""
return pulumi.get(self, "maintenance_exclusions")
@property
@pulumi.getter(name="recurringWindow")
def recurring_window(self) -> Optional['outputs.ClusterMaintenancePolicyRecurringWindow']:
"""
Time window for recurring maintenance operations.
"""
return pulumi.get(self, "recurring_window")
@pulumi.output_type
class ClusterMaintenancePolicyDailyMaintenanceWindow(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "startTime":
suggest = "start_time"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterMaintenancePolicyDailyMaintenanceWindow. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterMaintenancePolicyDailyMaintenanceWindow.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterMaintenancePolicyDailyMaintenanceWindow.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
start_time: str,
duration: Optional[str] = None):
pulumi.set(__self__, "start_time", start_time)
if duration is not None:
pulumi.set(__self__, "duration", duration)
@property
@pulumi.getter(name="startTime")
def start_time(self) -> str:
return pulumi.get(self, "start_time")
@property
@pulumi.getter
def duration(self) -> Optional[str]:
return pulumi.get(self, "duration")
@pulumi.output_type
class ClusterMaintenancePolicyMaintenanceExclusion(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "endTime":
suggest = "end_time"
elif key == "exclusionName":
suggest = "exclusion_name"
elif key == "startTime":
suggest = "start_time"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterMaintenancePolicyMaintenanceExclusion. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterMaintenancePolicyMaintenanceExclusion.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterMaintenancePolicyMaintenanceExclusion.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
end_time: str,
exclusion_name: str,
start_time: str):
pulumi.set(__self__, "end_time", end_time)
pulumi.set(__self__, "exclusion_name", exclusion_name)
pulumi.set(__self__, "start_time", start_time)
@property
@pulumi.getter(name="endTime")
def end_time(self) -> str:
return pulumi.get(self, "end_time")
@property
@pulumi.getter(name="exclusionName")
def exclusion_name(self) -> str:
return pulumi.get(self, "exclusion_name")
@property
@pulumi.getter(name="startTime")
def start_time(self) -> str:
return pulumi.get(self, "start_time")
@pulumi.output_type
class ClusterMaintenancePolicyRecurringWindow(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "endTime":
suggest = "end_time"
elif key == "startTime":
suggest = "start_time"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterMaintenancePolicyRecurringWindow. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterMaintenancePolicyRecurringWindow.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterMaintenancePolicyRecurringWindow.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
end_time: str,
recurrence: str,
start_time: str):
pulumi.set(__self__, "end_time", end_time)
pulumi.set(__self__, "recurrence", recurrence)
pulumi.set(__self__, "start_time", start_time)
@property
@pulumi.getter(name="endTime")
def end_time(self) -> str:
return pulumi.get(self, "end_time")
@property
@pulumi.getter
def recurrence(self) -> str:
return pulumi.get(self, "recurrence")
@property
@pulumi.getter(name="startTime")
def start_time(self) -> str:
return pulumi.get(self, "start_time")
@pulumi.output_type
class ClusterMasterAuth(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "clientCertificateConfig":
suggest = "client_certificate_config"
elif key == "clientCertificate":
suggest = "client_certificate"
elif key == "clientKey":
suggest = "client_key"
elif key == "clusterCaCertificate":
suggest = "cluster_ca_certificate"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterMasterAuth. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterMasterAuth.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterMasterAuth.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
client_certificate_config: 'outputs.ClusterMasterAuthClientCertificateConfig',
client_certificate: Optional[str] = None,
client_key: Optional[str] = None,
cluster_ca_certificate: Optional[str] = None):
"""
        :param 'ClusterMasterAuthClientCertificateConfigArgs' client_certificate_config: Whether client certificate authorization is enabled for this cluster.
"""
pulumi.set(__self__, "client_certificate_config", client_certificate_config)
if client_certificate is not None:
pulumi.set(__self__, "client_certificate", client_certificate)
if client_key is not None:
pulumi.set(__self__, "client_key", client_key)
if cluster_ca_certificate is not None:
pulumi.set(__self__, "cluster_ca_certificate", cluster_ca_certificate)
@property
@pulumi.getter(name="clientCertificateConfig")
def client_certificate_config(self) -> 'outputs.ClusterMasterAuthClientCertificateConfig':
"""
        Whether client certificate authorization is enabled for this cluster.
"""
return pulumi.get(self, "client_certificate_config")
@property
@pulumi.getter(name="clientCertificate")
def client_certificate(self) -> Optional[str]:
return pulumi.get(self, "client_certificate")
@property
@pulumi.getter(name="clientKey")
def client_key(self) -> Optional[str]:
return pulumi.get(self, "client_key")
@property
@pulumi.getter(name="clusterCaCertificate")
def cluster_ca_certificate(self) -> Optional[str]:
return pulumi.get(self, "cluster_ca_certificate")
@pulumi.output_type
class ClusterMasterAuthClientCertificateConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "issueClientCertificate":
suggest = "issue_client_certificate"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterMasterAuthClientCertificateConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterMasterAuthClientCertificateConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterMasterAuthClientCertificateConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
issue_client_certificate: bool):
pulumi.set(__self__, "issue_client_certificate", issue_client_certificate)
@property
@pulumi.getter(name="issueClientCertificate")
def issue_client_certificate(self) -> bool:
return pulumi.get(self, "issue_client_certificate")
@pulumi.output_type
class ClusterMasterAuthorizedNetworksConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "cidrBlocks":
suggest = "cidr_blocks"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterMasterAuthorizedNetworksConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterMasterAuthorizedNetworksConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterMasterAuthorizedNetworksConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
cidr_blocks: Optional[Sequence['outputs.ClusterMasterAuthorizedNetworksConfigCidrBlock']] = None):
"""
:param Sequence['ClusterMasterAuthorizedNetworksConfigCidrBlockArgs'] cidr_blocks: External networks that can access the
Kubernetes cluster master through HTTPS.
"""
if cidr_blocks is not None:
pulumi.set(__self__, "cidr_blocks", cidr_blocks)
@property
@pulumi.getter(name="cidrBlocks")
def cidr_blocks(self) -> Optional[Sequence['outputs.ClusterMasterAuthorizedNetworksConfigCidrBlock']]:
"""
External networks that can access the
Kubernetes cluster master through HTTPS.
"""
return pulumi.get(self, "cidr_blocks")
@pulumi.output_type
class ClusterMasterAuthorizedNetworksConfigCidrBlock(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "cidrBlock":
suggest = "cidr_block"
elif key == "displayName":
suggest = "display_name"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterMasterAuthorizedNetworksConfigCidrBlock. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterMasterAuthorizedNetworksConfigCidrBlock.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterMasterAuthorizedNetworksConfigCidrBlock.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
cidr_block: str,
display_name: Optional[str] = None):
"""
:param str cidr_block: External network that can access Kubernetes master through HTTPS.
Must be specified in CIDR notation.
:param str display_name: Field for users to identify CIDR blocks.
"""
pulumi.set(__self__, "cidr_block", cidr_block)
if display_name is not None:
pulumi.set(__self__, "display_name", display_name)
@property
@pulumi.getter(name="cidrBlock")
def cidr_block(self) -> str:
"""
External network that can access Kubernetes master through HTTPS.
Must be specified in CIDR notation.
"""
return pulumi.get(self, "cidr_block")
@property
@pulumi.getter(name="displayName")
def display_name(self) -> Optional[str]:
"""
Field for users to identify CIDR blocks.
"""
return pulumi.get(self, "display_name")
@pulumi.output_type
class ClusterMonitoringConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "enableComponents":
suggest = "enable_components"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterMonitoringConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterMonitoringConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterMonitoringConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
enable_components: Sequence[str]):
"""
        :param Sequence[str] enable_components: The GKE components exposing metrics. In the GA provider only `SYSTEM_COMPONENTS` is supported; in the beta provider both `SYSTEM_COMPONENTS` and `WORKLOADS` are supported.
"""
pulumi.set(__self__, "enable_components", enable_components)
@property
@pulumi.getter(name="enableComponents")
def enable_components(self) -> Sequence[str]:
"""
        The GKE components exposing metrics. In the GA provider only `SYSTEM_COMPONENTS` is supported; in the beta provider both `SYSTEM_COMPONENTS` and `WORKLOADS` are supported.
"""
return pulumi.get(self, "enable_components")
@pulumi.output_type
class ClusterNetworkPolicy(dict):
def __init__(__self__, *,
enabled: bool,
provider: Optional[str] = None):
"""
        :param bool enabled: Whether network policy is enabled on the cluster.
:param str provider: The selected network policy provider. Defaults to PROVIDER_UNSPECIFIED.
"""
pulumi.set(__self__, "enabled", enabled)
if provider is not None:
pulumi.set(__self__, "provider", provider)
@property
@pulumi.getter
def enabled(self) -> bool:
"""
        Whether network policy is enabled on the cluster.
"""
return pulumi.get(self, "enabled")
@property
@pulumi.getter
def provider(self) -> Optional[str]:
"""
The selected network policy provider. Defaults to PROVIDER_UNSPECIFIED.
"""
return pulumi.get(self, "provider")
@pulumi.output_type
class ClusterNodeConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "bootDiskKmsKey":
suggest = "boot_disk_kms_key"
elif key == "diskSizeGb":
suggest = "disk_size_gb"
elif key == "diskType":
suggest = "disk_type"
elif key == "ephemeralStorageConfig":
suggest = "ephemeral_storage_config"
elif key == "gcfsConfig":
suggest = "gcfs_config"
elif key == "guestAccelerators":
suggest = "guest_accelerators"
elif key == "imageType":
suggest = "image_type"
elif key == "kubeletConfig":
suggest = "kubelet_config"
elif key == "linuxNodeConfig":
suggest = "linux_node_config"
elif key == "localSsdCount":
suggest = "local_ssd_count"
elif key == "machineType":
suggest = "machine_type"
elif key == "minCpuPlatform":
suggest = "min_cpu_platform"
elif key == "nodeGroup":
suggest = "node_group"
elif key == "oauthScopes":
suggest = "oauth_scopes"
elif key == "sandboxConfig":
suggest = "sandbox_config"
elif key == "serviceAccount":
suggest = "service_account"
elif key == "shieldedInstanceConfig":
suggest = "shielded_instance_config"
elif key == "workloadMetadataConfig":
suggest = "workload_metadata_config"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterNodeConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterNodeConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterNodeConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
boot_disk_kms_key: Optional[str] = None,
disk_size_gb: Optional[int] = None,
disk_type: Optional[str] = None,
ephemeral_storage_config: Optional['outputs.ClusterNodeConfigEphemeralStorageConfig'] = None,
gcfs_config: Optional['outputs.ClusterNodeConfigGcfsConfig'] = None,
guest_accelerators: Optional[Sequence['outputs.ClusterNodeConfigGuestAccelerator']] = None,
image_type: Optional[str] = None,
kubelet_config: Optional['outputs.ClusterNodeConfigKubeletConfig'] = None,
labels: Optional[Mapping[str, str]] = None,
linux_node_config: Optional['outputs.ClusterNodeConfigLinuxNodeConfig'] = None,
local_ssd_count: Optional[int] = None,
machine_type: Optional[str] = None,
metadata: Optional[Mapping[str, str]] = None,
min_cpu_platform: Optional[str] = None,
node_group: Optional[str] = None,
oauth_scopes: Optional[Sequence[str]] = None,
preemptible: Optional[bool] = None,
sandbox_config: Optional['outputs.ClusterNodeConfigSandboxConfig'] = None,
service_account: Optional[str] = None,
shielded_instance_config: Optional['outputs.ClusterNodeConfigShieldedInstanceConfig'] = None,
spot: Optional[bool] = None,
tags: Optional[Sequence[str]] = None,
taints: Optional[Sequence['outputs.ClusterNodeConfigTaint']] = None,
workload_metadata_config: Optional['outputs.ClusterNodeConfigWorkloadMetadataConfig'] = None):
"""
:param str boot_disk_kms_key: The Customer Managed Encryption Key used to encrypt the boot disk attached to each node in the node pool. This should be of the form projects/[KEY_PROJECT_ID]/locations/[LOCATION]/keyRings/[RING_NAME]/cryptoKeys/[KEY_NAME]. For more information about protecting resources with Cloud KMS Keys please see: https://cloud.google.com/compute/docs/disks/customer-managed-encryption
:param int disk_size_gb: Size of the disk attached to each node, specified
in GB. The smallest allowed disk size is 10GB. Defaults to 100GB.
        :param str disk_type: Type of the disk attached to each node
               (e.g. 'pd-standard', 'pd-balanced' or 'pd-ssd'). If unspecified, the default disk type is 'pd-standard'.
:param 'ClusterNodeConfigEphemeralStorageConfigArgs' ephemeral_storage_config: Parameters for the ephemeral storage filesystem. If unspecified, ephemeral storage is backed by the boot disk. Structure is documented below.
:param 'ClusterNodeConfigGcfsConfigArgs' gcfs_config: Parameters for the Google Container Filesystem (GCFS).
If unspecified, GCFS will not be enabled on the node pool. When enabling this feature you must specify `image_type = "COS_CONTAINERD"` and `node_version` from GKE versions 1.19 or later to use it.
For GKE versions 1.19, 1.20, and 1.21, the recommended minimum `node_version` would be 1.19.15-gke.1300, 1.20.11-gke.1300, and 1.21.5-gke.1300 respectively.
A `machine_type` that has more than 16 GiB of memory is also recommended.
GCFS must be enabled in order to use [image streaming](https://cloud.google.com/kubernetes-engine/docs/how-to/image-streaming).
Structure is documented below.
:param Sequence['ClusterNodeConfigGuestAcceleratorArgs'] guest_accelerators: List of the type and count of accelerator cards attached to the instance.
Structure documented below.
:param str image_type: The image type to use for this node. Note that changing the image type
will delete and recreate all nodes in the node pool.
:param 'ClusterNodeConfigKubeletConfigArgs' kubelet_config: Kubelet configuration, currently supported attributes can be found [here](https://cloud.google.com/sdk/gcloud/reference/beta/container/node-pools/create#--system-config-from-file).
Structure is documented below.
:param Mapping[str, str] labels: The Kubernetes labels (key/value pairs) to be applied to each node. The kubernetes.io/ and k8s.io/ prefixes are
reserved by Kubernetes Core components and cannot be specified.
:param 'ClusterNodeConfigLinuxNodeConfigArgs' linux_node_config: Linux node configuration, currently supported attributes can be found [here](https://cloud.google.com/sdk/gcloud/reference/beta/container/node-pools/create#--system-config-from-file).
Note that validations happen all server side. All attributes are optional.
Structure is documented below.
:param int local_ssd_count: Number of local SSDs to use to back ephemeral storage. Uses NVMe interfaces. Each local SSD is 375 GB in size. If zero, it means to disable using local SSDs as ephemeral storage.
:param str machine_type: The name of a Google Compute Engine machine type.
Defaults to `e2-medium`. To create a custom machine type, value should be set as specified
[here](https://cloud.google.com/compute/docs/reference/latest/instances#machineType).
:param Mapping[str, str] metadata: The metadata key/value pairs assigned to instances in
the cluster. From GKE `1.12` onwards, `disable-legacy-endpoints` is set to
`true` by the API; if `metadata` is set but that default value is not
included, the provider will attempt to unset the value. To avoid this, set the
value in your config.
:param str min_cpu_platform: Minimum CPU platform to be used by this instance.
The instance may be scheduled on the specified or newer CPU platform. Applicable
values are the friendly names of CPU platforms, such as `Intel Haswell`. See the
[official documentation](https://cloud.google.com/compute/docs/instances/specify-min-cpu-platform)
for more information.
:param str node_group: Setting this field will assign instances of this pool to run on the specified node group. This is useful for running workloads on [sole tenant nodes](https://cloud.google.com/compute/docs/nodes/sole-tenant-nodes).
:param Sequence[str] oauth_scopes: The set of Google API scopes to be made available
on all of the node VMs under the "default" service account.
Use the "https://www.googleapis.com/auth/cloud-platform" scope to grant access to all APIs. It is recommended that you set `service_account` to a non-default service account and grant IAM roles to that service account for only the resources that it needs.
:param bool preemptible: A boolean that represents whether or not the underlying node VMs
are preemptible. See the [official documentation](https://cloud.google.com/container-engine/docs/preemptible-vm)
for more information. Defaults to false.
        :param 'ClusterNodeConfigSandboxConfigArgs' sandbox_config: [GKE Sandbox](https://cloud.google.com/kubernetes-engine/docs/how-to/sandbox-pods) configuration. When enabling this feature you must specify `image_type = "COS_CONTAINERD"` and `node_version = "1.12.7-gke.17"` or later to use it.
               Structure is documented below.
:param str service_account: The service account to be used by the Node VMs.
If not specified, the "default" service account is used.
:param 'ClusterNodeConfigShieldedInstanceConfigArgs' shielded_instance_config: Shielded Instance options. Structure is documented below.
        :param bool spot: A boolean
that represents whether the underlying node VMs are spot. See the [official documentation](https://cloud.google.com/kubernetes-engine/docs/concepts/spot-vms)
for more information. Defaults to false.
:param Sequence[str] tags: The list of instance tags applied to all nodes. Tags are used to identify
valid sources or targets for network firewalls.
:param Sequence['ClusterNodeConfigTaintArgs'] taints: A list of [Kubernetes taints](https://kubernetes.io/docs/concepts/configuration/taint-and-toleration/)
to apply to nodes. GKE's API can only set this field on cluster creation.
However, GKE will add taints to your nodes if you enable certain features such
as GPUs. If this field is set, any diffs on this field will cause the provider to
recreate the underlying resource. Taint values can be updated safely in
Kubernetes (eg. through `kubectl`), and it's recommended that you do not use
this field to manage taints. If you do, `lifecycle.ignore_changes` is
recommended. Structure is documented below.
:param 'ClusterNodeConfigWorkloadMetadataConfigArgs' workload_metadata_config: Metadata configuration to expose to workloads on the node pool.
Structure is documented below.
"""
if boot_disk_kms_key is not None:
pulumi.set(__self__, "boot_disk_kms_key", boot_disk_kms_key)
if disk_size_gb is not None:
pulumi.set(__self__, "disk_size_gb", disk_size_gb)
if disk_type is not None:
pulumi.set(__self__, "disk_type", disk_type)
if ephemeral_storage_config is not None:
pulumi.set(__self__, "ephemeral_storage_config", ephemeral_storage_config)
if gcfs_config is not None:
pulumi.set(__self__, "gcfs_config", gcfs_config)
if guest_accelerators is not None:
pulumi.set(__self__, "guest_accelerators", guest_accelerators)
if image_type is not None:
pulumi.set(__self__, "image_type", image_type)
if kubelet_config is not None:
pulumi.set(__self__, "kubelet_config", kubelet_config)
if labels is not None:
pulumi.set(__self__, "labels", labels)
if linux_node_config is not None:
pulumi.set(__self__, "linux_node_config", linux_node_config)
if local_ssd_count is not None:
pulumi.set(__self__, "local_ssd_count", local_ssd_count)
if machine_type is not None:
pulumi.set(__self__, "machine_type", machine_type)
if metadata is not None:
pulumi.set(__self__, "metadata", metadata)
if min_cpu_platform is not None:
pulumi.set(__self__, "min_cpu_platform", min_cpu_platform)
if node_group is not None:
pulumi.set(__self__, "node_group", node_group)
if oauth_scopes is not None:
pulumi.set(__self__, "oauth_scopes", oauth_scopes)
if preemptible is not None:
pulumi.set(__self__, "preemptible", preemptible)
if sandbox_config is not None:
pulumi.set(__self__, "sandbox_config", sandbox_config)
if service_account is not None:
pulumi.set(__self__, "service_account", service_account)
if shielded_instance_config is not None:
pulumi.set(__self__, "shielded_instance_config", shielded_instance_config)
if spot is not None:
pulumi.set(__self__, "spot", spot)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if taints is not None:
pulumi.set(__self__, "taints", taints)
if workload_metadata_config is not None:
pulumi.set(__self__, "workload_metadata_config", workload_metadata_config)
@property
@pulumi.getter(name="bootDiskKmsKey")
def boot_disk_kms_key(self) -> Optional[str]:
"""
The Customer Managed Encryption Key used to encrypt the boot disk attached to each node in the node pool. This should be of the form projects/[KEY_PROJECT_ID]/locations/[LOCATION]/keyRings/[RING_NAME]/cryptoKeys/[KEY_NAME]. For more information about protecting resources with Cloud KMS Keys please see: https://cloud.google.com/compute/docs/disks/customer-managed-encryption
"""
return pulumi.get(self, "boot_disk_kms_key")
@property
@pulumi.getter(name="diskSizeGb")
def disk_size_gb(self) -> Optional[int]:
"""
Size of the disk attached to each node, specified
in GB. The smallest allowed disk size is 10GB. Defaults to 100GB.
"""
return pulumi.get(self, "disk_size_gb")
@property
@pulumi.getter(name="diskType")
def disk_type(self) -> Optional[str]:
"""
Type of the disk attached to each node
(e.g. 'pd-standard', 'pd-balanced' or 'pd-ssd'). If unspecified, the default disk type is 'pd-standard'.
"""
return pulumi.get(self, "disk_type")
@property
@pulumi.getter(name="ephemeralStorageConfig")
def ephemeral_storage_config(self) -> Optional['outputs.ClusterNodeConfigEphemeralStorageConfig']:
"""
Parameters for the ephemeral storage filesystem. If unspecified, ephemeral storage is backed by the boot disk. Structure is documented below.
"""
return pulumi.get(self, "ephemeral_storage_config")
@property
@pulumi.getter(name="gcfsConfig")
def gcfs_config(self) -> Optional['outputs.ClusterNodeConfigGcfsConfig']:
"""
Parameters for the Google Container Filesystem (GCFS).
If unspecified, GCFS will not be enabled on the node pool. When enabling this feature you must specify `image_type = "COS_CONTAINERD"` and `node_version` from GKE versions 1.19 or later to use it.
For GKE versions 1.19, 1.20, and 1.21, the recommended minimum `node_version` would be 1.19.15-gke.1300, 1.20.11-gke.1300, and 1.21.5-gke.1300 respectively.
A `machine_type` that has more than 16 GiB of memory is also recommended.
GCFS must be enabled in order to use [image streaming](https://cloud.google.com/kubernetes-engine/docs/how-to/image-streaming).
Structure is documented below.
"""
return pulumi.get(self, "gcfs_config")
@property
@pulumi.getter(name="guestAccelerators")
def guest_accelerators(self) -> Optional[Sequence['outputs.ClusterNodeConfigGuestAccelerator']]:
"""
List of the type and count of accelerator cards attached to the instance.
Structure is documented below.
"""
return pulumi.get(self, "guest_accelerators")
@property
@pulumi.getter(name="imageType")
def image_type(self) -> Optional[str]:
"""
The image type to use for this node. Note that changing the image type
will delete and recreate all nodes in the node pool.
"""
return pulumi.get(self, "image_type")
@property
@pulumi.getter(name="kubeletConfig")
def kubelet_config(self) -> Optional['outputs.ClusterNodeConfigKubeletConfig']:
"""
Kubelet configuration; currently supported attributes can be found [here](https://cloud.google.com/sdk/gcloud/reference/beta/container/node-pools/create#--system-config-from-file).
Structure is documented below.
"""
return pulumi.get(self, "kubelet_config")
@property
@pulumi.getter
def labels(self) -> Optional[Mapping[str, str]]:
"""
The Kubernetes labels (key/value pairs) to be applied to each node. The kubernetes.io/ and k8s.io/ prefixes are
reserved by Kubernetes Core components and cannot be specified.
"""
return pulumi.get(self, "labels")
@property
@pulumi.getter(name="linuxNodeConfig")
def linux_node_config(self) -> Optional['outputs.ClusterNodeConfigLinuxNodeConfig']:
"""
Linux node configuration; currently supported attributes can be found [here](https://cloud.google.com/sdk/gcloud/reference/beta/container/node-pools/create#--system-config-from-file).
Note that all validations happen server side. All attributes are optional.
Structure is documented below.
"""
return pulumi.get(self, "linux_node_config")
@property
@pulumi.getter(name="localSsdCount")
def local_ssd_count(self) -> Optional[int]:
"""
Number of local SSDs to use to back ephemeral storage. Uses NVMe interfaces. Each local SSD is 375 GB in size. If zero, local SSDs are not used as ephemeral storage.
"""
return pulumi.get(self, "local_ssd_count")
@property
@pulumi.getter(name="machineType")
def machine_type(self) -> Optional[str]:
"""
The name of a Google Compute Engine machine type.
Defaults to `e2-medium`. To create a custom machine type, the value should be set as specified
[here](https://cloud.google.com/compute/docs/reference/latest/instances#machineType).
"""
return pulumi.get(self, "machine_type")
@property
@pulumi.getter
def metadata(self) -> Optional[Mapping[str, str]]:
"""
The metadata key/value pairs assigned to instances in
the cluster. From GKE `1.12` onwards, `disable-legacy-endpoints` is set to
`true` by the API; if `metadata` is set but that default value is not
included, the provider will attempt to unset the value. To avoid this, set the
value in your config.
"""
return pulumi.get(self, "metadata")
@property
@pulumi.getter(name="minCpuPlatform")
def min_cpu_platform(self) -> Optional[str]:
"""
Minimum CPU platform to be used by this instance.
The instance may be scheduled on the specified or newer CPU platform. Applicable
values are the friendly names of CPU platforms, such as `Intel Haswell`. See the
[official documentation](https://cloud.google.com/compute/docs/instances/specify-min-cpu-platform)
for more information.
"""
return pulumi.get(self, "min_cpu_platform")
@property
@pulumi.getter(name="nodeGroup")
def node_group(self) -> Optional[str]:
"""
Setting this field will assign instances of this pool to run on the specified node group. This is useful for running workloads on [sole tenant nodes](https://cloud.google.com/compute/docs/nodes/sole-tenant-nodes).
"""
return pulumi.get(self, "node_group")
@property
@pulumi.getter(name="oauthScopes")
def oauth_scopes(self) -> Optional[Sequence[str]]:
"""
The set of Google API scopes to be made available
on all of the node VMs under the "default" service account.
Use the "https://www.googleapis.com/auth/cloud-platform" scope to grant access to all APIs. It is recommended that you set `service_account` to a non-default service account and grant IAM roles to that service account for only the resources that it needs.
"""
return pulumi.get(self, "oauth_scopes")
@property
@pulumi.getter
def preemptible(self) -> Optional[bool]:
"""
A boolean that represents whether or not the underlying node VMs
are preemptible. See the [official documentation](https://cloud.google.com/container-engine/docs/preemptible-vm)
for more information. Defaults to false.
"""
return pulumi.get(self, "preemptible")
@property
@pulumi.getter(name="sandboxConfig")
def sandbox_config(self) -> Optional['outputs.ClusterNodeConfigSandboxConfig']:
"""
[GKE Sandbox](https://cloud.google.com/kubernetes-engine/docs/how-to/sandbox-pods) configuration. When enabling this feature you must specify `image_type = "COS_CONTAINERD"` and `node_version = "1.12.7-gke.17"` or later to use it.
Structure is documented below.
"""
return pulumi.get(self, "sandbox_config")
@property
@pulumi.getter(name="serviceAccount")
def service_account(self) -> Optional[str]:
"""
The service account to be used by the Node VMs.
If not specified, the "default" service account is used.
"""
return pulumi.get(self, "service_account")
@property
@pulumi.getter(name="shieldedInstanceConfig")
def shielded_instance_config(self) -> Optional['outputs.ClusterNodeConfigShieldedInstanceConfig']:
"""
Shielded Instance options. Structure is documented below.
"""
return pulumi.get(self, "shielded_instance_config")
@property
@pulumi.getter
def spot(self) -> Optional[bool]:
"""
A boolean
that represents whether the underlying node VMs are spot. See the [official documentation](https://cloud.google.com/kubernetes-engine/docs/concepts/spot-vms)
for more information. Defaults to false.
"""
return pulumi.get(self, "spot")
@property
@pulumi.getter
def tags(self) -> Optional[Sequence[str]]:
"""
The list of instance tags applied to all nodes. Tags are used to identify
valid sources or targets for network firewalls.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def taints(self) -> Optional[Sequence['outputs.ClusterNodeConfigTaint']]:
"""
A list of [Kubernetes taints](https://kubernetes.io/docs/concepts/configuration/taint-and-toleration/)
to apply to nodes. GKE's API can only set this field on cluster creation.
However, GKE will add taints to your nodes if you enable certain features such
as GPUs. If this field is set, any diffs on this field will cause the provider to
recreate the underlying resource. Taint values can be updated safely in
Kubernetes (e.g. through `kubectl`), and it's recommended that you do not use
this field to manage taints. If you do, `lifecycle.ignore_changes` is
recommended. Structure is documented below.
"""
return pulumi.get(self, "taints")
@property
@pulumi.getter(name="workloadMetadataConfig")
def workload_metadata_config(self) -> Optional['outputs.ClusterNodeConfigWorkloadMetadataConfig']:
"""
Metadata configuration to expose to workloads on the node pool.
Structure is documented below.
"""
return pulumi.get(self, "workload_metadata_config")
@pulumi.output_type
class ClusterNodeConfigEphemeralStorageConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "localSsdCount":
suggest = "local_ssd_count"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterNodeConfigEphemeralStorageConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterNodeConfigEphemeralStorageConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterNodeConfigEphemeralStorageConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
local_ssd_count: int):
"""
:param int local_ssd_count: Number of local SSDs to use to back ephemeral storage. Uses NVMe interfaces. Each local SSD is 375 GB in size. If zero, local SSDs are not used as ephemeral storage.
"""
pulumi.set(__self__, "local_ssd_count", local_ssd_count)
@property
@pulumi.getter(name="localSsdCount")
def local_ssd_count(self) -> int:
"""
Number of local SSDs to use to back ephemeral storage. Uses NVMe interfaces. Each local SSD is 375 GB in size. If zero, local SSDs are not used as ephemeral storage.
"""
return pulumi.get(self, "local_ssd_count")
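# Illustrative usage sketch (not part of the generated SDK): backing ephemeral
# storage with a local NVMe SSD on the default node pool, via the mirrored
# `*Args` input types. The resource name and machine type are hypothetical.
#
#   import pulumi_gcp as gcp
#
#   cluster = gcp.container.Cluster(
#       "ssd-cluster",
#       initial_node_count=1,
#       node_config=gcp.container.ClusterNodeConfigArgs(
#           machine_type="n2-standard-4",
#           ephemeral_storage_config=gcp.container.ClusterNodeConfigEphemeralStorageConfigArgs(
#               local_ssd_count=1,
#           ),
#       ),
#   )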
@pulumi.output_type
class ClusterNodeConfigGcfsConfig(dict):
def __init__(__self__, *,
enabled: bool):
"""
:param bool enabled: Whether or not the Google Container Filesystem (GCFS) is enabled.
"""
pulumi.set(__self__, "enabled", enabled)
@property
@pulumi.getter
def enabled(self) -> bool:
"""
Whether or not the Google Container Filesystem (GCFS) is enabled.
"""
return pulumi.get(self, "enabled")
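# Illustrative usage sketch (not part of the generated SDK): enabling GCFS so
# that image streaming can be used. Per the docstrings above, `image_type` must
# be `COS_CONTAINERD` and the node version must be GKE 1.19 or later; the
# resource name and node version below are hypothetical.
#
#   import pulumi_gcp as gcp
#
#   cluster = gcp.container.Cluster(
#       "gcfs-cluster",
#       initial_node_count=1,
#       node_version="1.21.5-gke.1300",
#       node_config=gcp.container.ClusterNodeConfigArgs(
#           image_type="COS_CONTAINERD",
#           gcfs_config=gcp.container.ClusterNodeConfigGcfsConfigArgs(enabled=True),
#       ),
#   )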
@pulumi.output_type
class ClusterNodeConfigGuestAccelerator(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "gpuPartitionSize":
suggest = "gpu_partition_size"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterNodeConfigGuestAccelerator. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterNodeConfigGuestAccelerator.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterNodeConfigGuestAccelerator.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
count: int,
type: str,
gpu_partition_size: Optional[str] = None):
"""
:param int count: The number of the guest accelerator cards exposed to this instance.
:param str type: The accelerator type resource to expose to this instance. E.g. `nvidia-tesla-k80`.
:param str gpu_partition_size: Size of partitions to create on the GPU. Valid values are described in the NVIDIA mig [user guide](https://docs.nvidia.com/datacenter/tesla/mig-user-guide/#partitioning).
"""
pulumi.set(__self__, "count", count)
pulumi.set(__self__, "type", type)
if gpu_partition_size is not None:
pulumi.set(__self__, "gpu_partition_size", gpu_partition_size)
@property
@pulumi.getter
def count(self) -> int:
"""
The number of the guest accelerator cards exposed to this instance.
"""
return pulumi.get(self, "count")
@property
@pulumi.getter
def type(self) -> str:
"""
The accelerator type resource to expose to this instance. E.g. `nvidia-tesla-k80`.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="gpuPartitionSize")
def gpu_partition_size(self) -> Optional[str]:
"""
Size of partitions to create on the GPU. Valid values are described in the NVIDIA mig [user guide](https://docs.nvidia.com/datacenter/tesla/mig-user-guide/#partitioning).
"""
return pulumi.get(self, "gpu_partition_size")
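# Illustrative usage sketch (not part of the generated SDK): attaching a single
# NVIDIA K80 GPU to each node, using the accelerator type named in the
# docstring above. The resource name is hypothetical.
#
#   import pulumi_gcp as gcp
#
#   cluster = gcp.container.Cluster(
#       "gpu-cluster",
#       initial_node_count=1,
#       node_config=gcp.container.ClusterNodeConfigArgs(
#           guest_accelerators=[
#               gcp.container.ClusterNodeConfigGuestAcceleratorArgs(
#                   type="nvidia-tesla-k80",
#                   count=1,
#               ),
#           ],
#       ),
#   )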
@pulumi.output_type
class ClusterNodeConfigKubeletConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "cpuManagerPolicy":
suggest = "cpu_manager_policy"
elif key == "cpuCfsQuota":
suggest = "cpu_cfs_quota"
elif key == "cpuCfsQuotaPeriod":
suggest = "cpu_cfs_quota_period"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterNodeConfigKubeletConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterNodeConfigKubeletConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterNodeConfigKubeletConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
cpu_manager_policy: str,
cpu_cfs_quota: Optional[bool] = None,
cpu_cfs_quota_period: Optional[str] = None):
"""
:param str cpu_manager_policy: The CPU management policy on the node. See
[K8S CPU Management Policies](https://kubernetes.io/docs/tasks/administer-cluster/cpu-management-policies/).
One of `"none"` or `"static"`. Defaults to `none` when `kubelet_config` is unset.
:param bool cpu_cfs_quota: If true, enables CPU CFS quota enforcement for
containers that specify CPU limits.
:param str cpu_cfs_quota_period: The CPU CFS quota period value. Specified
as a sequence of decimal numbers, each with optional fraction and a unit suffix,
such as `"300ms"`. Valid time units are "ns", "us" (or "µs"), "ms", "s", "m",
"h". The value must be a positive duration.
"""
pulumi.set(__self__, "cpu_manager_policy", cpu_manager_policy)
if cpu_cfs_quota is not None:
pulumi.set(__self__, "cpu_cfs_quota", cpu_cfs_quota)
if cpu_cfs_quota_period is not None:
pulumi.set(__self__, "cpu_cfs_quota_period", cpu_cfs_quota_period)
@property
@pulumi.getter(name="cpuManagerPolicy")
def cpu_manager_policy(self) -> str:
"""
The CPU management policy on the node. See
[K8S CPU Management Policies](https://kubernetes.io/docs/tasks/administer-cluster/cpu-management-policies/).
One of `"none"` or `"static"`. Defaults to `none` when `kubelet_config` is unset.
"""
return pulumi.get(self, "cpu_manager_policy")
@property
@pulumi.getter(name="cpuCfsQuota")
def cpu_cfs_quota(self) -> Optional[bool]:
"""
If true, enables CPU CFS quota enforcement for
containers that specify CPU limits.
"""
return pulumi.get(self, "cpu_cfs_quota")
@property
@pulumi.getter(name="cpuCfsQuotaPeriod")
def cpu_cfs_quota_period(self) -> Optional[str]:
"""
The CPU CFS quota period value. Specified
as a sequence of decimal numbers, each with optional fraction and a unit suffix,
such as `"300ms"`. Valid time units are "ns", "us" (or "µs"), "ms", "s", "m",
"h". The value must be a positive duration.
"""
return pulumi.get(self, "cpu_cfs_quota_period")
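# Illustrative usage sketch (not part of the generated SDK): pinning the kubelet
# CPU manager policy to `static` and enforcing CFS quota, using the accepted
# values documented above. The resource name and quota period are hypothetical.
#
#   import pulumi_gcp as gcp
#
#   cluster = gcp.container.Cluster(
#       "kubelet-cluster",
#       initial_node_count=1,
#       node_config=gcp.container.ClusterNodeConfigArgs(
#           kubelet_config=gcp.container.ClusterNodeConfigKubeletConfigArgs(
#               cpu_manager_policy="static",
#               cpu_cfs_quota=True,
#               cpu_cfs_quota_period="100ms",
#           ),
#       ),
#   )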
@pulumi.output_type
class ClusterNodeConfigLinuxNodeConfig(dict):
def __init__(__self__, *,
sysctls: Mapping[str, str]):
"""
:param Mapping[str, str] sysctls: The Linux kernel parameters to be applied to the nodes
and all pods running on the nodes. Specified as a map from the key, such as
`net.core.wmem_max`, to a string value.
"""
pulumi.set(__self__, "sysctls", sysctls)
@property
@pulumi.getter
def sysctls(self) -> Mapping[str, str]:
"""
The Linux kernel parameters to be applied to the nodes
and all pods running on the nodes. Specified as a map from the key, such as
`net.core.wmem_max`, to a string value.
"""
return pulumi.get(self, "sysctls")
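# Illustrative usage sketch (not part of the generated SDK): raising a
# socket-buffer kernel parameter on all nodes via `sysctls`, reusing the example
# key from the docstring above. The value shown is hypothetical.
#
#   import pulumi_gcp as gcp
#
#   cluster = gcp.container.Cluster(
#       "sysctl-cluster",
#       initial_node_count=1,
#       node_config=gcp.container.ClusterNodeConfigArgs(
#           linux_node_config=gcp.container.ClusterNodeConfigLinuxNodeConfigArgs(
#               sysctls={"net.core.wmem_max": "4194304"},
#           ),
#       ),
#   )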
@pulumi.output_type
class ClusterNodeConfigSandboxConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "sandboxType":
suggest = "sandbox_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterNodeConfigSandboxConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterNodeConfigSandboxConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterNodeConfigSandboxConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
sandbox_type: str):
"""
:param str sandbox_type: Which sandbox to use for pods in the node pool.
Accepted values are:
* `gvisor`: Pods run within a gVisor sandbox.
"""
pulumi.set(__self__, "sandbox_type", sandbox_type)
@property
@pulumi.getter(name="sandboxType")
def sandbox_type(self) -> str:
"""
Which sandbox to use for pods in the node pool.
Accepted values are:
* `gvisor`: Pods run within a gVisor sandbox.
"""
return pulumi.get(self, "sandbox_type")
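# Illustrative usage sketch (not part of the generated SDK): running pods in a
# gVisor sandbox. Per the docstrings above this requires
# `image_type = "COS_CONTAINERD"` and a sufficiently new node version; the
# resource name is hypothetical.
#
#   import pulumi_gcp as gcp
#
#   cluster = gcp.container.Cluster(
#       "sandboxed-cluster",
#       initial_node_count=1,
#       node_config=gcp.container.ClusterNodeConfigArgs(
#           image_type="COS_CONTAINERD",
#           sandbox_config=gcp.container.ClusterNodeConfigSandboxConfigArgs(
#               sandbox_type="gvisor",
#           ),
#       ),
#   )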
@pulumi.output_type
class ClusterNodeConfigShieldedInstanceConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "enableIntegrityMonitoring":
suggest = "enable_integrity_monitoring"
elif key == "enableSecureBoot":
suggest = "enable_secure_boot"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterNodeConfigShieldedInstanceConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterNodeConfigShieldedInstanceConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterNodeConfigShieldedInstanceConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
enable_integrity_monitoring: Optional[bool] = None,
enable_secure_boot: Optional[bool] = None):
"""
:param bool enable_integrity_monitoring: Defines if the instance has integrity monitoring enabled.
:param bool enable_secure_boot: Defines if the instance has Secure Boot enabled.
"""
if enable_integrity_monitoring is not None:
pulumi.set(__self__, "enable_integrity_monitoring", enable_integrity_monitoring)
if enable_secure_boot is not None:
pulumi.set(__self__, "enable_secure_boot", enable_secure_boot)
@property
@pulumi.getter(name="enableIntegrityMonitoring")
def enable_integrity_monitoring(self) -> Optional[bool]:
"""
Defines if the instance has integrity monitoring enabled.
"""
return pulumi.get(self, "enable_integrity_monitoring")
@property
@pulumi.getter(name="enableSecureBoot")
def enable_secure_boot(self) -> Optional[bool]:
"""
Defines if the instance has Secure Boot enabled.
"""
return pulumi.get(self, "enable_secure_boot")
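# Illustrative usage sketch (not part of the generated SDK): turning on both
# Shielded Instance options for the node VMs. The resource name is hypothetical.
#
#   import pulumi_gcp as gcp
#
#   cluster = gcp.container.Cluster(
#       "shielded-cluster",
#       initial_node_count=1,
#       node_config=gcp.container.ClusterNodeConfigArgs(
#           shielded_instance_config=gcp.container.ClusterNodeConfigShieldedInstanceConfigArgs(
#               enable_secure_boot=True,
#               enable_integrity_monitoring=True,
#           ),
#       ),
#   )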
@pulumi.output_type
class ClusterNodeConfigTaint(dict):
def __init__(__self__, *,
effect: str,
key: str,
value: str):
"""
:param str effect: Effect for taint. Accepted values are `NO_SCHEDULE`, `PREFER_NO_SCHEDULE`, and `NO_EXECUTE`.
:param str key: Key for taint.
:param str value: Value for taint.
"""
pulumi.set(__self__, "effect", effect)
pulumi.set(__self__, "key", key)
pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def effect(self) -> str:
"""
Effect for taint. Accepted values are `NO_SCHEDULE`, `PREFER_NO_SCHEDULE`, and `NO_EXECUTE`.
"""
return pulumi.get(self, "effect")
@property
@pulumi.getter
def key(self) -> str:
"""
Key for taint.
"""
return pulumi.get(self, "key")
@property
@pulumi.getter
def value(self) -> str:
"""
Value for taint.
"""
return pulumi.get(self, "value")
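# Illustrative usage sketch (not part of the generated SDK): applying a taint at
# creation time and ignoring later diffs, since (per the docstring above) GKE
# may add taints server-side. `ignore_changes` on `pulumi.ResourceOptions` is
# the Pulumi analogue of the `lifecycle.ignore_changes` mentioned above; the
# property path, key, and resource name here are hypothetical.
#
#   import pulumi
#   import pulumi_gcp as gcp
#
#   cluster = gcp.container.Cluster(
#       "tainted-cluster",
#       initial_node_count=1,
#       node_config=gcp.container.ClusterNodeConfigArgs(
#           taints=[gcp.container.ClusterNodeConfigTaintArgs(
#               key="dedicated", value="gpu", effect="NO_SCHEDULE",
#           )],
#       ),
#       opts=pulumi.ResourceOptions(ignore_changes=["nodeConfig.taints"]),
#   )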
@pulumi.output_type
class ClusterNodeConfigWorkloadMetadataConfig(dict):
def __init__(__self__, *,
mode: str):
"""
:param str mode: How to expose the node metadata to the workload running on the node.
Accepted values are:
* UNSPECIFIED: Not Set
* GCE_METADATA: Expose all Compute Engine metadata to pods.
* GKE_METADATA: Run the GKE Metadata Server on this node. The GKE Metadata Server exposes a metadata API to workloads that is compatible with the V1 Compute Metadata APIs exposed by the Compute Engine and App Engine Metadata Servers. This feature can only be enabled if [workload identity](https://cloud.google.com/kubernetes-engine/docs/how-to/workload-identity) is enabled at the cluster level.
"""
pulumi.set(__self__, "mode", mode)
@property
@pulumi.getter
def mode(self) -> str:
"""
How to expose the node metadata to the workload running on the node.
Accepted values are:
* UNSPECIFIED: Not Set
* GCE_METADATA: Expose all Compute Engine metadata to pods.
* GKE_METADATA: Run the GKE Metadata Server on this node. The GKE Metadata Server exposes a metadata API to workloads that is compatible with the V1 Compute Metadata APIs exposed by the Compute Engine and App Engine Metadata Servers. This feature can only be enabled if [workload identity](https://cloud.google.com/kubernetes-engine/docs/how-to/workload-identity) is enabled at the cluster level.
"""
return pulumi.get(self, "mode")
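# Illustrative usage sketch (not part of the generated SDK): running the GKE
# Metadata Server on the nodes. As noted above, `GKE_METADATA` requires workload
# identity at the cluster level; the workload pool, input type name, and
# resource name below are hypothetical.
#
#   import pulumi_gcp as gcp
#
#   cluster = gcp.container.Cluster(
#       "wi-cluster",
#       initial_node_count=1,
#       workload_identity_config=gcp.container.ClusterWorkloadIdentityConfigArgs(
#           workload_pool="my-project.svc.id.goog",
#       ),
#       node_config=gcp.container.ClusterNodeConfigArgs(
#           workload_metadata_config=gcp.container.ClusterNodeConfigWorkloadMetadataConfigArgs(
#               mode="GKE_METADATA",
#           ),
#       ),
#   )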
@pulumi.output_type
class ClusterNodePool(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "initialNodeCount":
suggest = "initial_node_count"
elif key == "instanceGroupUrls":
suggest = "instance_group_urls"
elif key == "managedInstanceGroupUrls":
suggest = "managed_instance_group_urls"
elif key == "maxPodsPerNode":
suggest = "max_pods_per_node"
elif key == "namePrefix":
suggest = "name_prefix"
elif key == "networkConfig":
suggest = "network_config"
elif key == "nodeConfig":
suggest = "node_config"
elif key == "nodeCount":
suggest = "node_count"
elif key == "nodeLocations":
suggest = "node_locations"
elif key == "upgradeSettings":
suggest = "upgrade_settings"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterNodePool. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterNodePool.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterNodePool.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
autoscaling: Optional['outputs.ClusterNodePoolAutoscaling'] = None,
initial_node_count: Optional[int] = None,
instance_group_urls: Optional[Sequence[str]] = None,
managed_instance_group_urls: Optional[Sequence[str]] = None,
management: Optional['outputs.ClusterNodePoolManagement'] = None,
max_pods_per_node: Optional[int] = None,
name: Optional[str] = None,
name_prefix: Optional[str] = None,
network_config: Optional['outputs.ClusterNodePoolNetworkConfig'] = None,
node_config: Optional['outputs.ClusterNodePoolNodeConfig'] = None,
node_count: Optional[int] = None,
node_locations: Optional[Sequence[str]] = None,
upgrade_settings: Optional['outputs.ClusterNodePoolUpgradeSettings'] = None,
version: Optional[str] = None):
"""
:param int initial_node_count: The number of nodes to create in this
cluster's default node pool. In regional or multi-zonal clusters, this is the
number of nodes per zone. Must be set if `node_pool` is not set. If you're using
`container.NodePool` objects with no default node pool, you'll need to
set this to a value of at least `1`, alongside setting
`remove_default_node_pool` to `true`.
:param str name: The name of the node pool. If left blank, the provider will
auto-generate a unique name.
:param 'ClusterNodePoolNetworkConfigArgs' network_config: Configuration for
[Adding Pod IP address ranges](https://cloud.google.com/kubernetes-engine/docs/how-to/multi-pod-cidr) to the node pool. Structure is documented below.
:param 'ClusterNodePoolNodeConfigArgs' node_config: Parameters used in creating the default node pool.
Generally, this field should not be used at the same time as a
`container.NodePool` or a `node_pool` block; this configuration
manages the default node pool, whose use is not recommended.
Structure is documented below.
:param Sequence[str] node_locations: The list of zones in which the cluster's nodes
are located. Nodes must be in the region of their regional cluster or in the
same region as their cluster's zone for zonal clusters. If this is specified for
a zonal cluster, omit the cluster's zone.
"""
if autoscaling is not None:
pulumi.set(__self__, "autoscaling", autoscaling)
if initial_node_count is not None:
pulumi.set(__self__, "initial_node_count", initial_node_count)
if instance_group_urls is not None:
pulumi.set(__self__, "instance_group_urls", instance_group_urls)
if managed_instance_group_urls is not None:
pulumi.set(__self__, "managed_instance_group_urls", managed_instance_group_urls)
if management is not None:
pulumi.set(__self__, "management", management)
if max_pods_per_node is not None:
pulumi.set(__self__, "max_pods_per_node", max_pods_per_node)
if name is not None:
pulumi.set(__self__, "name", name)
if name_prefix is not None:
pulumi.set(__self__, "name_prefix", name_prefix)
if network_config is not None:
pulumi.set(__self__, "network_config", network_config)
if node_config is not None:
pulumi.set(__self__, "node_config", node_config)
if node_count is not None:
pulumi.set(__self__, "node_count", node_count)
if node_locations is not None:
pulumi.set(__self__, "node_locations", node_locations)
if upgrade_settings is not None:
pulumi.set(__self__, "upgrade_settings", upgrade_settings)
if version is not None:
pulumi.set(__self__, "version", version)
@property
@pulumi.getter
def autoscaling(self) -> Optional['outputs.ClusterNodePoolAutoscaling']:
return pulumi.get(self, "autoscaling")
@property
@pulumi.getter(name="initialNodeCount")
def initial_node_count(self) -> Optional[int]:
"""
The number of nodes to create in this
cluster's default node pool. In regional or multi-zonal clusters, this is the
number of nodes per zone. Must be set if `node_pool` is not set. If you're using
`container.NodePool` objects with no default node pool, you'll need to
set this to a value of at least `1`, alongside setting
`remove_default_node_pool` to `true`.
"""
return pulumi.get(self, "initial_node_count")
@property
@pulumi.getter(name="instanceGroupUrls")
def instance_group_urls(self) -> Optional[Sequence[str]]:
return pulumi.get(self, "instance_group_urls")
@property
@pulumi.getter(name="managedInstanceGroupUrls")
def managed_instance_group_urls(self) -> Optional[Sequence[str]]:
return pulumi.get(self, "managed_instance_group_urls")
@property
@pulumi.getter
def management(self) -> Optional['outputs.ClusterNodePoolManagement']:
return pulumi.get(self, "management")
@property
@pulumi.getter(name="maxPodsPerNode")
def max_pods_per_node(self) -> Optional[int]:
return pulumi.get(self, "max_pods_per_node")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
The name of the node pool. If left blank, the provider will
auto-generate a unique name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="namePrefix")
def name_prefix(self) -> Optional[str]:
return pulumi.get(self, "name_prefix")
@property
@pulumi.getter(name="networkConfig")
def network_config(self) -> Optional['outputs.ClusterNodePoolNetworkConfig']:
"""
Configuration for
[Adding Pod IP address ranges](https://cloud.google.com/kubernetes-engine/docs/how-to/multi-pod-cidr) to the node pool. Structure is documented below.
"""
return pulumi.get(self, "network_config")
@property
@pulumi.getter(name="nodeConfig")
def node_config(self) -> Optional['outputs.ClusterNodePoolNodeConfig']:
"""
Parameters used in creating the default node pool.
Generally, this field should not be used at the same time as a
`container.NodePool` or a `node_pool` block; this configuration
manages the default node pool, whose use is not recommended.
Structure is documented below.
"""
return pulumi.get(self, "node_config")
@property
@pulumi.getter(name="nodeCount")
def node_count(self) -> Optional[int]:
return pulumi.get(self, "node_count")
@property
@pulumi.getter(name="nodeLocations")
def node_locations(self) -> Optional[Sequence[str]]:
"""
The list of zones in which the cluster's nodes
are located. Nodes must be in the region of their regional cluster or in the
same region as their cluster's zone for zonal clusters. If this is specified for
a zonal cluster, omit the cluster's zone.
"""
return pulumi.get(self, "node_locations")
@property
@pulumi.getter(name="upgradeSettings")
def upgrade_settings(self) -> Optional['outputs.ClusterNodePoolUpgradeSettings']:
return pulumi.get(self, "upgrade_settings")
@property
@pulumi.getter
def version(self) -> Optional[str]:
return pulumi.get(self, "version")
@pulumi.output_type
class ClusterNodePoolAutoscaling(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "maxNodeCount":
suggest = "max_node_count"
elif key == "minNodeCount":
suggest = "min_node_count"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterNodePoolAutoscaling. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterNodePoolAutoscaling.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterNodePoolAutoscaling.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
max_node_count: int,
min_node_count: int):
pulumi.set(__self__, "max_node_count", max_node_count)
pulumi.set(__self__, "min_node_count", min_node_count)
@property
@pulumi.getter(name="maxNodeCount")
def max_node_count(self) -> int:
return pulumi.get(self, "max_node_count")
@property
@pulumi.getter(name="minNodeCount")
def min_node_count(self) -> int:
return pulumi.get(self, "min_node_count")
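# Illustrative usage sketch (not part of the generated SDK): declaring an
# in-cluster node pool with autoscaling bounds. The names and bounds are
# hypothetical.
#
#   import pulumi_gcp as gcp
#
#   cluster = gcp.container.Cluster(
#       "autoscaled-cluster",
#       node_pools=[gcp.container.ClusterNodePoolArgs(
#           name="default-pool",
#           initial_node_count=1,
#           autoscaling=gcp.container.ClusterNodePoolAutoscalingArgs(
#               min_node_count=1,
#               max_node_count=3,
#           ),
#       )],
#   )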
@pulumi.output_type
class ClusterNodePoolManagement(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "autoRepair":
suggest = "auto_repair"
elif key == "autoUpgrade":
suggest = "auto_upgrade"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterNodePoolManagement. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterNodePoolManagement.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterNodePoolManagement.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
auto_repair: Optional[bool] = None,
auto_upgrade: Optional[bool] = None):
if auto_repair is not None:
pulumi.set(__self__, "auto_repair", auto_repair)
if auto_upgrade is not None:
pulumi.set(__self__, "auto_upgrade", auto_upgrade)
@property
@pulumi.getter(name="autoRepair")
def auto_repair(self) -> Optional[bool]:
return pulumi.get(self, "auto_repair")
@property
@pulumi.getter(name="autoUpgrade")
def auto_upgrade(self) -> Optional[bool]:
return pulumi.get(self, "auto_upgrade")
@pulumi.output_type
class ClusterNodePoolNetworkConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "podRange":
suggest = "pod_range"
elif key == "createPodRange":
suggest = "create_pod_range"
elif key == "podIpv4CidrBlock":
suggest = "pod_ipv4_cidr_block"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterNodePoolNetworkConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterNodePoolNetworkConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterNodePoolNetworkConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
pod_range: str,
create_pod_range: Optional[bool] = None,
pod_ipv4_cidr_block: Optional[str] = None):
"""
:param str pod_range: The ID of the secondary range for pod IPs. If `create_pod_range` is true, this ID is used for the new range. If `create_pod_range` is false, uses an existing secondary range with this ID.
:param bool create_pod_range: Whether to create a new range for pod IPs in this node pool. Defaults are provided for `pod_range` and `pod_ipv4_cidr_block` if they are not specified.
:param str pod_ipv4_cidr_block: The IP address range for pod IPs in this node pool. Only applicable if `create_pod_range` is true. Set to blank to have a range chosen with the default size. Set to /netmask (e.g. /14) to have a range chosen with a specific netmask. Set to a CIDR notation (e.g. 10.96.0.0/14) to pick a specific range to use.
"""
pulumi.set(__self__, "pod_range", pod_range)
if create_pod_range is not None:
pulumi.set(__self__, "create_pod_range", create_pod_range)
if pod_ipv4_cidr_block is not None:
pulumi.set(__self__, "pod_ipv4_cidr_block", pod_ipv4_cidr_block)
@property
@pulumi.getter(name="podRange")
def pod_range(self) -> str:
"""
The ID of the secondary range for pod IPs. If `create_pod_range` is true, this ID is used for the new range. If `create_pod_range` is false, uses an existing secondary range with this ID.
"""
return pulumi.get(self, "pod_range")
@property
@pulumi.getter(name="createPodRange")
def create_pod_range(self) -> Optional[bool]:
"""
Whether to create a new range for pod IPs in this node pool. Defaults are provided for `pod_range` and `pod_ipv4_cidr_block` if they are not specified.
"""
return pulumi.get(self, "create_pod_range")
@property
@pulumi.getter(name="podIpv4CidrBlock")
def pod_ipv4_cidr_block(self) -> Optional[str]:
"""
The IP address range for pod IPs in this node pool. Only applicable if `create_pod_range` is true. Set to blank to have a range chosen with the default size. Set to /netmask (e.g. /14) to have a range chosen with a specific netmask. Set to a CIDR notation (e.g. 10.96.0.0/14) to pick a specific range to use.
"""
return pulumi.get(self, "pod_ipv4_cidr_block")
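# Illustrative usage sketch (not part of the generated SDK): asking GKE to
# create a new secondary range for pod IPs in a node pool, per the field
# semantics above. The range ID, CIDR, and resource names are hypothetical.
#
#   import pulumi_gcp as gcp
#
#   cluster = gcp.container.Cluster(
#       "cidr-cluster",
#       node_pools=[gcp.container.ClusterNodePoolArgs(
#           name="pods-pool",
#           initial_node_count=1,
#           network_config=gcp.container.ClusterNodePoolNetworkConfigArgs(
#               create_pod_range=True,
#               pod_range="pod-range-1",
#               pod_ipv4_cidr_block="10.96.0.0/14",
#           ),
#       )],
#   )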
@pulumi.output_type
class ClusterNodePoolNodeConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "bootDiskKmsKey":
suggest = "boot_disk_kms_key"
elif key == "diskSizeGb":
suggest = "disk_size_gb"
elif key == "diskType":
suggest = "disk_type"
elif key == "ephemeralStorageConfig":
suggest = "ephemeral_storage_config"
elif key == "gcfsConfig":
suggest = "gcfs_config"
elif key == "guestAccelerators":
suggest = "guest_accelerators"
elif key == "imageType":
suggest = "image_type"
elif key == "kubeletConfig":
suggest = "kubelet_config"
elif key == "linuxNodeConfig":
suggest = "linux_node_config"
elif key == "localSsdCount":
suggest = "local_ssd_count"
elif key == "machineType":
suggest = "machine_type"
elif key == "minCpuPlatform":
suggest = "min_cpu_platform"
elif key == "nodeGroup":
suggest = "node_group"
elif key == "oauthScopes":
suggest = "oauth_scopes"
elif key == "sandboxConfig":
suggest = "sandbox_config"
elif key == "serviceAccount":
suggest = "service_account"
elif key == "shieldedInstanceConfig":
suggest = "shielded_instance_config"
elif key == "workloadMetadataConfig":
suggest = "workload_metadata_config"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterNodePoolNodeConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterNodePoolNodeConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterNodePoolNodeConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
boot_disk_kms_key: Optional[str] = None,
disk_size_gb: Optional[int] = None,
disk_type: Optional[str] = None,
ephemeral_storage_config: Optional['outputs.ClusterNodePoolNodeConfigEphemeralStorageConfig'] = None,
gcfs_config: Optional['outputs.ClusterNodePoolNodeConfigGcfsConfig'] = None,
guest_accelerators: Optional[Sequence['outputs.ClusterNodePoolNodeConfigGuestAccelerator']] = None,
image_type: Optional[str] = None,
kubelet_config: Optional['outputs.ClusterNodePoolNodeConfigKubeletConfig'] = None,
labels: Optional[Mapping[str, str]] = None,
linux_node_config: Optional['outputs.ClusterNodePoolNodeConfigLinuxNodeConfig'] = None,
local_ssd_count: Optional[int] = None,
machine_type: Optional[str] = None,
metadata: Optional[Mapping[str, str]] = None,
min_cpu_platform: Optional[str] = None,
node_group: Optional[str] = None,
oauth_scopes: Optional[Sequence[str]] = None,
preemptible: Optional[bool] = None,
sandbox_config: Optional['outputs.ClusterNodePoolNodeConfigSandboxConfig'] = None,
service_account: Optional[str] = None,
shielded_instance_config: Optional['outputs.ClusterNodePoolNodeConfigShieldedInstanceConfig'] = None,
spot: Optional[bool] = None,
tags: Optional[Sequence[str]] = None,
taints: Optional[Sequence['outputs.ClusterNodePoolNodeConfigTaint']] = None,
workload_metadata_config: Optional['outputs.ClusterNodePoolNodeConfigWorkloadMetadataConfig'] = None):
"""
:param str boot_disk_kms_key: The Customer Managed Encryption Key used to encrypt the boot disk attached to each node in the node pool. This should be of the form projects/[KEY_PROJECT_ID]/locations/[LOCATION]/keyRings/[RING_NAME]/cryptoKeys/[KEY_NAME]. For more information about protecting resources with Cloud KMS Keys please see: https://cloud.google.com/compute/docs/disks/customer-managed-encryption
:param int disk_size_gb: Size of the disk attached to each node, specified
in GB. The smallest allowed disk size is 10GB. Defaults to 100GB.
:param str disk_type: Type of the disk attached to each node
(e.g. 'pd-standard', 'pd-balanced' or 'pd-ssd'). If unspecified, the default disk type is 'pd-standard'.
:param 'ClusterNodePoolNodeConfigEphemeralStorageConfigArgs' ephemeral_storage_config: Parameters for the ephemeral storage filesystem. If unspecified, ephemeral storage is backed by the boot disk. Structure is documented below.
:param 'ClusterNodePoolNodeConfigGcfsConfigArgs' gcfs_config: Parameters for the Google Container Filesystem (GCFS).
If unspecified, GCFS will not be enabled on the node pool. When enabling this feature you must specify `image_type = "COS_CONTAINERD"` and `node_version` from GKE versions 1.19 or later to use it.
For GKE versions 1.19, 1.20, and 1.21, the recommended minimum `node_version` would be 1.19.15-gke.1300, 1.20.11-gke.1300, and 1.21.5-gke.1300 respectively.
A `machine_type` that has more than 16 GiB of memory is also recommended.
GCFS must be enabled in order to use [image streaming](https://cloud.google.com/kubernetes-engine/docs/how-to/image-streaming).
Structure is documented below.
:param Sequence['ClusterNodePoolNodeConfigGuestAcceleratorArgs'] guest_accelerators: List of the type and count of accelerator cards attached to the instance.
Structure is documented below.
:param str image_type: The image type to use for this node. Note that changing the image type
will delete and recreate all nodes in the node pool.
:param 'ClusterNodePoolNodeConfigKubeletConfigArgs' kubelet_config: Kubelet configuration; currently supported attributes can be found [here](https://cloud.google.com/sdk/gcloud/reference/beta/container/node-pools/create#--system-config-from-file).
Structure is documented below.
:param Mapping[str, str] labels: The Kubernetes labels (key/value pairs) to be applied to each node. The kubernetes.io/ and k8s.io/ prefixes are
reserved by Kubernetes Core components and cannot be specified.
:param 'ClusterNodePoolNodeConfigLinuxNodeConfigArgs' linux_node_config: Linux node configuration; currently supported attributes can be found [here](https://cloud.google.com/sdk/gcloud/reference/beta/container/node-pools/create#--system-config-from-file).
Note that all validations happen server side. All attributes are optional.
Structure is documented below.
:param int local_ssd_count: Number of local SSDs to use to back ephemeral storage. Uses NVMe interfaces. Each local SSD is 375 GB in size. If zero, local SSDs are not used as ephemeral storage.
:param str machine_type: The name of a Google Compute Engine machine type.
Defaults to `e2-medium`. To create a custom machine type, the value should be set as specified
[here](https://cloud.google.com/compute/docs/reference/latest/instances#machineType).
:param Mapping[str, str] metadata: The metadata key/value pairs assigned to instances in
the cluster. From GKE `1.12` onwards, `disable-legacy-endpoints` is set to
`true` by the API; if `metadata` is set but that default value is not
included, the provider will attempt to unset the value. To avoid this, set the
value in your config.
:param str min_cpu_platform: Minimum CPU platform to be used by this instance.
The instance may be scheduled on the specified or newer CPU platform. Applicable
values are the friendly names of CPU platforms, such as `Intel Haswell`. See the
[official documentation](https://cloud.google.com/compute/docs/instances/specify-min-cpu-platform)
for more information.
:param str node_group: Setting this field will assign instances of this pool to run on the specified node group. This is useful for running workloads on [sole tenant nodes](https://cloud.google.com/compute/docs/nodes/sole-tenant-nodes).
:param Sequence[str] oauth_scopes: The set of Google API scopes to be made available
on all of the node VMs under the "default" service account.
Use the "https://www.googleapis.com/auth/cloud-platform" scope to grant access to all APIs. It is recommended that you set `service_account` to a non-default service account and grant IAM roles to that service account for only the resources that it needs.
:param bool preemptible: A boolean that represents whether or not the underlying node VMs
are preemptible. See the [official documentation](https://cloud.google.com/container-engine/docs/preemptible-vm)
for more information. Defaults to false.
:param 'ClusterNodePoolNodeConfigSandboxConfigArgs' sandbox_config: [GKE Sandbox](https://cloud.google.com/kubernetes-engine/docs/how-to/sandbox-pods) configuration. When enabling this feature you must specify `image_type = "COS_CONTAINERD"` and `node_version = "1.12.7-gke.17"` or later to use it.
Structure is documented below.
:param str service_account: The service account to be used by the Node VMs.
If not specified, the "default" service account is used.
:param 'ClusterNodePoolNodeConfigShieldedInstanceConfigArgs' shielded_instance_config: Shielded Instance options. Structure is documented below.
:param bool spot: A boolean
that represents whether the underlying node VMs are spot. See the [official documentation](https://cloud.google.com/kubernetes-engine/docs/concepts/spot-vms)
for more information. Defaults to false.
:param Sequence[str] tags: The list of instance tags applied to all nodes. Tags are used to identify
valid sources or targets for network firewalls.
:param Sequence['ClusterNodePoolNodeConfigTaintArgs'] taints: A list of [Kubernetes taints](https://kubernetes.io/docs/concepts/configuration/taint-and-toleration/)
to apply to nodes. GKE's API can only set this field on cluster creation.
However, GKE will add taints to your nodes if you enable certain features such
as GPUs. If this field is set, any diffs on this field will cause the provider to
recreate the underlying resource. Taint values can be updated safely in
Kubernetes (e.g. through `kubectl`), and it's recommended that you do not use
this field to manage taints. If you do, `lifecycle.ignore_changes` is
recommended. Structure is documented below.
:param 'ClusterNodePoolNodeConfigWorkloadMetadataConfigArgs' workload_metadata_config: Metadata configuration to expose to workloads on the node pool.
Structure is documented below.
"""
if boot_disk_kms_key is not None:
pulumi.set(__self__, "boot_disk_kms_key", boot_disk_kms_key)
if disk_size_gb is not None:
pulumi.set(__self__, "disk_size_gb", disk_size_gb)
if disk_type is not None:
pulumi.set(__self__, "disk_type", disk_type)
if ephemeral_storage_config is not None:
pulumi.set(__self__, "ephemeral_storage_config", ephemeral_storage_config)
if gcfs_config is not None:
pulumi.set(__self__, "gcfs_config", gcfs_config)
if guest_accelerators is not None:
pulumi.set(__self__, "guest_accelerators", guest_accelerators)
if image_type is not None:
pulumi.set(__self__, "image_type", image_type)
if kubelet_config is not None:
pulumi.set(__self__, "kubelet_config", kubelet_config)
if labels is not None:
pulumi.set(__self__, "labels", labels)
if linux_node_config is not None:
pulumi.set(__self__, "linux_node_config", linux_node_config)
if local_ssd_count is not None:
pulumi.set(__self__, "local_ssd_count", local_ssd_count)
if machine_type is not None:
pulumi.set(__self__, "machine_type", machine_type)
if metadata is not None:
pulumi.set(__self__, "metadata", metadata)
if min_cpu_platform is not None:
pulumi.set(__self__, "min_cpu_platform", min_cpu_platform)
if node_group is not None:
pulumi.set(__self__, "node_group", node_group)
if oauth_scopes is not None:
pulumi.set(__self__, "oauth_scopes", oauth_scopes)
if preemptible is not None:
pulumi.set(__self__, "preemptible", preemptible)
if sandbox_config is not None:
pulumi.set(__self__, "sandbox_config", sandbox_config)
if service_account is not None:
pulumi.set(__self__, "service_account", service_account)
if shielded_instance_config is not None:
pulumi.set(__self__, "shielded_instance_config", shielded_instance_config)
if spot is not None:
pulumi.set(__self__, "spot", spot)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if taints is not None:
pulumi.set(__self__, "taints", taints)
if workload_metadata_config is not None:
pulumi.set(__self__, "workload_metadata_config", workload_metadata_config)
@property
@pulumi.getter(name="bootDiskKmsKey")
def boot_disk_kms_key(self) -> Optional[str]:
"""
The Customer Managed Encryption Key used to encrypt the boot disk attached to each node in the node pool. This should be of the form projects/[KEY_PROJECT_ID]/locations/[LOCATION]/keyRings/[RING_NAME]/cryptoKeys/[KEY_NAME]. For more information about protecting resources with Cloud KMS Keys please see: https://cloud.google.com/compute/docs/disks/customer-managed-encryption
"""
return pulumi.get(self, "boot_disk_kms_key")
@property
@pulumi.getter(name="diskSizeGb")
def disk_size_gb(self) -> Optional[int]:
"""
Size of the disk attached to each node, specified
in GB. The smallest allowed disk size is 10GB. Defaults to 100GB.
"""
return pulumi.get(self, "disk_size_gb")
@property
@pulumi.getter(name="diskType")
def disk_type(self) -> Optional[str]:
"""
Type of the disk attached to each node
(e.g. 'pd-standard', 'pd-balanced' or 'pd-ssd'). If unspecified, the default disk type is 'pd-standard'.
"""
return pulumi.get(self, "disk_type")
@property
@pulumi.getter(name="ephemeralStorageConfig")
def ephemeral_storage_config(self) -> Optional['outputs.ClusterNodePoolNodeConfigEphemeralStorageConfig']:
"""
Parameters for the ephemeral storage filesystem. If unspecified, ephemeral storage is backed by the boot disk. Structure is documented below.
"""
return pulumi.get(self, "ephemeral_storage_config")
@property
@pulumi.getter(name="gcfsConfig")
def gcfs_config(self) -> Optional['outputs.ClusterNodePoolNodeConfigGcfsConfig']:
"""
Parameters for the Google Container Filesystem (GCFS).
If unspecified, GCFS will not be enabled on the node pool. When enabling this feature you must specify `image_type = "COS_CONTAINERD"` and `node_version` from GKE versions 1.19 or later to use it.
For GKE versions 1.19, 1.20, and 1.21, the recommended minimum `node_version` would be 1.19.15-gke.1300, 1.20.11-gke.1300, and 1.21.5-gke.1300 respectively.
A `machine_type` that has more than 16 GiB of memory is also recommended.
GCFS must be enabled in order to use [image streaming](https://cloud.google.com/kubernetes-engine/docs/how-to/image-streaming).
Structure is documented below.
"""
return pulumi.get(self, "gcfs_config")
@property
@pulumi.getter(name="guestAccelerators")
def guest_accelerators(self) -> Optional[Sequence['outputs.ClusterNodePoolNodeConfigGuestAccelerator']]:
"""
List of the type and count of accelerator cards attached to the instance.
Structure is documented below.
"""
return pulumi.get(self, "guest_accelerators")
@property
@pulumi.getter(name="imageType")
def image_type(self) -> Optional[str]:
"""
The image type to use for this node. Note that changing the image type
will delete and recreate all nodes in the node pool.
"""
return pulumi.get(self, "image_type")
@property
@pulumi.getter(name="kubeletConfig")
def kubelet_config(self) -> Optional['outputs.ClusterNodePoolNodeConfigKubeletConfig']:
"""
Kubelet configuration; currently supported attributes can be found [here](https://cloud.google.com/sdk/gcloud/reference/beta/container/node-pools/create#--system-config-from-file).
Structure is documented below.
"""
return pulumi.get(self, "kubelet_config")
@property
@pulumi.getter
def labels(self) -> Optional[Mapping[str, str]]:
"""
The Kubernetes labels (key/value pairs) to be applied to each node. The kubernetes.io/ and k8s.io/ prefixes are
reserved by Kubernetes Core components and cannot be specified.
"""
return pulumi.get(self, "labels")
@property
@pulumi.getter(name="linuxNodeConfig")
def linux_node_config(self) -> Optional['outputs.ClusterNodePoolNodeConfigLinuxNodeConfig']:
"""
Linux node configuration; currently supported attributes can be found [here](https://cloud.google.com/sdk/gcloud/reference/beta/container/node-pools/create#--system-config-from-file).
Note that all validations happen server side. All attributes are optional.
Structure is documented below.
"""
return pulumi.get(self, "linux_node_config")
@property
@pulumi.getter(name="localSsdCount")
def local_ssd_count(self) -> Optional[int]:
"""
Number of local SSDs to use to back ephemeral storage. Uses NVMe interfaces. Each local SSD is 375 GB in size. If zero, local SSDs are not used as ephemeral storage.
"""
return pulumi.get(self, "local_ssd_count")
@property
@pulumi.getter(name="machineType")
def machine_type(self) -> Optional[str]:
"""
The name of a Google Compute Engine machine type.
Defaults to `e2-medium`. To create a custom machine type, the value should be set as specified
[here](https://cloud.google.com/compute/docs/reference/latest/instances#machineType).
"""
return pulumi.get(self, "machine_type")
@property
@pulumi.getter
def metadata(self) -> Optional[Mapping[str, str]]:
"""
The metadata key/value pairs assigned to instances in
the cluster. From GKE `1.12` onwards, `disable-legacy-endpoints` is set to
`true` by the API; if `metadata` is set but that default value is not
included, the provider will attempt to unset the value. To avoid this, set the
value in your config.
"""
return pulumi.get(self, "metadata")
@property
@pulumi.getter(name="minCpuPlatform")
def min_cpu_platform(self) -> Optional[str]:
"""
Minimum CPU platform to be used by this instance.
The instance may be scheduled on the specified or newer CPU platform. Applicable
values are the friendly names of CPU platforms, such as `Intel Haswell`. See the
[official documentation](https://cloud.google.com/compute/docs/instances/specify-min-cpu-platform)
for more information.
"""
return pulumi.get(self, "min_cpu_platform")
@property
@pulumi.getter(name="nodeGroup")
def node_group(self) -> Optional[str]:
"""
Setting this field will assign instances of this pool to run on the specified node group. This is useful for running workloads on [sole tenant nodes](https://cloud.google.com/compute/docs/nodes/sole-tenant-nodes).
"""
return pulumi.get(self, "node_group")
@property
@pulumi.getter(name="oauthScopes")
def oauth_scopes(self) -> Optional[Sequence[str]]:
"""
The set of Google API scopes to be made available
on all of the node VMs under the "default" service account.
Use the "https://www.googleapis.com/auth/cloud-platform" scope to grant access to all APIs. It is recommended that you set `service_account` to a non-default service account and grant IAM roles to that service account for only the resources that it needs.
"""
return pulumi.get(self, "oauth_scopes")
@property
@pulumi.getter
def preemptible(self) -> Optional[bool]:
"""
A boolean that represents whether or not the underlying node VMs
are preemptible. See the [official documentation](https://cloud.google.com/container-engine/docs/preemptible-vm)
for more information. Defaults to false.
"""
return pulumi.get(self, "preemptible")
@property
@pulumi.getter(name="sandboxConfig")
def sandbox_config(self) -> Optional['outputs.ClusterNodePoolNodeConfigSandboxConfig']:
"""
[GKE Sandbox](https://cloud.google.com/kubernetes-engine/docs/how-to/sandbox-pods) configuration. When enabling this feature you must specify `image_type = "COS_CONTAINERD"` and `node_version = "1.12.7-gke.17"` or later to use it.
Structure is documented below.
"""
return pulumi.get(self, "sandbox_config")
@property
@pulumi.getter(name="serviceAccount")
def service_account(self) -> Optional[str]:
"""
The service account to be used by the Node VMs.
If not specified, the "default" service account is used.
"""
return pulumi.get(self, "service_account")
@property
@pulumi.getter(name="shieldedInstanceConfig")
def shielded_instance_config(self) -> Optional['outputs.ClusterNodePoolNodeConfigShieldedInstanceConfig']:
"""
Shielded Instance options. Structure is documented below.
"""
return pulumi.get(self, "shielded_instance_config")
@property
@pulumi.getter
def spot(self) -> Optional[bool]:
"""
A boolean
that represents whether the underlying node VMs are spot. See the [official documentation](https://cloud.google.com/kubernetes-engine/docs/concepts/spot-vms)
for more information. Defaults to false.
"""
return pulumi.get(self, "spot")
@property
@pulumi.getter
def tags(self) -> Optional[Sequence[str]]:
"""
The list of instance tags applied to all nodes. Tags are used to identify
valid sources or targets for network firewalls.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def taints(self) -> Optional[Sequence['outputs.ClusterNodePoolNodeConfigTaint']]:
"""
A list of [Kubernetes taints](https://kubernetes.io/docs/concepts/configuration/taint-and-toleration/)
to apply to nodes. GKE's API can only set this field on cluster creation.
However, GKE will add taints to your nodes if you enable certain features such
as GPUs. If this field is set, any diffs on this field will cause the provider to
recreate the underlying resource. Taint values can be updated safely in
Kubernetes (e.g. through `kubectl`), and it's recommended that you do not use
this field to manage taints. If you do, `lifecycle.ignore_changes` is
recommended. Structure is documented below.
"""
return pulumi.get(self, "taints")
@property
@pulumi.getter(name="workloadMetadataConfig")
def workload_metadata_config(self) -> Optional['outputs.ClusterNodePoolNodeConfigWorkloadMetadataConfig']:
"""
Metadata configuration to expose to workloads on the node pool.
Structure is documented below.
"""
return pulumi.get(self, "workload_metadata_config")
@pulumi.output_type
class ClusterNodePoolNodeConfigEphemeralStorageConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "localSsdCount":
suggest = "local_ssd_count"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterNodePoolNodeConfigEphemeralStorageConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterNodePoolNodeConfigEphemeralStorageConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterNodePoolNodeConfigEphemeralStorageConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
local_ssd_count: int):
"""
:param int local_ssd_count: Number of local SSDs to use to back ephemeral storage. Uses NVMe interfaces. Each local SSD is 375 GB in size. If zero, it means to disable using local SSDs as ephemeral storage.
"""
pulumi.set(__self__, "local_ssd_count", local_ssd_count)
@property
@pulumi.getter(name="localSsdCount")
def local_ssd_count(self) -> int:
"""
Number of local SSDs to use to back ephemeral storage. Uses NVMe interfaces. Each local SSD is 375 GB in size. If zero, it means to disable using local SSDs as ephemeral storage.
"""
return pulumi.get(self, "local_ssd_count")
@pulumi.output_type
class ClusterNodePoolNodeConfigGcfsConfig(dict):
def __init__(__self__, *,
enabled: bool):
"""
:param bool enabled: Whether or not the Google Container Filesystem (GCFS) is enabled.
"""
pulumi.set(__self__, "enabled", enabled)
@property
@pulumi.getter
def enabled(self) -> bool:
"""
Whether or not the Google Container Filesystem (GCFS) is enabled.
"""
return pulumi.get(self, "enabled")
@pulumi.output_type
class ClusterNodePoolNodeConfigGuestAccelerator(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "gpuPartitionSize":
suggest = "gpu_partition_size"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterNodePoolNodeConfigGuestAccelerator. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterNodePoolNodeConfigGuestAccelerator.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterNodePoolNodeConfigGuestAccelerator.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
count: int,
type: str,
gpu_partition_size: Optional[str] = None):
"""
:param int count: The number of the guest accelerator cards exposed to this instance.
:param str type: The accelerator type resource to expose to this instance. E.g. `nvidia-tesla-k80`.
:param str gpu_partition_size: Size of partitions to create on the GPU. Valid values are described in the NVIDIA mig [user guide](https://docs.nvidia.com/datacenter/tesla/mig-user-guide/#partitioning).
"""
pulumi.set(__self__, "count", count)
pulumi.set(__self__, "type", type)
if gpu_partition_size is not None:
pulumi.set(__self__, "gpu_partition_size", gpu_partition_size)
@property
@pulumi.getter
def count(self) -> int:
"""
The number of the guest accelerator cards exposed to this instance.
"""
return pulumi.get(self, "count")
@property
@pulumi.getter
def type(self) -> str:
"""
The accelerator type resource to expose to this instance. E.g. `nvidia-tesla-k80`.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="gpuPartitionSize")
def gpu_partition_size(self) -> Optional[str]:
"""
Size of partitions to create on the GPU. Valid values are described in the NVIDIA mig [user guide](https://docs.nvidia.com/datacenter/tesla/mig-user-guide/#partitioning).
"""
return pulumi.get(self, "gpu_partition_size")
@pulumi.output_type
class ClusterNodePoolNodeConfigKubeletConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "cpuManagerPolicy":
suggest = "cpu_manager_policy"
elif key == "cpuCfsQuota":
suggest = "cpu_cfs_quota"
elif key == "cpuCfsQuotaPeriod":
suggest = "cpu_cfs_quota_period"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterNodePoolNodeConfigKubeletConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterNodePoolNodeConfigKubeletConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterNodePoolNodeConfigKubeletConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
cpu_manager_policy: str,
cpu_cfs_quota: Optional[bool] = None,
cpu_cfs_quota_period: Optional[str] = None):
"""
:param str cpu_manager_policy: The CPU management policy on the node. See
[K8S CPU Management Policies](https://kubernetes.io/docs/tasks/administer-cluster/cpu-management-policies/).
One of `"none"` or `"static"`. Defaults to `none` when `kubelet_config` is unset.
:param bool cpu_cfs_quota: If true, enables CPU CFS quota enforcement for
containers that specify CPU limits.
:param str cpu_cfs_quota_period: The CPU CFS quota period value. Specified
as a sequence of decimal numbers, each with optional fraction and a unit suffix,
such as `"300ms"`. Valid time units are "ns", "us" (or "µs"), "ms", "s", "m",
"h". The value must be a positive duration.
"""
pulumi.set(__self__, "cpu_manager_policy", cpu_manager_policy)
if cpu_cfs_quota is not None:
pulumi.set(__self__, "cpu_cfs_quota", cpu_cfs_quota)
if cpu_cfs_quota_period is not None:
pulumi.set(__self__, "cpu_cfs_quota_period", cpu_cfs_quota_period)
@property
@pulumi.getter(name="cpuManagerPolicy")
def cpu_manager_policy(self) -> str:
"""
The CPU management policy on the node. See
[K8S CPU Management Policies](https://kubernetes.io/docs/tasks/administer-cluster/cpu-management-policies/).
One of `"none"` or `"static"`. Defaults to `none` when `kubelet_config` is unset.
"""
return pulumi.get(self, "cpu_manager_policy")
@property
@pulumi.getter(name="cpuCfsQuota")
def cpu_cfs_quota(self) -> Optional[bool]:
"""
If true, enables CPU CFS quota enforcement for
containers that specify CPU limits.
"""
return pulumi.get(self, "cpu_cfs_quota")
@property
@pulumi.getter(name="cpuCfsQuotaPeriod")
def cpu_cfs_quota_period(self) -> Optional[str]:
"""
The CPU CFS quota period value. Specified
as a sequence of decimal numbers, each with optional fraction and a unit suffix,
such as `"300ms"`. Valid time units are "ns", "us" (or "µs"), "ms", "s", "m",
"h". The value must be a positive duration.
"""
return pulumi.get(self, "cpu_cfs_quota_period")
@pulumi.output_type
class ClusterNodePoolNodeConfigLinuxNodeConfig(dict):
def __init__(__self__, *,
sysctls: Mapping[str, str]):
"""
:param Mapping[str, str] sysctls: The Linux kernel parameters to be applied to the nodes
and all pods running on the nodes. Specified as a map from the key, such as
`net.core.wmem_max`, to a string value.
"""
pulumi.set(__self__, "sysctls", sysctls)
@property
@pulumi.getter
def sysctls(self) -> Mapping[str, str]:
"""
The Linux kernel parameters to be applied to the nodes
and all pods running on the nodes. Specified as a map from the key, such as
`net.core.wmem_max`, to a string value.
"""
return pulumi.get(self, "sysctls")
@pulumi.output_type
class ClusterNodePoolNodeConfigSandboxConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "sandboxType":
suggest = "sandbox_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterNodePoolNodeConfigSandboxConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterNodePoolNodeConfigSandboxConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterNodePoolNodeConfigSandboxConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
sandbox_type: str):
"""
:param str sandbox_type: Which sandbox to use for pods in the node pool.
Accepted values are:
* `"gvisor"`: Pods run within a gVisor sandbox.
"""
pulumi.set(__self__, "sandbox_type", sandbox_type)
@property
@pulumi.getter(name="sandboxType")
def sandbox_type(self) -> str:
"""
Which sandbox to use for pods in the node pool.
Accepted values are:
* `"gvisor"`: Pods run within a gVisor sandbox.
"""
return pulumi.get(self, "sandbox_type")
@pulumi.output_type
class ClusterNodePoolNodeConfigShieldedInstanceConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "enableIntegrityMonitoring":
suggest = "enable_integrity_monitoring"
elif key == "enableSecureBoot":
suggest = "enable_secure_boot"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterNodePoolNodeConfigShieldedInstanceConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterNodePoolNodeConfigShieldedInstanceConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterNodePoolNodeConfigShieldedInstanceConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
enable_integrity_monitoring: Optional[bool] = None,
enable_secure_boot: Optional[bool] = None):
"""
:param bool enable_integrity_monitoring: Defines if the instance has integrity monitoring enabled.
:param bool enable_secure_boot: Defines if the instance has Secure Boot enabled.
"""
if enable_integrity_monitoring is not None:
pulumi.set(__self__, "enable_integrity_monitoring", enable_integrity_monitoring)
if enable_secure_boot is not None:
pulumi.set(__self__, "enable_secure_boot", enable_secure_boot)
@property
@pulumi.getter(name="enableIntegrityMonitoring")
def enable_integrity_monitoring(self) -> Optional[bool]:
"""
Defines if the instance has integrity monitoring enabled.
"""
return pulumi.get(self, "enable_integrity_monitoring")
@property
@pulumi.getter(name="enableSecureBoot")
def enable_secure_boot(self) -> Optional[bool]:
"""
Defines if the instance has Secure Boot enabled.
"""
return pulumi.get(self, "enable_secure_boot")
@pulumi.output_type
class ClusterNodePoolNodeConfigTaint(dict):
def __init__(__self__, *,
effect: str,
key: str,
value: str):
"""
:param str effect: Effect for taint. Accepted values are `NO_SCHEDULE`, `PREFER_NO_SCHEDULE`, and `NO_EXECUTE`.
:param str key: Key for taint.
:param str value: Value for taint.
"""
pulumi.set(__self__, "effect", effect)
pulumi.set(__self__, "key", key)
pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def effect(self) -> str:
"""
Effect for taint. Accepted values are `NO_SCHEDULE`, `PREFER_NO_SCHEDULE`, and `NO_EXECUTE`.
"""
return pulumi.get(self, "effect")
@property
@pulumi.getter
def key(self) -> str:
"""
Key for taint.
"""
return pulumi.get(self, "key")
@property
@pulumi.getter
def value(self) -> str:
"""
Value for taint.
"""
return pulumi.get(self, "value")
@pulumi.output_type
class ClusterNodePoolNodeConfigWorkloadMetadataConfig(dict):
def __init__(__self__, *,
mode: str):
"""
:param str mode: How to expose the node metadata to the workload running on the node.
Accepted values are:
* UNSPECIFIED: Not set.
* GCE_METADATA: Expose all Compute Engine metadata to pods.
* GKE_METADATA: Run the GKE Metadata Server on this node. The GKE Metadata Server exposes a metadata API to workloads that is compatible with the V1 Compute Metadata APIs exposed by the Compute Engine and App Engine Metadata Servers. This feature can only be enabled if [workload identity](https://cloud.google.com/kubernetes-engine/docs/how-to/workload-identity) is enabled at the cluster level.
"""
pulumi.set(__self__, "mode", mode)
@property
@pulumi.getter
def mode(self) -> str:
"""
How to expose the node metadata to the workload running on the node.
Accepted values are:
* UNSPECIFIED: Not set.
* GCE_METADATA: Expose all Compute Engine metadata to pods.
* GKE_METADATA: Run the GKE Metadata Server on this node. The GKE Metadata Server exposes a metadata API to workloads that is compatible with the V1 Compute Metadata APIs exposed by the Compute Engine and App Engine Metadata Servers. This feature can only be enabled if [workload identity](https://cloud.google.com/kubernetes-engine/docs/how-to/workload-identity) is enabled at the cluster level.
"""
return pulumi.get(self, "mode")
@pulumi.output_type
class ClusterNodePoolUpgradeSettings(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "maxSurge":
suggest = "max_surge"
elif key == "maxUnavailable":
suggest = "max_unavailable"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterNodePoolUpgradeSettings. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterNodePoolUpgradeSettings.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterNodePoolUpgradeSettings.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
max_surge: int,
max_unavailable: int):
pulumi.set(__self__, "max_surge", max_surge)
pulumi.set(__self__, "max_unavailable", max_unavailable)
@property
@pulumi.getter(name="maxSurge")
def max_surge(self) -> int:
return pulumi.get(self, "max_surge")
@property
@pulumi.getter(name="maxUnavailable")
def max_unavailable(self) -> int:
return pulumi.get(self, "max_unavailable")
@pulumi.output_type
class ClusterNotificationConfig(dict):
def __init__(__self__, *,
pubsub: 'outputs.ClusterNotificationConfigPubsub'):
"""
:param 'ClusterNotificationConfigPubsubArgs' pubsub: The pubsub config for the cluster's upgrade notifications.
"""
pulumi.set(__self__, "pubsub", pubsub)
@property
@pulumi.getter
def pubsub(self) -> 'outputs.ClusterNotificationConfigPubsub':
"""
The pubsub config for the cluster's upgrade notifications.
"""
return pulumi.get(self, "pubsub")
@pulumi.output_type
class ClusterNotificationConfigPubsub(dict):
def __init__(__self__, *,
enabled: bool,
topic: Optional[str] = None):
"""
:param bool enabled: Whether or not the notification config is enabled.
:param str topic: The pubsub topic to push upgrade notifications to. Must be in the same project as the cluster. Must be in the format: `projects/{project}/topics/{topic}`.
"""
pulumi.set(__self__, "enabled", enabled)
if topic is not None:
pulumi.set(__self__, "topic", topic)
@property
@pulumi.getter
def enabled(self) -> bool:
"""
Whether or not the notification config is enabled.
"""
return pulumi.get(self, "enabled")
@property
@pulumi.getter
def topic(self) -> Optional[str]:
"""
The pubsub topic to push upgrade notifications to. Must be in the same project as the cluster. Must be in the format: `projects/{project}/topics/{topic}`.
"""
return pulumi.get(self, "topic")
@pulumi.output_type
class ClusterPodSecurityPolicyConfig(dict):
def __init__(__self__, *,
enabled: bool):
"""
:param bool enabled: Enable the PodSecurityPolicy controller for this cluster.
If enabled, pods must be valid under a PodSecurityPolicy to be created.
"""
pulumi.set(__self__, "enabled", enabled)
@property
@pulumi.getter
def enabled(self) -> bool:
"""
Enable the PodSecurityPolicy controller for this cluster.
If enabled, pods must be valid under a PodSecurityPolicy to be created.
"""
return pulumi.get(self, "enabled")
@pulumi.output_type
class ClusterPrivateClusterConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "enablePrivateEndpoint":
suggest = "enable_private_endpoint"
elif key == "enablePrivateNodes":
suggest = "enable_private_nodes"
elif key == "masterGlobalAccessConfig":
suggest = "master_global_access_config"
elif key == "masterIpv4CidrBlock":
suggest = "master_ipv4_cidr_block"
elif key == "peeringName":
suggest = "peering_name"
elif key == "privateEndpoint":
suggest = "private_endpoint"
elif key == "publicEndpoint":
suggest = "public_endpoint"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterPrivateClusterConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterPrivateClusterConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterPrivateClusterConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
enable_private_endpoint: bool,
enable_private_nodes: Optional[bool] = None,
master_global_access_config: Optional['outputs.ClusterPrivateClusterConfigMasterGlobalAccessConfig'] = None,
master_ipv4_cidr_block: Optional[str] = None,
peering_name: Optional[str] = None,
private_endpoint: Optional[str] = None,
public_endpoint: Optional[str] = None):
"""
:param bool enable_private_endpoint: When `true`, the cluster's private
endpoint is used as the cluster endpoint and access through the public endpoint
is disabled. When `false`, either endpoint can be used. This field only applies
to private clusters, when `enable_private_nodes` is `true`.
:param bool enable_private_nodes: Enables the private cluster feature,
creating a private endpoint on the cluster. In a private cluster, nodes only
have RFC 1918 private addresses and communicate with the master's private
endpoint via private networking.
:param 'ClusterPrivateClusterConfigMasterGlobalAccessConfigArgs' master_global_access_config: Controls cluster master global
access settings. If unset, the provider will no longer manage this field and will
not modify the previously-set value. Structure is documented below.
:param str master_ipv4_cidr_block: The IP range in CIDR notation to use for
the hosted master network. This range will be used for assigning private IP
addresses to the cluster master(s) and the ILB VIP. This range must not overlap
with any other ranges in use within the cluster's network, and it must be a /28
subnet. See [Private Cluster Limitations](https://cloud.google.com/kubernetes-engine/docs/how-to/private-clusters#req_res_lim)
for more details. This field only applies to private clusters, when
`enable_private_nodes` is `true`.
:param str peering_name: The name of the peering between this cluster and the Google-owned VPC.
:param str private_endpoint: The internal IP address of this cluster's master endpoint.
:param str public_endpoint: The external IP address of this cluster's master endpoint.
"""
pulumi.set(__self__, "enable_private_endpoint", enable_private_endpoint)
if enable_private_nodes is not None:
pulumi.set(__self__, "enable_private_nodes", enable_private_nodes)
if master_global_access_config is not None:
pulumi.set(__self__, "master_global_access_config", master_global_access_config)
if master_ipv4_cidr_block is not None:
pulumi.set(__self__, "master_ipv4_cidr_block", master_ipv4_cidr_block)
if peering_name is not None:
pulumi.set(__self__, "peering_name", peering_name)
if private_endpoint is not None:
pulumi.set(__self__, "private_endpoint", private_endpoint)
if public_endpoint is not None:
pulumi.set(__self__, "public_endpoint", public_endpoint)
@property
@pulumi.getter(name="enablePrivateEndpoint")
def enable_private_endpoint(self) -> bool:
"""
When `true`, the cluster's private
endpoint is used as the cluster endpoint and access through the public endpoint
is disabled. When `false`, either endpoint can be used. This field only applies
to private clusters, when `enable_private_nodes` is `true`.
"""
return pulumi.get(self, "enable_private_endpoint")
@property
@pulumi.getter(name="enablePrivateNodes")
def enable_private_nodes(self) -> Optional[bool]:
"""
Enables the private cluster feature,
creating a private endpoint on the cluster. In a private cluster, nodes only
have RFC 1918 private addresses and communicate with the master's private
endpoint via private networking.
"""
return pulumi.get(self, "enable_private_nodes")
@property
@pulumi.getter(name="masterGlobalAccessConfig")
def master_global_access_config(self) -> Optional['outputs.ClusterPrivateClusterConfigMasterGlobalAccessConfig']:
"""
Controls cluster master global
access settings. If unset, the provider will no longer manage this field and will
not modify the previously-set value. Structure is documented below.
"""
return pulumi.get(self, "master_global_access_config")
@property
@pulumi.getter(name="masterIpv4CidrBlock")
def master_ipv4_cidr_block(self) -> Optional[str]:
"""
The IP range in CIDR notation to use for
the hosted master network. This range will be used for assigning private IP
addresses to the cluster master(s) and the ILB VIP. This range must not overlap
with any other ranges in use within the cluster's network, and it must be a /28
subnet. See [Private Cluster Limitations](https://cloud.google.com/kubernetes-engine/docs/how-to/private-clusters#req_res_lim)
for more details. This field only applies to private clusters, when
`enable_private_nodes` is `true`.
"""
return pulumi.get(self, "master_ipv4_cidr_block")
@property
@pulumi.getter(name="peeringName")
def peering_name(self) -> Optional[str]:
"""
The name of the peering between this cluster and the Google-owned VPC.
"""
return pulumi.get(self, "peering_name")
@property
@pulumi.getter(name="privateEndpoint")
def private_endpoint(self) -> Optional[str]:
"""
The internal IP address of this cluster's master endpoint.
"""
return pulumi.get(self, "private_endpoint")
@property
@pulumi.getter(name="publicEndpoint")
def public_endpoint(self) -> Optional[str]:
"""
The external IP address of this cluster's master endpoint.
"""
return pulumi.get(self, "public_endpoint")
@pulumi.output_type
class ClusterPrivateClusterConfigMasterGlobalAccessConfig(dict):
def __init__(__self__, *,
enabled: bool):
"""
:param bool enabled: Whether the cluster master is accessible globally or not.
"""
pulumi.set(__self__, "enabled", enabled)
@property
@pulumi.getter
def enabled(self) -> bool:
"""
Whether the cluster master is accessible globally or not.
"""
return pulumi.get(self, "enabled")
@pulumi.output_type
class ClusterReleaseChannel(dict):
def __init__(__self__, *,
channel: str):
"""
:param str channel: The selected release channel.
Accepted values are:
* UNSPECIFIED: Not set.
* RAPID: Weekly upgrade cadence; Early testers and developers who require new features.
* REGULAR: Multiple per month upgrade cadence; Production users who need features not yet offered in the Stable channel.
* STABLE: Every few months upgrade cadence; Production users who need stability above all else, and for whom frequent upgrades are too risky.
"""
pulumi.set(__self__, "channel", channel)
@property
@pulumi.getter
def channel(self) -> str:
"""
The selected release channel.
Accepted values are:
* UNSPECIFIED: Not set.
* RAPID: Weekly upgrade cadence; Early testers and developers who require new features.
* REGULAR: Multiple per month upgrade cadence; Production users who need features not yet offered in the Stable channel.
* STABLE: Every few months upgrade cadence; Production users who need stability above all else, and for whom frequent upgrades are too risky.
"""
return pulumi.get(self, "channel")
@pulumi.output_type
class ClusterResourceUsageExportConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "bigqueryDestination":
suggest = "bigquery_destination"
elif key == "enableNetworkEgressMetering":
suggest = "enable_network_egress_metering"
elif key == "enableResourceConsumptionMetering":
suggest = "enable_resource_consumption_metering"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterResourceUsageExportConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterResourceUsageExportConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterResourceUsageExportConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
bigquery_destination: 'outputs.ClusterResourceUsageExportConfigBigqueryDestination',
enable_network_egress_metering: Optional[bool] = None,
enable_resource_consumption_metering: Optional[bool] = None):
"""
:param 'ClusterResourceUsageExportConfigBigqueryDestinationArgs' bigquery_destination: Parameters for using BigQuery as the destination of resource usage export.
:param bool enable_network_egress_metering: Whether to enable network egress metering for this cluster. If enabled, a daemonset will be created
in the cluster to meter network egress traffic.
:param bool enable_resource_consumption_metering: Whether to enable resource
consumption metering on this cluster. When enabled, a table will be created in
the resource export BigQuery dataset to store resource consumption data. The
resulting table can be joined with the resource usage table or with BigQuery
billing export. Defaults to `true`.
"""
pulumi.set(__self__, "bigquery_destination", bigquery_destination)
if enable_network_egress_metering is not None:
pulumi.set(__self__, "enable_network_egress_metering", enable_network_egress_metering)
if enable_resource_consumption_metering is not None:
pulumi.set(__self__, "enable_resource_consumption_metering", enable_resource_consumption_metering)
@property
@pulumi.getter(name="bigqueryDestination")
def bigquery_destination(self) -> 'outputs.ClusterResourceUsageExportConfigBigqueryDestination':
"""
Parameters for using BigQuery as the destination of resource usage export.
"""
return pulumi.get(self, "bigquery_destination")
@property
@pulumi.getter(name="enableNetworkEgressMetering")
def enable_network_egress_metering(self) -> Optional[bool]:
"""
Whether to enable network egress metering for this cluster. If enabled, a daemonset will be created
in the cluster to meter network egress traffic.
"""
return pulumi.get(self, "enable_network_egress_metering")
@property
@pulumi.getter(name="enableResourceConsumptionMetering")
def enable_resource_consumption_metering(self) -> Optional[bool]:
"""
Whether to enable resource
consumption metering on this cluster. When enabled, a table will be created in
the resource export BigQuery dataset to store resource consumption data. The
resulting table can be joined with the resource usage table or with BigQuery
billing export. Defaults to `true`.
"""
return pulumi.get(self, "enable_resource_consumption_metering")
@pulumi.output_type
class ClusterResourceUsageExportConfigBigqueryDestination(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "datasetId":
suggest = "dataset_id"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterResourceUsageExportConfigBigqueryDestination. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterResourceUsageExportConfigBigqueryDestination.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterResourceUsageExportConfigBigqueryDestination.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
dataset_id: str):
pulumi.set(__self__, "dataset_id", dataset_id)
@property
@pulumi.getter(name="datasetId")
def dataset_id(self) -> str:
return pulumi.get(self, "dataset_id")
@pulumi.output_type
class ClusterVerticalPodAutoscaling(dict):
def __init__(__self__, *,
enabled: bool):
"""
:param bool enabled: Enables vertical pod autoscaling.
"""
pulumi.set(__self__, "enabled", enabled)
@property
@pulumi.getter
def enabled(self) -> bool:
"""
Enables vertical pod autoscaling.
"""
return pulumi.get(self, "enabled")
@pulumi.output_type
class ClusterWorkloadIdentityConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "workloadPool":
suggest = "workload_pool"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in ClusterWorkloadIdentityConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
ClusterWorkloadIdentityConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
ClusterWorkloadIdentityConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
workload_pool: Optional[str] = None):
"""
:param str workload_pool: The workload pool to attach all Kubernetes service accounts to. Currently, the only supported identity namespace is the project of the cluster.
"""
if workload_pool is not None:
pulumi.set(__self__, "workload_pool", workload_pool)
@property
@pulumi.getter(name="workloadPool")
def workload_pool(self) -> Optional[str]:
"""
The workload pool to attach all Kubernetes service accounts to. Currently, the only supported identity namespace is the project of the cluster.
"""
return pulumi.get(self, "workload_pool")
@pulumi.output_type
class NodePoolAutoscaling(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "maxNodeCount":
suggest = "max_node_count"
elif key == "minNodeCount":
suggest = "min_node_count"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in NodePoolAutoscaling. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
NodePoolAutoscaling.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
NodePoolAutoscaling.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
max_node_count: int,
min_node_count: int):
"""
:param int max_node_count: Maximum number of nodes in the NodePool. Must be >= min_node_count.
:param int min_node_count: Minimum number of nodes in the NodePool. Must be >=0 and
<= `max_node_count`.
"""
pulumi.set(__self__, "max_node_count", max_node_count)
pulumi.set(__self__, "min_node_count", min_node_count)
@property
@pulumi.getter(name="maxNodeCount")
def max_node_count(self) -> int:
"""
Maximum number of nodes in the NodePool. Must be >= min_node_count.
"""
return pulumi.get(self, "max_node_count")
@property
@pulumi.getter(name="minNodeCount")
def min_node_count(self) -> int:
"""
Minimum number of nodes in the NodePool. Must be >=0 and
<= `max_node_count`.
"""
return pulumi.get(self, "min_node_count")
@pulumi.output_type
class NodePoolManagement(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "autoRepair":
suggest = "auto_repair"
elif key == "autoUpgrade":
suggest = "auto_upgrade"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in NodePoolManagement. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
NodePoolManagement.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
NodePoolManagement.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
auto_repair: Optional[bool] = None,
auto_upgrade: Optional[bool] = None):
"""
:param bool auto_repair: Whether the nodes will be automatically repaired.
:param bool auto_upgrade: Whether the nodes will be automatically upgraded.
"""
if auto_repair is not None:
pulumi.set(__self__, "auto_repair", auto_repair)
if auto_upgrade is not None:
pulumi.set(__self__, "auto_upgrade", auto_upgrade)
@property
@pulumi.getter(name="autoRepair")
def auto_repair(self) -> Optional[bool]:
"""
Whether the nodes will be automatically repaired.
"""
return pulumi.get(self, "auto_repair")
@property
@pulumi.getter(name="autoUpgrade")
def auto_upgrade(self) -> Optional[bool]:
"""
Whether the nodes will be automatically upgraded.
"""
return pulumi.get(self, "auto_upgrade")
@pulumi.output_type
class NodePoolNetworkConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "podRange":
suggest = "pod_range"
elif key == "createPodRange":
suggest = "create_pod_range"
elif key == "podIpv4CidrBlock":
suggest = "pod_ipv4_cidr_block"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in NodePoolNetworkConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
NodePoolNetworkConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
NodePoolNetworkConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
pod_range: str,
create_pod_range: Optional[bool] = None,
pod_ipv4_cidr_block: Optional[str] = None):
pulumi.set(__self__, "pod_range", pod_range)
if create_pod_range is not None:
pulumi.set(__self__, "create_pod_range", create_pod_range)
if pod_ipv4_cidr_block is not None:
pulumi.set(__self__, "pod_ipv4_cidr_block", pod_ipv4_cidr_block)
@property
@pulumi.getter(name="podRange")
def pod_range(self) -> str:
return pulumi.get(self, "pod_range")
@property
@pulumi.getter(name="createPodRange")
def create_pod_range(self) -> Optional[bool]:
return pulumi.get(self, "create_pod_range")
@property
@pulumi.getter(name="podIpv4CidrBlock")
def pod_ipv4_cidr_block(self) -> Optional[str]:
return pulumi.get(self, "pod_ipv4_cidr_block")
@pulumi.output_type
class NodePoolNodeConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "bootDiskKmsKey":
suggest = "boot_disk_kms_key"
elif key == "diskSizeGb":
suggest = "disk_size_gb"
elif key == "diskType":
suggest = "disk_type"
elif key == "ephemeralStorageConfig":
suggest = "ephemeral_storage_config"
elif key == "gcfsConfig":
suggest = "gcfs_config"
elif key == "guestAccelerators":
suggest = "guest_accelerators"
elif key == "imageType":
suggest = "image_type"
elif key == "kubeletConfig":
suggest = "kubelet_config"
elif key == "linuxNodeConfig":
suggest = "linux_node_config"
elif key == "localSsdCount":
suggest = "local_ssd_count"
elif key == "machineType":
suggest = "machine_type"
elif key == "minCpuPlatform":
suggest = "min_cpu_platform"
elif key == "nodeGroup":
suggest = "node_group"
elif key == "oauthScopes":
suggest = "oauth_scopes"
elif key == "sandboxConfig":
suggest = "sandbox_config"
elif key == "serviceAccount":
suggest = "service_account"
elif key == "shieldedInstanceConfig":
suggest = "shielded_instance_config"
elif key == "workloadMetadataConfig":
suggest = "workload_metadata_config"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in NodePoolNodeConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
NodePoolNodeConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
NodePoolNodeConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
boot_disk_kms_key: Optional[str] = None,
disk_size_gb: Optional[int] = None,
disk_type: Optional[str] = None,
ephemeral_storage_config: Optional['outputs.NodePoolNodeConfigEphemeralStorageConfig'] = None,
gcfs_config: Optional['outputs.NodePoolNodeConfigGcfsConfig'] = None,
guest_accelerators: Optional[Sequence['outputs.NodePoolNodeConfigGuestAccelerator']] = None,
image_type: Optional[str] = None,
kubelet_config: Optional['outputs.NodePoolNodeConfigKubeletConfig'] = None,
labels: Optional[Mapping[str, str]] = None,
linux_node_config: Optional['outputs.NodePoolNodeConfigLinuxNodeConfig'] = None,
local_ssd_count: Optional[int] = None,
machine_type: Optional[str] = None,
metadata: Optional[Mapping[str, str]] = None,
min_cpu_platform: Optional[str] = None,
node_group: Optional[str] = None,
oauth_scopes: Optional[Sequence[str]] = None,
preemptible: Optional[bool] = None,
sandbox_config: Optional['outputs.NodePoolNodeConfigSandboxConfig'] = None,
service_account: Optional[str] = None,
shielded_instance_config: Optional['outputs.NodePoolNodeConfigShieldedInstanceConfig'] = None,
spot: Optional[bool] = None,
tags: Optional[Sequence[str]] = None,
taints: Optional[Sequence['outputs.NodePoolNodeConfigTaint']] = None,
workload_metadata_config: Optional['outputs.NodePoolNodeConfigWorkloadMetadataConfig'] = None):
if boot_disk_kms_key is not None:
pulumi.set(__self__, "boot_disk_kms_key", boot_disk_kms_key)
if disk_size_gb is not None:
pulumi.set(__self__, "disk_size_gb", disk_size_gb)
if disk_type is not None:
pulumi.set(__self__, "disk_type", disk_type)
if ephemeral_storage_config is not None:
pulumi.set(__self__, "ephemeral_storage_config", ephemeral_storage_config)
if gcfs_config is not None:
pulumi.set(__self__, "gcfs_config", gcfs_config)
if guest_accelerators is not None:
pulumi.set(__self__, "guest_accelerators", guest_accelerators)
if image_type is not None:
pulumi.set(__self__, "image_type", image_type)
if kubelet_config is not None:
pulumi.set(__self__, "kubelet_config", kubelet_config)
if labels is not None:
pulumi.set(__self__, "labels", labels)
if linux_node_config is not None:
pulumi.set(__self__, "linux_node_config", linux_node_config)
if local_ssd_count is not None:
pulumi.set(__self__, "local_ssd_count", local_ssd_count)
if machine_type is not None:
pulumi.set(__self__, "machine_type", machine_type)
if metadata is not None:
pulumi.set(__self__, "metadata", metadata)
if min_cpu_platform is not None:
pulumi.set(__self__, "min_cpu_platform", min_cpu_platform)
if node_group is not None:
pulumi.set(__self__, "node_group", node_group)
if oauth_scopes is not None:
pulumi.set(__self__, "oauth_scopes", oauth_scopes)
if preemptible is not None:
pulumi.set(__self__, "preemptible", preemptible)
if sandbox_config is not None:
pulumi.set(__self__, "sandbox_config", sandbox_config)
if service_account is not None:
pulumi.set(__self__, "service_account", service_account)
if shielded_instance_config is not None:
pulumi.set(__self__, "shielded_instance_config", shielded_instance_config)
if spot is not None:
pulumi.set(__self__, "spot", spot)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if taints is not None:
pulumi.set(__self__, "taints", taints)
if workload_metadata_config is not None:
pulumi.set(__self__, "workload_metadata_config", workload_metadata_config)
@property
@pulumi.getter(name="bootDiskKmsKey")
def boot_disk_kms_key(self) -> Optional[str]:
return pulumi.get(self, "boot_disk_kms_key")
@property
@pulumi.getter(name="diskSizeGb")
def disk_size_gb(self) -> Optional[int]:
return pulumi.get(self, "disk_size_gb")
@property
@pulumi.getter(name="diskType")
def disk_type(self) -> Optional[str]:
return pulumi.get(self, "disk_type")
@property
@pulumi.getter(name="ephemeralStorageConfig")
def ephemeral_storage_config(self) -> Optional['outputs.NodePoolNodeConfigEphemeralStorageConfig']:
return pulumi.get(self, "ephemeral_storage_config")
@property
@pulumi.getter(name="gcfsConfig")
def gcfs_config(self) -> Optional['outputs.NodePoolNodeConfigGcfsConfig']:
return pulumi.get(self, "gcfs_config")
@property
@pulumi.getter(name="guestAccelerators")
def guest_accelerators(self) -> Optional[Sequence['outputs.NodePoolNodeConfigGuestAccelerator']]:
return pulumi.get(self, "guest_accelerators")
@property
@pulumi.getter(name="imageType")
def image_type(self) -> Optional[str]:
return pulumi.get(self, "image_type")
@property
@pulumi.getter(name="kubeletConfig")
def kubelet_config(self) -> Optional['outputs.NodePoolNodeConfigKubeletConfig']:
return pulumi.get(self, "kubelet_config")
@property
@pulumi.getter
def labels(self) -> Optional[Mapping[str, str]]:
return pulumi.get(self, "labels")
@property
@pulumi.getter(name="linuxNodeConfig")
def linux_node_config(self) -> Optional['outputs.NodePoolNodeConfigLinuxNodeConfig']:
return pulumi.get(self, "linux_node_config")
@property
@pulumi.getter(name="localSsdCount")
def local_ssd_count(self) -> Optional[int]:
return pulumi.get(self, "local_ssd_count")
@property
@pulumi.getter(name="machineType")
def machine_type(self) -> Optional[str]:
return pulumi.get(self, "machine_type")
@property
@pulumi.getter
def metadata(self) -> Optional[Mapping[str, str]]:
return pulumi.get(self, "metadata")
@property
@pulumi.getter(name="minCpuPlatform")
def min_cpu_platform(self) -> Optional[str]:
return pulumi.get(self, "min_cpu_platform")
@property
@pulumi.getter(name="nodeGroup")
def node_group(self) -> Optional[str]:
return pulumi.get(self, "node_group")
@property
@pulumi.getter(name="oauthScopes")
def oauth_scopes(self) -> Optional[Sequence[str]]:
return pulumi.get(self, "oauth_scopes")
@property
@pulumi.getter
def preemptible(self) -> Optional[bool]:
return pulumi.get(self, "preemptible")
@property
@pulumi.getter(name="sandboxConfig")
def sandbox_config(self) -> Optional['outputs.NodePoolNodeConfigSandboxConfig']:
return pulumi.get(self, "sandbox_config")
@property
@pulumi.getter(name="serviceAccount")
def service_account(self) -> Optional[str]:
return pulumi.get(self, "service_account")
@property
@pulumi.getter(name="shieldedInstanceConfig")
def shielded_instance_config(self) -> Optional['outputs.NodePoolNodeConfigShieldedInstanceConfig']:
return pulumi.get(self, "shielded_instance_config")
@property
@pulumi.getter
def spot(self) -> Optional[bool]:
return pulumi.get(self, "spot")
@property
@pulumi.getter
def tags(self) -> Optional[Sequence[str]]:
return pulumi.get(self, "tags")
@property
@pulumi.getter
def taints(self) -> Optional[Sequence['outputs.NodePoolNodeConfigTaint']]:
return pulumi.get(self, "taints")
@property
@pulumi.getter(name="workloadMetadataConfig")
def workload_metadata_config(self) -> Optional['outputs.NodePoolNodeConfigWorkloadMetadataConfig']:
return pulumi.get(self, "workload_metadata_config")
@pulumi.output_type
class NodePoolNodeConfigEphemeralStorageConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "localSsdCount":
suggest = "local_ssd_count"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in NodePoolNodeConfigEphemeralStorageConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
NodePoolNodeConfigEphemeralStorageConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
NodePoolNodeConfigEphemeralStorageConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
local_ssd_count: int):
pulumi.set(__self__, "local_ssd_count", local_ssd_count)
@property
@pulumi.getter(name="localSsdCount")
def local_ssd_count(self) -> int:
return pulumi.get(self, "local_ssd_count")
@pulumi.output_type
class NodePoolNodeConfigGcfsConfig(dict):
def __init__(__self__, *,
enabled: bool):
pulumi.set(__self__, "enabled", enabled)
@property
@pulumi.getter
def enabled(self) -> bool:
return pulumi.get(self, "enabled")
@pulumi.output_type
class NodePoolNodeConfigGuestAccelerator(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "gpuPartitionSize":
suggest = "gpu_partition_size"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in NodePoolNodeConfigGuestAccelerator. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
NodePoolNodeConfigGuestAccelerator.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
NodePoolNodeConfigGuestAccelerator.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
count: int,
type: str,
gpu_partition_size: Optional[str] = None):
pulumi.set(__self__, "count", count)
pulumi.set(__self__, "type", type)
if gpu_partition_size is not None:
pulumi.set(__self__, "gpu_partition_size", gpu_partition_size)
@property
@pulumi.getter
def count(self) -> int:
return pulumi.get(self, "count")
@property
@pulumi.getter
def type(self) -> str:
return pulumi.get(self, "type")
@property
@pulumi.getter(name="gpuPartitionSize")
def gpu_partition_size(self) -> Optional[str]:
return pulumi.get(self, "gpu_partition_size")
@pulumi.output_type
class NodePoolNodeConfigKubeletConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "cpuManagerPolicy":
suggest = "cpu_manager_policy"
elif key == "cpuCfsQuota":
suggest = "cpu_cfs_quota"
elif key == "cpuCfsQuotaPeriod":
suggest = "cpu_cfs_quota_period"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in NodePoolNodeConfigKubeletConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
NodePoolNodeConfigKubeletConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
NodePoolNodeConfigKubeletConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
cpu_manager_policy: str,
cpu_cfs_quota: Optional[bool] = None,
cpu_cfs_quota_period: Optional[str] = None):
pulumi.set(__self__, "cpu_manager_policy", cpu_manager_policy)
if cpu_cfs_quota is not None:
pulumi.set(__self__, "cpu_cfs_quota", cpu_cfs_quota)
if cpu_cfs_quota_period is not None:
pulumi.set(__self__, "cpu_cfs_quota_period", cpu_cfs_quota_period)
@property
@pulumi.getter(name="cpuManagerPolicy")
def cpu_manager_policy(self) -> str:
return pulumi.get(self, "cpu_manager_policy")
@property
@pulumi.getter(name="cpuCfsQuota")
def cpu_cfs_quota(self) -> Optional[bool]:
return pulumi.get(self, "cpu_cfs_quota")
@property
@pulumi.getter(name="cpuCfsQuotaPeriod")
def cpu_cfs_quota_period(self) -> Optional[str]:
return pulumi.get(self, "cpu_cfs_quota_period")
@pulumi.output_type
class NodePoolNodeConfigLinuxNodeConfig(dict):
def __init__(__self__, *,
sysctls: Mapping[str, str]):
pulumi.set(__self__, "sysctls", sysctls)
@property
@pulumi.getter
def sysctls(self) -> Mapping[str, str]:
return pulumi.get(self, "sysctls")
@pulumi.output_type
class NodePoolNodeConfigSandboxConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "sandboxType":
suggest = "sandbox_type"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in NodePoolNodeConfigSandboxConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
NodePoolNodeConfigSandboxConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
NodePoolNodeConfigSandboxConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
sandbox_type: str):
pulumi.set(__self__, "sandbox_type", sandbox_type)
@property
@pulumi.getter(name="sandboxType")
def sandbox_type(self) -> str:
return pulumi.get(self, "sandbox_type")
@pulumi.output_type
class NodePoolNodeConfigShieldedInstanceConfig(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "enableIntegrityMonitoring":
suggest = "enable_integrity_monitoring"
elif key == "enableSecureBoot":
suggest = "enable_secure_boot"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in NodePoolNodeConfigShieldedInstanceConfig. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
NodePoolNodeConfigShieldedInstanceConfig.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
NodePoolNodeConfigShieldedInstanceConfig.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
enable_integrity_monitoring: Optional[bool] = None,
enable_secure_boot: Optional[bool] = None):
if enable_integrity_monitoring is not None:
pulumi.set(__self__, "enable_integrity_monitoring", enable_integrity_monitoring)
if enable_secure_boot is not None:
pulumi.set(__self__, "enable_secure_boot", enable_secure_boot)
@property
@pulumi.getter(name="enableIntegrityMonitoring")
def enable_integrity_monitoring(self) -> Optional[bool]:
return pulumi.get(self, "enable_integrity_monitoring")
@property
@pulumi.getter(name="enableSecureBoot")
def enable_secure_boot(self) -> Optional[bool]:
return pulumi.get(self, "enable_secure_boot")
@pulumi.output_type
class NodePoolNodeConfigTaint(dict):
def __init__(__self__, *,
effect: str,
key: str,
value: str):
pulumi.set(__self__, "effect", effect)
pulumi.set(__self__, "key", key)
pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def effect(self) -> str:
return pulumi.get(self, "effect")
@property
@pulumi.getter
def key(self) -> str:
return pulumi.get(self, "key")
@property
@pulumi.getter
def value(self) -> str:
return pulumi.get(self, "value")
@pulumi.output_type
class NodePoolNodeConfigWorkloadMetadataConfig(dict):
def __init__(__self__, *,
mode: str):
pulumi.set(__self__, "mode", mode)
@property
@pulumi.getter
def mode(self) -> str:
return pulumi.get(self, "mode")
@pulumi.output_type
class NodePoolUpgradeSettings(dict):
@staticmethod
def __key_warning(key: str):
suggest = None
if key == "maxSurge":
suggest = "max_surge"
elif key == "maxUnavailable":
suggest = "max_unavailable"
if suggest:
pulumi.log.warn(f"Key '{key}' not found in NodePoolUpgradeSettings. Access the value via the '{suggest}' property getter instead.")
def __getitem__(self, key: str) -> Any:
NodePoolUpgradeSettings.__key_warning(key)
return super().__getitem__(key)
def get(self, key: str, default = None) -> Any:
NodePoolUpgradeSettings.__key_warning(key)
return super().get(key, default)
def __init__(__self__, *,
max_surge: int,
max_unavailable: int):
"""
:param int max_surge: The number of additional nodes that can be added to the node pool during
an upgrade. Increasing `max_surge` raises the number of nodes that can be upgraded simultaneously.
Can be set to 0 or greater.
:param int max_unavailable: The number of nodes that can be simultaneously unavailable during
an upgrade. Increasing `max_unavailable` raises the number of nodes that can be upgraded in
parallel. Can be set to 0 or greater.
"""
pulumi.set(__self__, "max_surge", max_surge)
pulumi.set(__self__, "max_unavailable", max_unavailable)
@property
@pulumi.getter(name="maxSurge")
def max_surge(self) -> int:
"""
The number of additional nodes that can be added to the node pool during
an upgrade. Increasing `max_surge` raises the number of nodes that can be upgraded simultaneously.
Can be set to 0 or greater.
"""
return pulumi.get(self, "max_surge")
@property
@pulumi.getter(name="maxUnavailable")
def max_unavailable(self) -> int:
"""
The number of nodes that can be simultaneously unavailable during
an upgrade. Increasing `max_unavailable` raises the number of nodes that can be upgraded in
parallel. Can be set to 0 or greater.
"""
return pulumi.get(self, "max_unavailable")
@pulumi.output_type
class GetClusterAddonsConfigResult(dict):
def __init__(__self__, *,
cloudrun_configs: Sequence['outputs.GetClusterAddonsConfigCloudrunConfigResult'],
config_connector_configs: Sequence['outputs.GetClusterAddonsConfigConfigConnectorConfigResult'],
dns_cache_configs: Sequence['outputs.GetClusterAddonsConfigDnsCacheConfigResult'],
gce_persistent_disk_csi_driver_configs: Sequence['outputs.GetClusterAddonsConfigGcePersistentDiskCsiDriverConfigResult'],
horizontal_pod_autoscalings: Sequence['outputs.GetClusterAddonsConfigHorizontalPodAutoscalingResult'],
http_load_balancings: Sequence['outputs.GetClusterAddonsConfigHttpLoadBalancingResult'],
istio_configs: Sequence['outputs.GetClusterAddonsConfigIstioConfigResult'],
kalm_configs: Sequence['outputs.GetClusterAddonsConfigKalmConfigResult'],
network_policy_configs: Sequence['outputs.GetClusterAddonsConfigNetworkPolicyConfigResult']):
pulumi.set(__self__, "cloudrun_configs", cloudrun_configs)
pulumi.set(__self__, "config_connector_configs", config_connector_configs)
pulumi.set(__self__, "dns_cache_configs", dns_cache_configs)
pulumi.set(__self__, "gce_persistent_disk_csi_driver_configs", gce_persistent_disk_csi_driver_configs)
pulumi.set(__self__, "horizontal_pod_autoscalings", horizontal_pod_autoscalings)
pulumi.set(__self__, "http_load_balancings", http_load_balancings)
pulumi.set(__self__, "istio_configs", istio_configs)
pulumi.set(__self__, "kalm_configs", kalm_configs)
pulumi.set(__self__, "network_policy_configs", network_policy_configs)
@property
@pulumi.getter(name="cloudrunConfigs")
def cloudrun_configs(self) -> Sequence['outputs.GetClusterAddonsConfigCloudrunConfigResult']:
return pulumi.get(self, "cloudrun_configs")
@property
@pulumi.getter(name="configConnectorConfigs")
def config_connector_configs(self) -> Sequence['outputs.GetClusterAddonsConfigConfigConnectorConfigResult']:
return pulumi.get(self, "config_connector_configs")
@property
@pulumi.getter(name="dnsCacheConfigs")
def dns_cache_configs(self) -> Sequence['outputs.GetClusterAddonsConfigDnsCacheConfigResult']:
return pulumi.get(self, "dns_cache_configs")
@property
@pulumi.getter(name="gcePersistentDiskCsiDriverConfigs")
def gce_persistent_disk_csi_driver_configs(self) -> Sequence['outputs.GetClusterAddonsConfigGcePersistentDiskCsiDriverConfigResult']:
return pulumi.get(self, "gce_persistent_disk_csi_driver_configs")
@property
@pulumi.getter(name="horizontalPodAutoscalings")
def horizontal_pod_autoscalings(self) -> Sequence['outputs.GetClusterAddonsConfigHorizontalPodAutoscalingResult']:
return pulumi.get(self, "horizontal_pod_autoscalings")
@property
@pulumi.getter(name="httpLoadBalancings")
def http_load_balancings(self) -> Sequence['outputs.GetClusterAddonsConfigHttpLoadBalancingResult']:
return pulumi.get(self, "http_load_balancings")
@property
@pulumi.getter(name="istioConfigs")
def istio_configs(self) -> Sequence['outputs.GetClusterAddonsConfigIstioConfigResult']:
return pulumi.get(self, "istio_configs")
@property
@pulumi.getter(name="kalmConfigs")
def kalm_configs(self) -> Sequence['outputs.GetClusterAddonsConfigKalmConfigResult']:
return pulumi.get(self, "kalm_configs")
@property
@pulumi.getter(name="networkPolicyConfigs")
def network_policy_configs(self) -> Sequence['outputs.GetClusterAddonsConfigNetworkPolicyConfigResult']:
return pulumi.get(self, "network_policy_configs")
@pulumi.output_type
class GetClusterAddonsConfigCloudrunConfigResult(dict):
def __init__(__self__, *,
disabled: bool,
load_balancer_type: str):
pulumi.set(__self__, "disabled", disabled)
pulumi.set(__self__, "load_balancer_type", load_balancer_type)
@property
@pulumi.getter
def disabled(self) -> bool:
return pulumi.get(self, "disabled")
@property
@pulumi.getter(name="loadBalancerType")
def load_balancer_type(self) -> str:
return pulumi.get(self, "load_balancer_type")
@pulumi.output_type
class GetClusterAddonsConfigConfigConnectorConfigResult(dict):
def __init__(__self__, *,
enabled: bool):
pulumi.set(__self__, "enabled", enabled)
@property
@pulumi.getter
def enabled(self) -> bool:
return pulumi.get(self, "enabled")
@pulumi.output_type
class GetClusterAddonsConfigDnsCacheConfigResult(dict):
def __init__(__self__, *,
enabled: bool):
pulumi.set(__self__, "enabled", enabled)
@property
@pulumi.getter
def enabled(self) -> bool:
return pulumi.get(self, "enabled")
@pulumi.output_type
class GetClusterAddonsConfigGcePersistentDiskCsiDriverConfigResult(dict):
def __init__(__self__, *,
enabled: bool):
pulumi.set(__self__, "enabled", enabled)
@property
@pulumi.getter
def enabled(self) -> bool:
return pulumi.get(self, "enabled")
@pulumi.output_type
class GetClusterAddonsConfigHorizontalPodAutoscalingResult(dict):
def __init__(__self__, *,
disabled: bool):
pulumi.set(__self__, "disabled", disabled)
@property
@pulumi.getter
def disabled(self) -> bool:
return pulumi.get(self, "disabled")
@pulumi.output_type
class GetClusterAddonsConfigHttpLoadBalancingResult(dict):
def __init__(__self__, *,
disabled: bool):
pulumi.set(__self__, "disabled", disabled)
@property
@pulumi.getter
def disabled(self) -> bool:
return pulumi.get(self, "disabled")
@pulumi.output_type
class GetClusterAddonsConfigIstioConfigResult(dict):
def __init__(__self__, *,
auth: str,
disabled: bool):
pulumi.set(__self__, "auth", auth)
pulumi.set(__self__, "disabled", disabled)
@property
@pulumi.getter
def auth(self) -> str:
return pulumi.get(self, "auth")
@property
@pulumi.getter
def disabled(self) -> bool:
return pulumi.get(self, "disabled")
@pulumi.output_type
class GetClusterAddonsConfigKalmConfigResult(dict):
def __init__(__self__, *,
enabled: bool):
pulumi.set(__self__, "enabled", enabled)
@property
@pulumi.getter
def enabled(self) -> bool:
return pulumi.get(self, "enabled")
@pulumi.output_type
class GetClusterAddonsConfigNetworkPolicyConfigResult(dict):
def __init__(__self__, *,
disabled: bool):
pulumi.set(__self__, "disabled", disabled)
@property
@pulumi.getter
def disabled(self) -> bool:
return pulumi.get(self, "disabled")
@pulumi.output_type
class GetClusterAuthenticatorGroupsConfigResult(dict):
def __init__(__self__, *,
security_group: str):
pulumi.set(__self__, "security_group", security_group)
@property
@pulumi.getter(name="securityGroup")
def security_group(self) -> str:
return pulumi.get(self, "security_group")
@pulumi.output_type
class GetClusterClusterAutoscalingResult(dict):
def __init__(__self__, *,
auto_provisioning_defaults: Sequence['outputs.GetClusterClusterAutoscalingAutoProvisioningDefaultResult'],
autoscaling_profile: str,
enabled: bool,
resource_limits: Sequence['outputs.GetClusterClusterAutoscalingResourceLimitResult']):
pulumi.set(__self__, "auto_provisioning_defaults", auto_provisioning_defaults)
pulumi.set(__self__, "autoscaling_profile", autoscaling_profile)
pulumi.set(__self__, "enabled", enabled)
pulumi.set(__self__, "resource_limits", resource_limits)
@property
@pulumi.getter(name="autoProvisioningDefaults")
def auto_provisioning_defaults(self) -> Sequence['outputs.GetClusterClusterAutoscalingAutoProvisioningDefaultResult']:
return pulumi.get(self, "auto_provisioning_defaults")
@property
@pulumi.getter(name="autoscalingProfile")
def autoscaling_profile(self) -> str:
return pulumi.get(self, "autoscaling_profile")
@property
@pulumi.getter
def enabled(self) -> bool:
return pulumi.get(self, "enabled")
@property
@pulumi.getter(name="resourceLimits")
def resource_limits(self) -> Sequence['outputs.GetClusterClusterAutoscalingResourceLimitResult']:
return pulumi.get(self, "resource_limits")
@pulumi.output_type
class GetClusterClusterAutoscalingAutoProvisioningDefaultResult(dict):
def __init__(__self__, *,
min_cpu_platform: str,
oauth_scopes: Sequence[str],
service_account: str):
pulumi.set(__self__, "min_cpu_platform", min_cpu_platform)
pulumi.set(__self__, "oauth_scopes", oauth_scopes)
pulumi.set(__self__, "service_account", service_account)
@property
@pulumi.getter(name="minCpuPlatform")
def min_cpu_platform(self) -> str:
return pulumi.get(self, "min_cpu_platform")
@property
@pulumi.getter(name="oauthScopes")
def oauth_scopes(self) -> Sequence[str]:
return pulumi.get(self, "oauth_scopes")
@property
@pulumi.getter(name="serviceAccount")
def service_account(self) -> str:
return pulumi.get(self, "service_account")
@pulumi.output_type
class GetClusterClusterAutoscalingResourceLimitResult(dict):
def __init__(__self__, *,
maximum: int,
minimum: int,
resource_type: str):
pulumi.set(__self__, "maximum", maximum)
pulumi.set(__self__, "minimum", minimum)
pulumi.set(__self__, "resource_type", resource_type)
@property
@pulumi.getter
def maximum(self) -> int:
return pulumi.get(self, "maximum")
@property
@pulumi.getter
def minimum(self) -> int:
return pulumi.get(self, "minimum")
@property
@pulumi.getter(name="resourceType")
def resource_type(self) -> str:
return pulumi.get(self, "resource_type")
@pulumi.output_type
class GetClusterClusterTelemetryResult(dict):
def __init__(__self__, *,
type: str):
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def type(self) -> str:
return pulumi.get(self, "type")
@pulumi.output_type
class GetClusterConfidentialNodeResult(dict):
def __init__(__self__, *,
enabled: bool):
pulumi.set(__self__, "enabled", enabled)
@property
@pulumi.getter
def enabled(self) -> bool:
return pulumi.get(self, "enabled")
@pulumi.output_type
class GetClusterDatabaseEncryptionResult(dict):
def __init__(__self__, *,
key_name: str,
state: str):
pulumi.set(__self__, "key_name", key_name)
pulumi.set(__self__, "state", state)
@property
@pulumi.getter(name="keyName")
def key_name(self) -> str:
return pulumi.get(self, "key_name")
@property
@pulumi.getter
def state(self) -> str:
return pulumi.get(self, "state")
@pulumi.output_type
class GetClusterDefaultSnatStatusResult(dict):
def __init__(__self__, *,
disabled: bool):
pulumi.set(__self__, "disabled", disabled)
@property
@pulumi.getter
def disabled(self) -> bool:
return pulumi.get(self, "disabled")
@pulumi.output_type
class GetClusterDnsConfigResult(dict):
def __init__(__self__, *,
cluster_dns: str,
cluster_dns_domain: str,
cluster_dns_scope: str):
pulumi.set(__self__, "cluster_dns", cluster_dns)
pulumi.set(__self__, "cluster_dns_domain", cluster_dns_domain)
pulumi.set(__self__, "cluster_dns_scope", cluster_dns_scope)
@property
@pulumi.getter(name="clusterDns")
def cluster_dns(self) -> str:
return pulumi.get(self, "cluster_dns")
@property
@pulumi.getter(name="clusterDnsDomain")
def cluster_dns_domain(self) -> str:
return pulumi.get(self, "cluster_dns_domain")
@property
@pulumi.getter(name="clusterDnsScope")
def cluster_dns_scope(self) -> str:
return pulumi.get(self, "cluster_dns_scope")
@pulumi.output_type
class GetClusterIpAllocationPolicyResult(dict):
def __init__(__self__, *,
cluster_ipv4_cidr_block: str,
cluster_secondary_range_name: str,
services_ipv4_cidr_block: str,
services_secondary_range_name: str):
pulumi.set(__self__, "cluster_ipv4_cidr_block", cluster_ipv4_cidr_block)
pulumi.set(__self__, "cluster_secondary_range_name", cluster_secondary_range_name)
pulumi.set(__self__, "services_ipv4_cidr_block", services_ipv4_cidr_block)
pulumi.set(__self__, "services_secondary_range_name", services_secondary_range_name)
@property
@pulumi.getter(name="clusterIpv4CidrBlock")
def cluster_ipv4_cidr_block(self) -> str:
return pulumi.get(self, "cluster_ipv4_cidr_block")
@property
@pulumi.getter(name="clusterSecondaryRangeName")
def cluster_secondary_range_name(self) -> str:
return pulumi.get(self, "cluster_secondary_range_name")
@property
@pulumi.getter(name="servicesIpv4CidrBlock")
def services_ipv4_cidr_block(self) -> str:
return pulumi.get(self, "services_ipv4_cidr_block")
@property
@pulumi.getter(name="servicesSecondaryRangeName")
def services_secondary_range_name(self) -> str:
return pulumi.get(self, "services_secondary_range_name")
@pulumi.output_type
class GetClusterLoggingConfigResult(dict):
def __init__(__self__, *,
enable_components: Sequence[str]):
pulumi.set(__self__, "enable_components", enable_components)
@property
@pulumi.getter(name="enableComponents")
def enable_components(self) -> Sequence[str]:
return pulumi.get(self, "enable_components")
@pulumi.output_type
class GetClusterMaintenancePolicyResult(dict):
def __init__(__self__, *,
daily_maintenance_windows: Sequence['outputs.GetClusterMaintenancePolicyDailyMaintenanceWindowResult'],
maintenance_exclusions: Sequence['outputs.GetClusterMaintenancePolicyMaintenanceExclusionResult'],
recurring_windows: Sequence['outputs.GetClusterMaintenancePolicyRecurringWindowResult']):
pulumi.set(__self__, "daily_maintenance_windows", daily_maintenance_windows)
pulumi.set(__self__, "maintenance_exclusions", maintenance_exclusions)
pulumi.set(__self__, "recurring_windows", recurring_windows)
@property
@pulumi.getter(name="dailyMaintenanceWindows")
def daily_maintenance_windows(self) -> Sequence['outputs.GetClusterMaintenancePolicyDailyMaintenanceWindowResult']:
return pulumi.get(self, "daily_maintenance_windows")
@property
@pulumi.getter(name="maintenanceExclusions")
def maintenance_exclusions(self) -> Sequence['outputs.GetClusterMaintenancePolicyMaintenanceExclusionResult']:
return pulumi.get(self, "maintenance_exclusions")
@property
@pulumi.getter(name="recurringWindows")
def recurring_windows(self) -> Sequence['outputs.GetClusterMaintenancePolicyRecurringWindowResult']:
return pulumi.get(self, "recurring_windows")
@pulumi.output_type
class GetClusterMaintenancePolicyDailyMaintenanceWindowResult(dict):
def __init__(__self__, *,
duration: str,
start_time: str):
pulumi.set(__self__, "duration", duration)
pulumi.set(__self__, "start_time", start_time)
@property
@pulumi.getter
def duration(self) -> str:
return pulumi.get(self, "duration")
@property
@pulumi.getter(name="startTime")
def start_time(self) -> str:
return pulumi.get(self, "start_time")
@pulumi.output_type
class GetClusterMaintenancePolicyMaintenanceExclusionResult(dict):
def __init__(__self__, *,
end_time: str,
exclusion_name: str,
start_time: str):
pulumi.set(__self__, "end_time", end_time)
pulumi.set(__self__, "exclusion_name", exclusion_name)
pulumi.set(__self__, "start_time", start_time)
@property
@pulumi.getter(name="endTime")
def end_time(self) -> str:
return pulumi.get(self, "end_time")
@property
@pulumi.getter(name="exclusionName")
def exclusion_name(self) -> str:
return pulumi.get(self, "exclusion_name")
@property
@pulumi.getter(name="startTime")
def start_time(self) -> str:
return pulumi.get(self, "start_time")
@pulumi.output_type
class GetClusterMaintenancePolicyRecurringWindowResult(dict):
def __init__(__self__, *,
end_time: str,
recurrence: str,
start_time: str):
pulumi.set(__self__, "end_time", end_time)
pulumi.set(__self__, "recurrence", recurrence)
pulumi.set(__self__, "start_time", start_time)
@property
@pulumi.getter(name="endTime")
def end_time(self) -> str:
return pulumi.get(self, "end_time")
@property
@pulumi.getter
def recurrence(self) -> str:
return pulumi.get(self, "recurrence")
@property
@pulumi.getter(name="startTime")
def start_time(self) -> str:
return pulumi.get(self, "start_time")
@pulumi.output_type
class GetClusterMasterAuthResult(dict):
def __init__(__self__, *,
client_certificate: str,
client_certificate_configs: Sequence['outputs.GetClusterMasterAuthClientCertificateConfigResult'],
client_key: str,
cluster_ca_certificate: str):
pulumi.set(__self__, "client_certificate", client_certificate)
pulumi.set(__self__, "client_certificate_configs", client_certificate_configs)
pulumi.set(__self__, "client_key", client_key)
pulumi.set(__self__, "cluster_ca_certificate", cluster_ca_certificate)
@property
@pulumi.getter(name="clientCertificate")
def client_certificate(self) -> str:
return pulumi.get(self, "client_certificate")
@property
@pulumi.getter(name="clientCertificateConfigs")
def client_certificate_configs(self) -> Sequence['outputs.GetClusterMasterAuthClientCertificateConfigResult']:
return pulumi.get(self, "client_certificate_configs")
@property
@pulumi.getter(name="clientKey")
def client_key(self) -> str:
return pulumi.get(self, "client_key")
@property
@pulumi.getter(name="clusterCaCertificate")
def cluster_ca_certificate(self) -> str:
return pulumi.get(self, "cluster_ca_certificate")
@pulumi.output_type
class GetClusterMasterAuthClientCertificateConfigResult(dict):
def __init__(__self__, *,
issue_client_certificate: bool):
pulumi.set(__self__, "issue_client_certificate", issue_client_certificate)
@property
@pulumi.getter(name="issueClientCertificate")
def issue_client_certificate(self) -> bool:
return pulumi.get(self, "issue_client_certificate")
@pulumi.output_type
class GetClusterMasterAuthorizedNetworksConfigResult(dict):
def __init__(__self__, *,
cidr_blocks: Sequence['outputs.GetClusterMasterAuthorizedNetworksConfigCidrBlockResult']):
pulumi.set(__self__, "cidr_blocks", cidr_blocks)
@property
@pulumi.getter(name="cidrBlocks")
def cidr_blocks(self) -> Sequence['outputs.GetClusterMasterAuthorizedNetworksConfigCidrBlockResult']:
return pulumi.get(self, "cidr_blocks")
@pulumi.output_type
class GetClusterMasterAuthorizedNetworksConfigCidrBlockResult(dict):
def __init__(__self__, *,
cidr_block: str,
display_name: str):
pulumi.set(__self__, "cidr_block", cidr_block)
pulumi.set(__self__, "display_name", display_name)
@property
@pulumi.getter(name="cidrBlock")
def cidr_block(self) -> str:
return pulumi.get(self, "cidr_block")
@property
@pulumi.getter(name="displayName")
def display_name(self) -> str:
return pulumi.get(self, "display_name")
@pulumi.output_type
class GetClusterMonitoringConfigResult(dict):
def __init__(__self__, *,
enable_components: Sequence[str]):
pulumi.set(__self__, "enable_components", enable_components)
@property
@pulumi.getter(name="enableComponents")
def enable_components(self) -> Sequence[str]:
return pulumi.get(self, "enable_components")
@pulumi.output_type
class GetClusterNetworkPolicyResult(dict):
def __init__(__self__, *,
enabled: bool,
provider: str):
pulumi.set(__self__, "enabled", enabled)
pulumi.set(__self__, "provider", provider)
@property
@pulumi.getter
def enabled(self) -> bool:
return pulumi.get(self, "enabled")
@property
@pulumi.getter
def provider(self) -> str:
return pulumi.get(self, "provider")
@pulumi.output_type
class GetClusterNodeConfigResult(dict):
def __init__(__self__, *,
boot_disk_kms_key: str,
disk_size_gb: int,
disk_type: str,
ephemeral_storage_configs: Sequence['outputs.GetClusterNodeConfigEphemeralStorageConfigResult'],
gcfs_configs: Sequence['outputs.GetClusterNodeConfigGcfsConfigResult'],
guest_accelerators: Sequence['outputs.GetClusterNodeConfigGuestAcceleratorResult'],
image_type: str,
kubelet_configs: Sequence['outputs.GetClusterNodeConfigKubeletConfigResult'],
labels: Mapping[str, str],
linux_node_configs: Sequence['outputs.GetClusterNodeConfigLinuxNodeConfigResult'],
local_ssd_count: int,
machine_type: str,
metadata: Mapping[str, str],
min_cpu_platform: str,
node_group: str,
oauth_scopes: Sequence[str],
preemptible: bool,
sandbox_configs: Sequence['outputs.GetClusterNodeConfigSandboxConfigResult'],
service_account: str,
shielded_instance_configs: Sequence['outputs.GetClusterNodeConfigShieldedInstanceConfigResult'],
spot: bool,
tags: Sequence[str],
taints: Sequence['outputs.GetClusterNodeConfigTaintResult'],
workload_metadata_configs: Sequence['outputs.GetClusterNodeConfigWorkloadMetadataConfigResult']):
pulumi.set(__self__, "boot_disk_kms_key", boot_disk_kms_key)
pulumi.set(__self__, "disk_size_gb", disk_size_gb)
pulumi.set(__self__, "disk_type", disk_type)
pulumi.set(__self__, "ephemeral_storage_configs", ephemeral_storage_configs)
pulumi.set(__self__, "gcfs_configs", gcfs_configs)
pulumi.set(__self__, "guest_accelerators", guest_accelerators)
pulumi.set(__self__, "image_type", image_type)
pulumi.set(__self__, "kubelet_configs", kubelet_configs)
pulumi.set(__self__, "labels", labels)
pulumi.set(__self__, "linux_node_configs", linux_node_configs)
pulumi.set(__self__, "local_ssd_count", local_ssd_count)
pulumi.set(__self__, "machine_type", machine_type)
pulumi.set(__self__, "metadata", metadata)
pulumi.set(__self__, "min_cpu_platform", min_cpu_platform)
pulumi.set(__self__, "node_group", node_group)
pulumi.set(__self__, "oauth_scopes", oauth_scopes)
pulumi.set(__self__, "preemptible", preemptible)
pulumi.set(__self__, "sandbox_configs", sandbox_configs)
pulumi.set(__self__, "service_account", service_account)
pulumi.set(__self__, "shielded_instance_configs", shielded_instance_configs)
pulumi.set(__self__, "spot", spot)
pulumi.set(__self__, "tags", tags)
pulumi.set(__self__, "taints", taints)
pulumi.set(__self__, "workload_metadata_configs", workload_metadata_configs)
@property
@pulumi.getter(name="bootDiskKmsKey")
def boot_disk_kms_key(self) -> str:
return pulumi.get(self, "boot_disk_kms_key")
@property
@pulumi.getter(name="diskSizeGb")
def disk_size_gb(self) -> int:
return pulumi.get(self, "disk_size_gb")
@property
@pulumi.getter(name="diskType")
def disk_type(self) -> str:
return pulumi.get(self, "disk_type")
@property
@pulumi.getter(name="ephemeralStorageConfigs")
def ephemeral_storage_configs(self) -> Sequence['outputs.GetClusterNodeConfigEphemeralStorageConfigResult']:
return pulumi.get(self, "ephemeral_storage_configs")
@property
@pulumi.getter(name="gcfsConfigs")
def gcfs_configs(self) -> Sequence['outputs.GetClusterNodeConfigGcfsConfigResult']:
return pulumi.get(self, "gcfs_configs")
@property
@pulumi.getter(name="guestAccelerators")
def guest_accelerators(self) -> Sequence['outputs.GetClusterNodeConfigGuestAcceleratorResult']:
return pulumi.get(self, "guest_accelerators")
@property
@pulumi.getter(name="imageType")
def image_type(self) -> str:
return pulumi.get(self, "image_type")
@property
@pulumi.getter(name="kubeletConfigs")
def kubelet_configs(self) -> Sequence['outputs.GetClusterNodeConfigKubeletConfigResult']:
return pulumi.get(self, "kubelet_configs")
@property
@pulumi.getter
def labels(self) -> Mapping[str, str]:
return pulumi.get(self, "labels")
@property
@pulumi.getter(name="linuxNodeConfigs")
def linux_node_configs(self) -> Sequence['outputs.GetClusterNodeConfigLinuxNodeConfigResult']:
return pulumi.get(self, "linux_node_configs")
@property
@pulumi.getter(name="localSsdCount")
def local_ssd_count(self) -> int:
return pulumi.get(self, "local_ssd_count")
@property
@pulumi.getter(name="machineType")
def machine_type(self) -> str:
return pulumi.get(self, "machine_type")
@property
@pulumi.getter
def metadata(self) -> Mapping[str, str]:
return pulumi.get(self, "metadata")
@property
@pulumi.getter(name="minCpuPlatform")
def min_cpu_platform(self) -> str:
return pulumi.get(self, "min_cpu_platform")
@property
@pulumi.getter(name="nodeGroup")
def node_group(self) -> str:
return pulumi.get(self, "node_group")
@property
@pulumi.getter(name="oauthScopes")
def oauth_scopes(self) -> Sequence[str]:
return pulumi.get(self, "oauth_scopes")
@property
@pulumi.getter
def preemptible(self) -> bool:
return pulumi.get(self, "preemptible")
@property
@pulumi.getter(name="sandboxConfigs")
def sandbox_configs(self) -> Sequence['outputs.GetClusterNodeConfigSandboxConfigResult']:
return pulumi.get(self, "sandbox_configs")
@property
@pulumi.getter(name="serviceAccount")
def service_account(self) -> str:
return pulumi.get(self, "service_account")
@property
@pulumi.getter(name="shieldedInstanceConfigs")
def shielded_instance_configs(self) -> Sequence['outputs.GetClusterNodeConfigShieldedInstanceConfigResult']:
return pulumi.get(self, "shielded_instance_configs")
@property
@pulumi.getter
def spot(self) -> bool:
return pulumi.get(self, "spot")
@property
@pulumi.getter
def tags(self) -> Sequence[str]:
return pulumi.get(self, "tags")
@property
@pulumi.getter
def taints(self) -> Sequence['outputs.GetClusterNodeConfigTaintResult']:
return pulumi.get(self, "taints")
@property
@pulumi.getter(name="workloadMetadataConfigs")
def workload_metadata_configs(self) -> Sequence['outputs.GetClusterNodeConfigWorkloadMetadataConfigResult']:
return pulumi.get(self, "workload_metadata_configs")
@pulumi.output_type
class GetClusterNodeConfigEphemeralStorageConfigResult(dict):
def __init__(__self__, *,
local_ssd_count: int):
pulumi.set(__self__, "local_ssd_count", local_ssd_count)
@property
@pulumi.getter(name="localSsdCount")
def local_ssd_count(self) -> int:
return pulumi.get(self, "local_ssd_count")
@pulumi.output_type
class GetClusterNodeConfigGcfsConfigResult(dict):
def __init__(__self__, *,
enabled: bool):
pulumi.set(__self__, "enabled", enabled)
@property
@pulumi.getter
def enabled(self) -> bool:
return pulumi.get(self, "enabled")
@pulumi.output_type
class GetClusterNodeConfigGuestAcceleratorResult(dict):
def __init__(__self__, *,
count: int,
gpu_partition_size: str,
type: str):
pulumi.set(__self__, "count", count)
pulumi.set(__self__, "gpu_partition_size", gpu_partition_size)
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def count(self) -> int:
return pulumi.get(self, "count")
@property
@pulumi.getter(name="gpuPartitionSize")
def gpu_partition_size(self) -> str:
return pulumi.get(self, "gpu_partition_size")
@property
@pulumi.getter
def type(self) -> str:
return pulumi.get(self, "type")
@pulumi.output_type
class GetClusterNodeConfigKubeletConfigResult(dict):
def __init__(__self__, *,
cpu_cfs_quota: bool,
cpu_cfs_quota_period: str,
cpu_manager_policy: str):
pulumi.set(__self__, "cpu_cfs_quota", cpu_cfs_quota)
pulumi.set(__self__, "cpu_cfs_quota_period", cpu_cfs_quota_period)
pulumi.set(__self__, "cpu_manager_policy", cpu_manager_policy)
@property
@pulumi.getter(name="cpuCfsQuota")
def cpu_cfs_quota(self) -> bool:
return pulumi.get(self, "cpu_cfs_quota")
@property
@pulumi.getter(name="cpuCfsQuotaPeriod")
def cpu_cfs_quota_period(self) -> str:
return pulumi.get(self, "cpu_cfs_quota_period")
@property
@pulumi.getter(name="cpuManagerPolicy")
def cpu_manager_policy(self) -> str:
return pulumi.get(self, "cpu_manager_policy")
@pulumi.output_type
class GetClusterNodeConfigLinuxNodeConfigResult(dict):
def __init__(__self__, *,
sysctls: Mapping[str, str]):
pulumi.set(__self__, "sysctls", sysctls)
@property
@pulumi.getter
def sysctls(self) -> Mapping[str, str]:
return pulumi.get(self, "sysctls")
@pulumi.output_type
class GetClusterNodeConfigSandboxConfigResult(dict):
def __init__(__self__, *,
sandbox_type: str):
pulumi.set(__self__, "sandbox_type", sandbox_type)
@property
@pulumi.getter(name="sandboxType")
def sandbox_type(self) -> str:
return pulumi.get(self, "sandbox_type")
@pulumi.output_type
class GetClusterNodeConfigShieldedInstanceConfigResult(dict):
def __init__(__self__, *,
enable_integrity_monitoring: bool,
enable_secure_boot: bool):
pulumi.set(__self__, "enable_integrity_monitoring", enable_integrity_monitoring)
pulumi.set(__self__, "enable_secure_boot", enable_secure_boot)
@property
@pulumi.getter(name="enableIntegrityMonitoring")
def enable_integrity_monitoring(self) -> bool:
return pulumi.get(self, "enable_integrity_monitoring")
@property
@pulumi.getter(name="enableSecureBoot")
def enable_secure_boot(self) -> bool:
return pulumi.get(self, "enable_secure_boot")
@pulumi.output_type
class GetClusterNodeConfigTaintResult(dict):
def __init__(__self__, *,
effect: str,
key: str,
value: str):
pulumi.set(__self__, "effect", effect)
pulumi.set(__self__, "key", key)
pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def effect(self) -> str:
return pulumi.get(self, "effect")
@property
@pulumi.getter
def key(self) -> str:
return pulumi.get(self, "key")
@property
@pulumi.getter
def value(self) -> str:
return pulumi.get(self, "value")
@pulumi.output_type
class GetClusterNodeConfigWorkloadMetadataConfigResult(dict):
def __init__(__self__, *,
mode: str):
pulumi.set(__self__, "mode", mode)
@property
@pulumi.getter
def mode(self) -> str:
return pulumi.get(self, "mode")
@pulumi.output_type
class GetClusterNodePoolResult(dict):
def __init__(__self__, *,
autoscalings: Sequence['outputs.GetClusterNodePoolAutoscalingResult'],
initial_node_count: int,
instance_group_urls: Sequence[str],
managed_instance_group_urls: Sequence[str],
managements: Sequence['outputs.GetClusterNodePoolManagementResult'],
max_pods_per_node: int,
name: str,
name_prefix: str,
network_configs: Sequence['outputs.GetClusterNodePoolNetworkConfigResult'],
node_configs: Sequence['outputs.GetClusterNodePoolNodeConfigResult'],
node_count: int,
node_locations: Sequence[str],
upgrade_settings: Sequence['outputs.GetClusterNodePoolUpgradeSettingResult'],
version: str):
"""
:param str name: The name of the node pool.
"""
pulumi.set(__self__, "autoscalings", autoscalings)
pulumi.set(__self__, "initial_node_count", initial_node_count)
pulumi.set(__self__, "instance_group_urls", instance_group_urls)
pulumi.set(__self__, "managed_instance_group_urls", managed_instance_group_urls)
pulumi.set(__self__, "managements", managements)
pulumi.set(__self__, "max_pods_per_node", max_pods_per_node)
pulumi.set(__self__, "name", name)
pulumi.set(__self__, "name_prefix", name_prefix)
pulumi.set(__self__, "network_configs", network_configs)
pulumi.set(__self__, "node_configs", node_configs)
pulumi.set(__self__, "node_count", node_count)
pulumi.set(__self__, "node_locations", node_locations)
pulumi.set(__self__, "upgrade_settings", upgrade_settings)
pulumi.set(__self__, "version", version)
@property
@pulumi.getter
def autoscalings(self) -> Sequence['outputs.GetClusterNodePoolAutoscalingResult']:
return pulumi.get(self, "autoscalings")
@property
@pulumi.getter(name="initialNodeCount")
def initial_node_count(self) -> int:
return pulumi.get(self, "initial_node_count")
@property
@pulumi.getter(name="instanceGroupUrls")
def instance_group_urls(self) -> Sequence[str]:
return pulumi.get(self, "instance_group_urls")
@property
@pulumi.getter(name="managedInstanceGroupUrls")
def managed_instance_group_urls(self) -> Sequence[str]:
return pulumi.get(self, "managed_instance_group_urls")
@property
@pulumi.getter
def managements(self) -> Sequence['outputs.GetClusterNodePoolManagementResult']:
return pulumi.get(self, "managements")
@property
@pulumi.getter(name="maxPodsPerNode")
def max_pods_per_node(self) -> int:
return pulumi.get(self, "max_pods_per_node")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the node pool.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="namePrefix")
def name_prefix(self) -> str:
return pulumi.get(self, "name_prefix")
@property
@pulumi.getter(name="networkConfigs")
def network_configs(self) -> Sequence['outputs.GetClusterNodePoolNetworkConfigResult']:
return pulumi.get(self, "network_configs")
@property
@pulumi.getter(name="nodeConfigs")
def node_configs(self) -> Sequence['outputs.GetClusterNodePoolNodeConfigResult']:
return pulumi.get(self, "node_configs")
@property
@pulumi.getter(name="nodeCount")
def node_count(self) -> int:
return pulumi.get(self, "node_count")
@property
@pulumi.getter(name="nodeLocations")
def node_locations(self) -> Sequence[str]:
return pulumi.get(self, "node_locations")
@property
@pulumi.getter(name="upgradeSettings")
def upgrade_settings(self) -> Sequence['outputs.GetClusterNodePoolUpgradeSettingResult']:
return pulumi.get(self, "upgrade_settings")
@property
@pulumi.getter
def version(self) -> str:
return pulumi.get(self, "version")
@pulumi.output_type
class GetClusterNodePoolAutoscalingResult(dict):
def __init__(__self__, *,
max_node_count: int,
min_node_count: int):
pulumi.set(__self__, "max_node_count", max_node_count)
pulumi.set(__self__, "min_node_count", min_node_count)
@property
@pulumi.getter(name="maxNodeCount")
def max_node_count(self) -> int:
return pulumi.get(self, "max_node_count")
@property
@pulumi.getter(name="minNodeCount")
def min_node_count(self) -> int:
return pulumi.get(self, "min_node_count")
@pulumi.output_type
class GetClusterNodePoolManagementResult(dict):
def __init__(__self__, *,
auto_repair: bool,
auto_upgrade: bool):
pulumi.set(__self__, "auto_repair", auto_repair)
pulumi.set(__self__, "auto_upgrade", auto_upgrade)
@property
@pulumi.getter(name="autoRepair")
def auto_repair(self) -> bool:
return pulumi.get(self, "auto_repair")
@property
@pulumi.getter(name="autoUpgrade")
def auto_upgrade(self) -> bool:
return pulumi.get(self, "auto_upgrade")
@pulumi.output_type
class GetClusterNodePoolNetworkConfigResult(dict):
def __init__(__self__, *,
create_pod_range: bool,
pod_ipv4_cidr_block: str,
pod_range: str):
pulumi.set(__self__, "create_pod_range", create_pod_range)
pulumi.set(__self__, "pod_ipv4_cidr_block", pod_ipv4_cidr_block)
pulumi.set(__self__, "pod_range", pod_range)
@property
@pulumi.getter(name="createPodRange")
def create_pod_range(self) -> bool:
return pulumi.get(self, "create_pod_range")
@property
@pulumi.getter(name="podIpv4CidrBlock")
def pod_ipv4_cidr_block(self) -> str:
return pulumi.get(self, "pod_ipv4_cidr_block")
@property
@pulumi.getter(name="podRange")
def pod_range(self) -> str:
return pulumi.get(self, "pod_range")
@pulumi.output_type
class GetClusterNodePoolNodeConfigResult(dict):
def __init__(__self__, *,
boot_disk_kms_key: str,
disk_size_gb: int,
disk_type: str,
ephemeral_storage_configs: Sequence['outputs.GetClusterNodePoolNodeConfigEphemeralStorageConfigResult'],
gcfs_configs: Sequence['outputs.GetClusterNodePoolNodeConfigGcfsConfigResult'],
guest_accelerators: Sequence['outputs.GetClusterNodePoolNodeConfigGuestAcceleratorResult'],
image_type: str,
kubelet_configs: Sequence['outputs.GetClusterNodePoolNodeConfigKubeletConfigResult'],
labels: Mapping[str, str],
linux_node_configs: Sequence['outputs.GetClusterNodePoolNodeConfigLinuxNodeConfigResult'],
local_ssd_count: int,
machine_type: str,
metadata: Mapping[str, str],
min_cpu_platform: str,
node_group: str,
oauth_scopes: Sequence[str],
preemptible: bool,
sandbox_configs: Sequence['outputs.GetClusterNodePoolNodeConfigSandboxConfigResult'],
service_account: str,
shielded_instance_configs: Sequence['outputs.GetClusterNodePoolNodeConfigShieldedInstanceConfigResult'],
spot: bool,
tags: Sequence[str],
taints: Sequence['outputs.GetClusterNodePoolNodeConfigTaintResult'],
workload_metadata_configs: Sequence['outputs.GetClusterNodePoolNodeConfigWorkloadMetadataConfigResult']):
pulumi.set(__self__, "boot_disk_kms_key", boot_disk_kms_key)
pulumi.set(__self__, "disk_size_gb", disk_size_gb)
pulumi.set(__self__, "disk_type", disk_type)
pulumi.set(__self__, "ephemeral_storage_configs", ephemeral_storage_configs)
pulumi.set(__self__, "gcfs_configs", gcfs_configs)
pulumi.set(__self__, "guest_accelerators", guest_accelerators)
pulumi.set(__self__, "image_type", image_type)
pulumi.set(__self__, "kubelet_configs", kubelet_configs)
pulumi.set(__self__, "labels", labels)
pulumi.set(__self__, "linux_node_configs", linux_node_configs)
pulumi.set(__self__, "local_ssd_count", local_ssd_count)
pulumi.set(__self__, "machine_type", machine_type)
pulumi.set(__self__, "metadata", metadata)
pulumi.set(__self__, "min_cpu_platform", min_cpu_platform)
pulumi.set(__self__, "node_group", node_group)
pulumi.set(__self__, "oauth_scopes", oauth_scopes)
pulumi.set(__self__, "preemptible", preemptible)
pulumi.set(__self__, "sandbox_configs", sandbox_configs)
pulumi.set(__self__, "service_account", service_account)
pulumi.set(__self__, "shielded_instance_configs", shielded_instance_configs)
pulumi.set(__self__, "spot", spot)
pulumi.set(__self__, "tags", tags)
pulumi.set(__self__, "taints", taints)
pulumi.set(__self__, "workload_metadata_configs", workload_metadata_configs)
@property
@pulumi.getter(name="bootDiskKmsKey")
def boot_disk_kms_key(self) -> str:
return pulumi.get(self, "boot_disk_kms_key")
@property
@pulumi.getter(name="diskSizeGb")
def disk_size_gb(self) -> int:
return pulumi.get(self, "disk_size_gb")
@property
@pulumi.getter(name="diskType")
def disk_type(self) -> str:
return pulumi.get(self, "disk_type")
@property
@pulumi.getter(name="ephemeralStorageConfigs")
def ephemeral_storage_configs(self) -> Sequence['outputs.GetClusterNodePoolNodeConfigEphemeralStorageConfigResult']:
return pulumi.get(self, "ephemeral_storage_configs")
@property
@pulumi.getter(name="gcfsConfigs")
def gcfs_configs(self) -> Sequence['outputs.GetClusterNodePoolNodeConfigGcfsConfigResult']:
return pulumi.get(self, "gcfs_configs")
@property
@pulumi.getter(name="guestAccelerators")
def guest_accelerators(self) -> Sequence['outputs.GetClusterNodePoolNodeConfigGuestAcceleratorResult']:
return pulumi.get(self, "guest_accelerators")
@property
@pulumi.getter(name="imageType")
def image_type(self) -> str:
return pulumi.get(self, "image_type")
@property
@pulumi.getter(name="kubeletConfigs")
def kubelet_configs(self) -> Sequence['outputs.GetClusterNodePoolNodeConfigKubeletConfigResult']:
return pulumi.get(self, "kubelet_configs")
@property
@pulumi.getter
def labels(self) -> Mapping[str, str]:
return pulumi.get(self, "labels")
@property
@pulumi.getter(name="linuxNodeConfigs")
def linux_node_configs(self) -> Sequence['outputs.GetClusterNodePoolNodeConfigLinuxNodeConfigResult']:
return pulumi.get(self, "linux_node_configs")
@property
@pulumi.getter(name="localSsdCount")
def local_ssd_count(self) -> int:
return pulumi.get(self, "local_ssd_count")
@property
@pulumi.getter(name="machineType")
def machine_type(self) -> str:
return pulumi.get(self, "machine_type")
@property
@pulumi.getter
def metadata(self) -> Mapping[str, str]:
return pulumi.get(self, "metadata")
@property
@pulumi.getter(name="minCpuPlatform")
def min_cpu_platform(self) -> str:
return pulumi.get(self, "min_cpu_platform")
@property
@pulumi.getter(name="nodeGroup")
def node_group(self) -> str:
return pulumi.get(self, "node_group")
@property
@pulumi.getter(name="oauthScopes")
def oauth_scopes(self) -> Sequence[str]:
return pulumi.get(self, "oauth_scopes")
@property
@pulumi.getter
def preemptible(self) -> bool:
return pulumi.get(self, "preemptible")
@property
@pulumi.getter(name="sandboxConfigs")
def sandbox_configs(self) -> Sequence['outputs.GetClusterNodePoolNodeConfigSandboxConfigResult']:
return pulumi.get(self, "sandbox_configs")
@property
@pulumi.getter(name="serviceAccount")
def service_account(self) -> str:
return pulumi.get(self, "service_account")
@property
@pulumi.getter(name="shieldedInstanceConfigs")
def shielded_instance_configs(self) -> Sequence['outputs.GetClusterNodePoolNodeConfigShieldedInstanceConfigResult']:
return pulumi.get(self, "shielded_instance_configs")
@property
@pulumi.getter
def spot(self) -> bool:
return pulumi.get(self, "spot")
@property
@pulumi.getter
def tags(self) -> Sequence[str]:
return pulumi.get(self, "tags")
@property
@pulumi.getter
def taints(self) -> Sequence['outputs.GetClusterNodePoolNodeConfigTaintResult']:
return pulumi.get(self, "taints")
@property
@pulumi.getter(name="workloadMetadataConfigs")
def workload_metadata_configs(self) -> Sequence['outputs.GetClusterNodePoolNodeConfigWorkloadMetadataConfigResult']:
return pulumi.get(self, "workload_metadata_configs")
@pulumi.output_type
class GetClusterNodePoolNodeConfigEphemeralStorageConfigResult(dict):
def __init__(__self__, *,
local_ssd_count: int):
pulumi.set(__self__, "local_ssd_count", local_ssd_count)
@property
@pulumi.getter(name="localSsdCount")
def local_ssd_count(self) -> int:
return pulumi.get(self, "local_ssd_count")
@pulumi.output_type
class GetClusterNodePoolNodeConfigGcfsConfigResult(dict):
def __init__(__self__, *,
enabled: bool):
pulumi.set(__self__, "enabled", enabled)
@property
@pulumi.getter
def enabled(self) -> bool:
return pulumi.get(self, "enabled")
@pulumi.output_type
class GetClusterNodePoolNodeConfigGuestAcceleratorResult(dict):
def __init__(__self__, *,
count: int,
gpu_partition_size: str,
type: str):
pulumi.set(__self__, "count", count)
pulumi.set(__self__, "gpu_partition_size", gpu_partition_size)
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def count(self) -> int:
return pulumi.get(self, "count")
@property
@pulumi.getter(name="gpuPartitionSize")
def gpu_partition_size(self) -> str:
return pulumi.get(self, "gpu_partition_size")
@property
@pulumi.getter
def type(self) -> str:
return pulumi.get(self, "type")
@pulumi.output_type
class GetClusterNodePoolNodeConfigKubeletConfigResult(dict):
def __init__(__self__, *,
cpu_cfs_quota: bool,
cpu_cfs_quota_period: str,
cpu_manager_policy: str):
pulumi.set(__self__, "cpu_cfs_quota", cpu_cfs_quota)
pulumi.set(__self__, "cpu_cfs_quota_period", cpu_cfs_quota_period)
pulumi.set(__self__, "cpu_manager_policy", cpu_manager_policy)
@property
@pulumi.getter(name="cpuCfsQuota")
def cpu_cfs_quota(self) -> bool:
return pulumi.get(self, "cpu_cfs_quota")
@property
@pulumi.getter(name="cpuCfsQuotaPeriod")
def cpu_cfs_quota_period(self) -> str:
return pulumi.get(self, "cpu_cfs_quota_period")
@property
@pulumi.getter(name="cpuManagerPolicy")
def cpu_manager_policy(self) -> str:
return pulumi.get(self, "cpu_manager_policy")
@pulumi.output_type
class GetClusterNodePoolNodeConfigLinuxNodeConfigResult(dict):
def __init__(__self__, *,
sysctls: Mapping[str, str]):
pulumi.set(__self__, "sysctls", sysctls)
@property
@pulumi.getter
def sysctls(self) -> Mapping[str, str]:
return pulumi.get(self, "sysctls")
@pulumi.output_type
class GetClusterNodePoolNodeConfigSandboxConfigResult(dict):
def __init__(__self__, *,
sandbox_type: str):
pulumi.set(__self__, "sandbox_type", sandbox_type)
@property
@pulumi.getter(name="sandboxType")
def sandbox_type(self) -> str:
return pulumi.get(self, "sandbox_type")
@pulumi.output_type
class GetClusterNodePoolNodeConfigShieldedInstanceConfigResult(dict):
def __init__(__self__, *,
enable_integrity_monitoring: bool,
enable_secure_boot: bool):
pulumi.set(__self__, "enable_integrity_monitoring", enable_integrity_monitoring)
pulumi.set(__self__, "enable_secure_boot", enable_secure_boot)
@property
@pulumi.getter(name="enableIntegrityMonitoring")
def enable_integrity_monitoring(self) -> bool:
return pulumi.get(self, "enable_integrity_monitoring")
@property
@pulumi.getter(name="enableSecureBoot")
def enable_secure_boot(self) -> bool:
return pulumi.get(self, "enable_secure_boot")
@pulumi.output_type
class GetClusterNodePoolNodeConfigTaintResult(dict):
def __init__(__self__, *,
effect: str,
key: str,
value: str):
pulumi.set(__self__, "effect", effect)
pulumi.set(__self__, "key", key)
pulumi.set(__self__, "value", value)
@property
@pulumi.getter
def effect(self) -> str:
return pulumi.get(self, "effect")
@property
@pulumi.getter
def key(self) -> str:
return pulumi.get(self, "key")
@property
@pulumi.getter
def value(self) -> str:
return pulumi.get(self, "value")
@pulumi.output_type
class GetClusterNodePoolNodeConfigWorkloadMetadataConfigResult(dict):
def __init__(__self__, *,
mode: str):
pulumi.set(__self__, "mode", mode)
@property
@pulumi.getter
def mode(self) -> str:
return pulumi.get(self, "mode")
@pulumi.output_type
class GetClusterNodePoolUpgradeSettingResult(dict):
def __init__(__self__, *,
max_surge: int,
max_unavailable: int):
pulumi.set(__self__, "max_surge", max_surge)
pulumi.set(__self__, "max_unavailable", max_unavailable)
@property
@pulumi.getter(name="maxSurge")
def max_surge(self) -> int:
return pulumi.get(self, "max_surge")
@property
@pulumi.getter(name="maxUnavailable")
def max_unavailable(self) -> int:
return pulumi.get(self, "max_unavailable")
@pulumi.output_type
class GetClusterNotificationConfigResult(dict):
def __init__(__self__, *,
pubsubs: Sequence['outputs.GetClusterNotificationConfigPubsubResult']):
pulumi.set(__self__, "pubsubs", pubsubs)
@property
@pulumi.getter
def pubsubs(self) -> Sequence['outputs.GetClusterNotificationConfigPubsubResult']:
return pulumi.get(self, "pubsubs")
@pulumi.output_type
class GetClusterNotificationConfigPubsubResult(dict):
def __init__(__self__, *,
enabled: bool,
topic: str):
pulumi.set(__self__, "enabled", enabled)
pulumi.set(__self__, "topic", topic)
@property
@pulumi.getter
def enabled(self) -> bool:
return pulumi.get(self, "enabled")
@property
@pulumi.getter
def topic(self) -> str:
return pulumi.get(self, "topic")
@pulumi.output_type
class GetClusterPodSecurityPolicyConfigResult(dict):
def __init__(__self__, *,
enabled: bool):
pulumi.set(__self__, "enabled", enabled)
@property
@pulumi.getter
def enabled(self) -> bool:
return pulumi.get(self, "enabled")
@pulumi.output_type
class GetClusterPrivateClusterConfigResult(dict):
def __init__(__self__, *,
enable_private_endpoint: bool,
enable_private_nodes: bool,
master_global_access_configs: Sequence['outputs.GetClusterPrivateClusterConfigMasterGlobalAccessConfigResult'],
master_ipv4_cidr_block: str,
peering_name: str,
private_endpoint: str,
public_endpoint: str):
pulumi.set(__self__, "enable_private_endpoint", enable_private_endpoint)
pulumi.set(__self__, "enable_private_nodes", enable_private_nodes)
pulumi.set(__self__, "master_global_access_configs", master_global_access_configs)
pulumi.set(__self__, "master_ipv4_cidr_block", master_ipv4_cidr_block)
pulumi.set(__self__, "peering_name", peering_name)
pulumi.set(__self__, "private_endpoint", private_endpoint)
pulumi.set(__self__, "public_endpoint", public_endpoint)
@property
@pulumi.getter(name="enablePrivateEndpoint")
def enable_private_endpoint(self) -> bool:
return pulumi.get(self, "enable_private_endpoint")
@property
@pulumi.getter(name="enablePrivateNodes")
def enable_private_nodes(self) -> bool:
return pulumi.get(self, "enable_private_nodes")
@property
@pulumi.getter(name="masterGlobalAccessConfigs")
def master_global_access_configs(self) -> Sequence['outputs.GetClusterPrivateClusterConfigMasterGlobalAccessConfigResult']:
return pulumi.get(self, "master_global_access_configs")
@property
@pulumi.getter(name="masterIpv4CidrBlock")
def master_ipv4_cidr_block(self) -> str:
return pulumi.get(self, "master_ipv4_cidr_block")
@property
@pulumi.getter(name="peeringName")
def peering_name(self) -> str:
return pulumi.get(self, "peering_name")
@property
@pulumi.getter(name="privateEndpoint")
def private_endpoint(self) -> str:
return pulumi.get(self, "private_endpoint")
@property
@pulumi.getter(name="publicEndpoint")
def public_endpoint(self) -> str:
return pulumi.get(self, "public_endpoint")
@pulumi.output_type
class GetClusterPrivateClusterConfigMasterGlobalAccessConfigResult(dict):
def __init__(__self__, *,
enabled: bool):
pulumi.set(__self__, "enabled", enabled)
@property
@pulumi.getter
def enabled(self) -> bool:
return pulumi.get(self, "enabled")
@pulumi.output_type
class GetClusterReleaseChannelResult(dict):
def __init__(__self__, *,
channel: str):
pulumi.set(__self__, "channel", channel)
@property
@pulumi.getter
def channel(self) -> str:
return pulumi.get(self, "channel")
@pulumi.output_type
class GetClusterResourceUsageExportConfigResult(dict):
def __init__(__self__, *,
bigquery_destinations: Sequence['outputs.GetClusterResourceUsageExportConfigBigqueryDestinationResult'],
enable_network_egress_metering: bool,
enable_resource_consumption_metering: bool):
pulumi.set(__self__, "bigquery_destinations", bigquery_destinations)
pulumi.set(__self__, "enable_network_egress_metering", enable_network_egress_metering)
pulumi.set(__self__, "enable_resource_consumption_metering", enable_resource_consumption_metering)
@property
@pulumi.getter(name="bigqueryDestinations")
def bigquery_destinations(self) -> Sequence['outputs.GetClusterResourceUsageExportConfigBigqueryDestinationResult']:
return pulumi.get(self, "bigquery_destinations")
@property
@pulumi.getter(name="enableNetworkEgressMetering")
def enable_network_egress_metering(self) -> bool:
return pulumi.get(self, "enable_network_egress_metering")
@property
@pulumi.getter(name="enableResourceConsumptionMetering")
def enable_resource_consumption_metering(self) -> bool:
return pulumi.get(self, "enable_resource_consumption_metering")
@pulumi.output_type
class GetClusterResourceUsageExportConfigBigqueryDestinationResult(dict):
def __init__(__self__, *,
dataset_id: str):
pulumi.set(__self__, "dataset_id", dataset_id)
@property
@pulumi.getter(name="datasetId")
def dataset_id(self) -> str:
return pulumi.get(self, "dataset_id")
@pulumi.output_type
class GetClusterVerticalPodAutoscalingResult(dict):
def __init__(__self__, *,
enabled: bool):
pulumi.set(__self__, "enabled", enabled)
@property
@pulumi.getter
def enabled(self) -> bool:
return pulumi.get(self, "enabled")
@pulumi.output_type
class GetClusterWorkloadIdentityConfigResult(dict):
def __init__(__self__, *,
workload_pool: str):
pulumi.set(__self__, "workload_pool", workload_pool)
@property
@pulumi.getter(name="workloadPool")
def workload_pool(self) -> str:
return pulumi.get(self, "workload_pool")
# ============================================================================
# File: FastFold/Kernel/jit/__init__.py
# Repo: YaoYinYing/OpenFold2 @ 57fd3cfba0bc70a2ca4c6943ba00e1c4892c1945
# License: MIT
# ============================================================================
#######################################
#https://github.com/hpcaitech/FastFold#
#######################################
from .options import _set_jit_fusion_options
_set_jit_fusion_options()
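# For context: in Megatron-style codebases (which the FastFold banner above
# credits), a helper like _set_jit_fusion_options typically flips TorchScript
# fuser flags roughly as below. This is a hedged sketch of the pattern, not
# FastFold's actual implementation; the private torch._C switches vary by
# PyTorch version.
def _set_jit_fusion_options_sketch():
    import torch
    # route TorchScript through the profiling executor so fusion kicks in
    torch._C._jit_set_profiling_executor(True)
    torch._C._jit_set_profiling_mode(True)
    # prefer the nvFuser GPU fuser over the legacy and tensor-expression fusers
    torch._C._jit_override_can_fuse_on_cpu(False)
    torch._C._jit_override_can_fuse_on_gpu(False)
    torch._C._jit_set_texpr_fuser_enabled(False)
    torch._C._jit_set_nvfuser_enabled(True)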
# ============================================================================
# File: BikeNet/BikeNet/assignment/graph.py
# Repo: hkujy/BikeNetHyper @ df22eb7a9a62eb48a863fefd5ec4d27abf68b9de
# License: MIT
# ============================================================================
"""
contain the graph class
"""
import numpy as np
class edge_a:
def __init__(self, edge_info):
self.id = edge_info[0]
self.pointer = vertex_a(edge_info[1])
self.pointee = vertex_a(edge_info[2])
self.fft = float(edge_info[3])  # free-flow travel time
self.capacity1 = float(edge_info[4])
self.alpha = float(edge_info[5])
self.beta = float(edge_info[6])
self.cost = float('inf')
self.volume1 = 0 # flow
self.volume2 = 0
self.capacity2 = float(edge_info[7].strip())
# calculate the weight by BPR function:
def cal_weight1(self,volume1,volume2,_label_lane):
"""
BPR_function
"""
# _label_lane == 1 means this lane is selected and will be divided into an exclusive bike lane and a motor lane
if _label_lane==1:
self.cost = self.fft*(1+self.alpha*np.power(volume1/self.capacity1, self.beta))
# print("@@@@@@@@@@@@@@11111111111111",self.cost)
else:
self.cost = self.fft*(1+self.alpha*np.power((volume1+volume2)/(self.capacity1+self.capacity2),self.beta))
return self.cost
def __eq__(self, other):
"""
compare whether two edges are equal, i.e. connect the same pair of vertices
(usage site unclear)
"""
if isinstance(other, self.__class__):
return (self.pointer.id == other.pointer.id) and (self.pointee.id == other.pointee.id)
else:
return False
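# Worked example of cal_weight1 above (hypothetical numbers): with
# fft = 10, alpha = 0.15, beta = 4 and an exclusive lane (_label_lane == 1),
#   e = edge_a(["E001", "1", "2", "10", "1000", "0.15", "4", "500\n"])
#   e.cal_weight1(1000, 0, 1)  # 10 * (1 + 0.15 * (1000/1000)**4) = 11.5
# i.e. at volume == capacity the BPR cost sits 15% above free-flow time.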
class vertex_a:
def __init__(self, node_id):
self.id = node_id
self.tails = []
self.heads = []
self.prev = None
self.potential = float('inf')
def __lt__(self, other):
    """
    compare the labels (potentials) of two vertices,
    used in the shortest-path search
    """
    return self.potential < other.potential
class network_a:
"""
use "set" data type, which can not be assessed by index
"""
def __init__(self, netname):
self.name = netname
self.edge_id_set = set()
self.edgeset = {}
self.edgefullset={}
self.edgenode = {}
self.node_id_set = set()
self.nodeset = {}
def add_edge(self, edge):
self.edge_id_set.add(edge.id)
self.edgeset[edge.id] = edge
self.edgefullset[(edge.pointer.id,edge.pointee.id)] = edge
self.edgenode[(edge.pointer.id, edge.pointee.id)] = edge.id
if edge.pointer.id not in self.node_id_set:
node = vertex_a(edge.pointer.id)
node.heads.append(edge)
self.nodeset[edge.pointer.id] = node
self.node_id_set.add(edge.pointer.id)
else:
self.nodeset[edge.pointer.id].heads.append(edge)
if edge.pointee.id not in self.node_id_set:
node = vertex_a(edge.pointee.id)
node.tails.append(edge)
self.nodeset[edge.pointee.id] = node
self.node_id_set.add(edge.pointee.id)
else:
self.nodeset[edge.pointee.id].tails.append(edge)
def init_cost1(self,_label_lane):
volume1 = {}
for l in self.edge_id_set:
volume1[l] = 0
volume2 = {}
for l in self.edge_id_set:
volume2[l] = 0
self.update_cost1(volume1,volume2,_label_lane)
def update_cost1(self, volume1, volume2, _label_lane):
    for l in self.edgeset.keys():
        for j in range(1, 7):
            if l == "E{:0>3}".format(j):
                self.edgeset[l].cal_weight1(volume1[l], volume2[l], _label_lane[j - 1])
class edge_b:
def __init__(self, edge_info):
self.id = edge_info[0]
self.pointer = vertex_b(edge_info[1])
self.pointee = vertex_b(edge_info[2])
self.fft = float(edge_info[3])  # free-flow travel time
self.capacity2 = float(edge_info[4])
self.alpha = float(edge_info[5])
self.beta = float(edge_info[6])
self.cost = float('inf')
self.volume1 = 0 # flow
self.volume2 = 0
self.capacity1=float(edge_info[7].strip())
# calculate the weight by BPR function:
def cal_weight2(self,volume1, volume2,_label_lane):
"""
BPR_function
"""
if _label_lane==1:
self.cost = self.fft*(1+self.alpha*np.power(volume2/self.capacity2, self.beta))
# print("@@@@@@@@@@@@@@22222222222222",self.cost)
else:
self.cost = self.fft*(1+self.alpha*np.power((volume1+volume2)/(self.capacity1+self.capacity2),self.beta))
return self.cost
def __eq__(self, other):
"""
compare whether two edges are equal, i.e. connect the same pair of vertices
(usage site unclear)
"""
if isinstance(other, self.__class__):
return (self.pointer.id == other.pointer.id) and (self.pointee.id == other.pointee.id)
else:
return False
class vertex_b:
def __init__(self, node_id):
self.id = node_id
self.tails = []
self.heads = []
self.prev = None
self.potential = float('inf')
def __lt__(self, other):
    """
    compare the labels (potentials) of two vertices,
    used in the shortest-path search
    """
    return self.potential < other.potential
class network_b:
"""
use "set" data type, which can not be assessed by index
"""
def __init__(self, netname):
self.name = netname
self.edge_id_set = set()
self.edgeset = {}
self.edgefullset={}
self.edgenode = {}
self.node_id_set = set()
self.nodeset = {}
def add_edge(self, edge):
self.edge_id_set.add(edge.id)
self.edgeset[edge.id] = edge
self.edgefullset[(edge.pointer.id,edge.pointee.id)] = edge
self.edgenode[(edge.pointer.id, edge.pointee.id)] = edge.id
if edge.pointer.id not in self.node_id_set:
node = vertex_b(edge.pointer.id)
node.heads.append(edge)
self.nodeset[edge.pointer.id] = node
self.node_id_set.add(edge.pointer.id)
else:
self.nodeset[edge.pointer.id].heads.append(edge)
if edge.pointee.id not in self.node_id_set:
node = vertex_b(edge.pointee.id)
node.tails.append(edge)
self.nodeset[edge.pointee.id] = node
self.node_id_set.add(edge.pointee.id)
else:
self.nodeset[edge.pointee.id].tails.append(edge)
def init_cost2(self,_label_lane):
volume2 = {}
for l in self.edge_id_set:
volume2[l] = 0
volume1 = {}
for l in self.edge_id_set:
volume1[l] = 0
self.update_cost2(volume1,volume2,_label_lane)
def update_cost2(self, volume1, volume2, _label_lane):
    for l in self.edgeset.keys():
        for j in range(1, 7):
            if l == "E{:0>3}".format(j):
                self.edgeset[l].cal_weight2(volume1[l], volume2[l], _label_lane[j - 1])
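# Minimal usage sketch (hypothetical data): build a one-link network and
# initialise BPR costs with lane 1 marked exclusive. The function name and
# values are for illustration only.
def _demo_network():
    net = network_a("demo")
    # columns: id, from, to, free-flow time, capacity1, alpha, beta, capacity2
    net.add_edge(edge_a(["E001", "1", "2", "10", "1000", "0.15", "4", "500\n"]))
    net.init_cost1([1, 0, 0, 0, 0, 0])   # one flag per link E001..E006
    return net.edgeset["E001"].cost      # == 10.0 at zero volume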
# ============================================================================
# File: kinow_client/apis/groups_api.py
# Repo: kinow-io/kinow-python-sdk @ 4c1699a3c78048b84287bd049a669651a5b4e2d5
# License: Apache-2.0
# ============================================================================
# coding: utf-8
"""
Server API
Reference for Server API (REST/Json)
OpenAPI spec version: 1.4.58
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class GroupsApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def attach_customer_to_group(self, group_id, customer_id, **kwargs):
"""
Attach customer to the group
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.attach_customer_to_group(group_id, customer_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int group_id: Group ID to fetch (required)
:param int customer_id: Customer ID to attach (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.attach_customer_to_group_with_http_info(group_id, customer_id, **kwargs)
else:
(data) = self.attach_customer_to_group_with_http_info(group_id, customer_id, **kwargs)
return data
def attach_customer_to_group_with_http_info(self, group_id, customer_id, **kwargs):
"""
Attach customer to the group
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.attach_customer_to_group_with_http_info(group_id, customer_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int group_id: Group ID to fetch (required)
:param int customer_id: Customer ID to attach (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['group_id', 'customer_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method attach_customer_to_group" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'group_id' is set
if ('group_id' not in params) or (params['group_id'] is None):
raise ValueError("Missing the required parameter `group_id` when calling `attach_customer_to_group`")
# verify the required parameter 'customer_id' is set
if ('customer_id' not in params) or (params['customer_id'] is None):
raise ValueError("Missing the required parameter `customer_id` when calling `attach_customer_to_group`")
collection_formats = {}
resource_path = '/groups/{group_id}/customers'.replace('{format}', 'json')
path_params = {}
if 'group_id' in params:
path_params['group_id'] = params['group_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
if 'customer_id' in params:
form_params.append(('customer_id', params['customer_id']))
self.api_client.set_default_header('Content-Type', 'application/x-www-form-urlencoded')
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
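    # Usage sketch (the client ID/secret and IDs below are placeholders, and
    # the api_key field names assume the usual swagger-codegen Configuration):
    #
    #   from kinow_client.configuration import Configuration
    #   from kinow_client.apis.groups_api import GroupsApi
    #
    #   config = Configuration()
    #   config.api_key['ApiClientId'] = 'YOUR_CLIENT_ID'
    #   config.api_key['ApiClientSecret'] = 'YOUR_CLIENT_SECRET'
    #   api = GroupsApi()
    #   api.attach_customer_to_group(group_id=42, customer_id=7)   # blocking
    #   api.attach_customer_to_group(group_id=42, customer_id=7,
    #                                callback=lambda r: print(r))  # threaded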
def create_group(self, body, **kwargs):
"""
Create new Group
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_group(body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param GroupCreateRequest body: Group settings (required)
:return: Group
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_group_with_http_info(body, **kwargs)
else:
(data) = self.create_group_with_http_info(body, **kwargs)
return data
def create_group_with_http_info(self, body, **kwargs):
"""
Create new Group
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_group_with_http_info(body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param GroupCreateRequest body: Group settings (required)
:return: Group
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_group" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params) or (params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_group`")
collection_formats = {}
resource_path = '/groups'.replace('{format}', 'json')
path_params = {}
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
self.api_client.set_default_header('Content-Type', 'application/json')
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Group',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def detach_customer_from_group(self, group_id, customer_id, **kwargs):
"""
Detach customer from group
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.detach_customer_from_group(group_id, customer_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int group_id: Group ID to fetch (required)
:param int customer_id: Customer ID to detach (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.detach_customer_from_group_with_http_info(group_id, customer_id, **kwargs)
else:
(data) = self.detach_customer_from_group_with_http_info(group_id, customer_id, **kwargs)
return data
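# Usage sketch (illustrative only; the IDs are hypothetical):
#     api.detach_customer_from_group(42, 1337)        # synchronous, returns None
#     thread = api.detach_customer_from_group(42, 1337, callback=callback_function)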
def detach_customer_from_group_with_http_info(self, group_id, customer_id, **kwargs):
"""
Detach customer from group
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.detach_customer_from_group_with_http_info(group_id, customer_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int group_id: Group ID to fetch (required)
:param int customer_id: Customer ID to detach (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['group_id', 'customer_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method detach_customer_from_group" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'group_id' is set
if ('group_id' not in params) or (params['group_id'] is None):
raise ValueError("Missing the required parameter `group_id` when calling `detach_customer_from_group`")
# verify the required parameter 'customer_id' is set
if ('customer_id' not in params) or (params['customer_id'] is None):
raise ValueError("Missing the required parameter `customer_id` when calling `detach_customer_from_group`")
collection_formats = {}
resource_path = '/groups/{group_id}/customers/{customer_id}'.replace('{format}', 'json')
path_params = {}
if 'group_id' in params:
path_params['group_id'] = params['group_id']
if 'customer_id' in params:
path_params['customer_id'] = params['customer_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None,
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_group(self, group_id, **kwargs):
"""
Get Group
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_group(group_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int group_id: Group ID to fetch (required)
:return: Group
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_group_with_http_info(group_id, **kwargs)
else:
(data) = self.get_group_with_http_info(group_id, **kwargs)
return data
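# Usage sketch (illustrative only; the group ID is hypothetical):
#     group = api.get_group(42)                       # synchronous, returns a Group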
def get_group_with_http_info(self, group_id, **kwargs):
"""
Get Group
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_group_with_http_info(group_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int group_id: Group ID to fetch (required)
:return: Group
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['group_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_group" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'group_id' is set
if ('group_id' not in params) or (params['group_id'] is None):
raise ValueError("Missing the required parameter `group_id` when calling `get_group`")
collection_formats = {}
resource_path = '/groups/{group_id}'.replace('{format}', 'json')
path_params = {}
if 'group_id' in params:
path_params['group_id'] = params['group_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Group',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_groups(self, **kwargs):
"""
Get group list
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_groups(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int page:
:param int per_page:
:param str filters: Filter expression, e.g. ``email[value]=string&email[operator]=strict&firstname[value]=string&firstname[operator]=contains``, equivalent to the JSON form ``{ \"email\": { \"value\": \"string\", \"operator\": \"strict\" }, \"firstname\": { \"value\": \"string\", \"operator\": \"contains\" } }``. Operator can be: strict, contains, between, in, gt (greater than), lt (lower than).
:param str sort_by: Sort by this attribute (id by default)
:param str sort_direction: Sorting direction (asc by default)
:return: Groups
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_groups_with_http_info(**kwargs)
else:
(data) = self.get_groups_with_http_info(**kwargs)
return data
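# Usage sketch (illustrative only; all values are hypothetical, and the filter
# string follows the format documented above):
#     groups = api.get_groups(page=1, per_page=25,
#                             filters='email[value]=@example.com&email[operator]=contains',
#                             sort_by='id', sort_direction='asc')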
def get_groups_with_http_info(self, **kwargs):
"""
Get group list
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_groups_with_http_info(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int page:
:param int per_page:
:param str filters: Filter expression, e.g. ``email[value]=string&email[operator]=strict&firstname[value]=string&firstname[operator]=contains``, equivalent to the JSON form ``{ \"email\": { \"value\": \"string\", \"operator\": \"strict\" }, \"firstname\": { \"value\": \"string\", \"operator\": \"contains\" } }``. Operator can be: strict, contains, between, in, gt (greater than), lt (lower than).
:param str sort_by: Sort by this attribute (id by default)
:param str sort_direction: Sorting direction (asc by default)
:return: Groups
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['page', 'per_page', 'filters', 'sort_by', 'sort_direction']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_groups" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
resource_path = '/groups'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'page' in params:
query_params['page'] = params['page']
if 'per_page' in params:
query_params['per_page'] = params['per_page']
if 'filters' in params:
query_params['filters'] = params['filters']
if 'sort_by' in params:
query_params['sort_by'] = params['sort_by']
if 'sort_direction' in params:
query_params['sort_direction'] = params['sort_direction']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Groups',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_product_groups(self, product_id, **kwargs):
"""
Get groups allowed to see this product
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_product_groups(product_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int product_id: Product ID to fetch (required)
:param int page:
:param int per_page:
:return: Groups
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_product_groups_with_http_info(product_id, **kwargs)
else:
(data) = self.get_product_groups_with_http_info(product_id, **kwargs)
return data
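# Usage sketch (illustrative only; the product ID is hypothetical):
#     groups = api.get_product_groups(15, page=1, per_page=10)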
def get_product_groups_with_http_info(self, product_id, **kwargs):
"""
Get groups allowed to see this product
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_product_groups_with_http_info(product_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param int product_id: Product ID to fetch (required)
:param int page:
:param int per_page:
:return: Groups
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['product_id', 'page', 'per_page']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_product_groups" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'product_id' is set
if ('product_id' not in params) or (params['product_id'] is None):
raise ValueError("Missing the required parameter `product_id` when calling `get_product_groups`")
collection_formats = {}
resource_path = '/products/{product_id}/groups'.replace('{format}', 'json')
path_params = {}
if 'product_id' in params:
path_params['product_id'] = params['product_id']
query_params = {}
if 'page' in params:
query_params['page'] = params['page']
if 'per_page' in params:
query_params['per_page'] = params['per_page']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['ApiClientId', 'ApiClientSecret']
return self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Groups',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 42.559578
| 421
| 0.564553
| 2,897
| 28,217
| 5.242665
| 0.066275
| 0.063208
| 0.022123
| 0.028444
| 0.933829
| 0.909797
| 0.900184
| 0.882275
| 0.867988
| 0.846194
| 0
| 0.000382
| 0.350959
| 28,217
| 662
| 422
| 42.623867
| 0.828929
| 0.326293
| 0
| 0.748485
| 1
| 0
| 0.172608
| 0.039761
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039394
| false
| 0
| 0.021212
| 0
| 0.118182
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7d701789705d1a73b61c2b3899a8e33324d8c438
| 2,937
|
py
|
Python
|
tpDcc/libs/math/core/matrix.py
|
tpDcc/tpDcc-libs-math
|
d4db4c10d2b460d32b68a6aabbbad4c9ada65c24
|
[
"MIT"
] | null | null | null |
tpDcc/libs/math/core/matrix.py
|
tpDcc/tpDcc-libs-math
|
d4db4c10d2b460d32b68a6aabbbad4c9ada65c24
|
[
"MIT"
] | null | null | null |
tpDcc/libs/math/core/matrix.py
|
tpDcc/tpDcc-libs-math
|
d4db4c10d2b460d32b68a6aabbbad4c9ada65c24
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Module that contains rotation matrix related functions
"""
from __future__ import print_function, division, absolute_import
import math
def rotation_matrix_xyz(rotation_angles):
"""
Converts the given rotation angles (in degrees) to the rotation matrix representing the sequence of rotations around X, Y and Z
"""
rad_angles = [math.radians(x) for x in rotation_angles]
x_angle = rad_angles[0]
y_angle = rad_angles[1]
z_angle = rad_angles[2]
s1, c1 = math.sin(z_angle), math.cos(z_angle)
s2, c2 = math.sin(y_angle), math.cos(y_angle)
s3, c3 = math.sin(x_angle), math.cos(x_angle)
m = ((c1 * c2, c1 * s2 * s3 - c3 * s1, s1 * s3 + c1 * c3 * s2),
(c2 * s1, c1 * c3 + s1 * s2 * s3, c3 * s1 * s2 - c1 * s3),
(- s2, c2 * s3, c2 * c3))
return m
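# Example (sketch): rotation_matrix_xyz([90.0, 0.0, 0.0]) builds
# ((1, 0, 0), (0, 0, -1), (0, 1, 0)), a 90-degree rotation around X that
# maps the Y axis (0, 1, 0) onto the Z axis (0, 0, 1).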
def rotation_matrix_xzy(rotation_angles):
"""
Converts the given rotation angles (in degrees) to the rotation matrix representing the sequence of rotations around X, Z and Y
"""
rad_angles = [math.radians(x) for x in rotation_angles]
x_angle = rad_angles[0]
y_angle = rad_angles[1]
z_angle = rad_angles[2]
s1, c1 = math.sin(z_angle), math.cos(z_angle)
s2, c2 = math.sin(y_angle), math.cos(y_angle)
s3, c3 = math.sin(x_angle), math.cos(x_angle)
m = ((c1 * c2, s1 * s3 - c1 * c3 * s2, c3 * s1 + c1 * s2 * s3),
(s2, c2 * c3, -c2 * s3),
(-c2 * s1, c1 * s3 + c3 * s1 * s2, c1 * c3 - s1 * s2 * s3))
return m
def rotation_matrix_to_xyz_euler(rotation_matrix):
"""
Extracts XYZ Euler angles (in degrees) from the given rotation matrix
"""
sy = math.sqrt(rotation_matrix[0][0] * rotation_matrix[0][0] + rotation_matrix[1][0] * rotation_matrix[1][0])
singular = sy < 1e-7
if not singular:
x = math.degrees(math.atan2(rotation_matrix[2][1], rotation_matrix[2][2]))
y = math.degrees(math.atan2(-rotation_matrix[2][0], sy))
z = math.degrees(math.atan2(rotation_matrix[1][0], rotation_matrix[0][0]))
else:
x = math.degrees(math.atan2(-rotation_matrix[1][2], rotation_matrix[1][1]))
y = math.degrees(math.atan2(-rotation_matrix[2][0], sy))
z = 0
return [x, y, z]
def rotation_matrix_to_xzy_euler(rotation_matrix):
"""
Extracts XZY Euler angles (in degrees) from the given rotation matrix
"""
sy = math.sqrt(rotation_matrix[0][0] * rotation_matrix[0][0] + rotation_matrix[2][0] * rotation_matrix[2][0])
singular = sy < 1e-7
if not singular:
x = math.degrees(math.atan2(-rotation_matrix[1][2], rotation_matrix[1][1]))
y = math.degrees(math.atan2(rotation_matrix[1][0], sy))
z = math.degrees(math.atan2(-rotation_matrix[2][0], rotation_matrix[0][0]))
else:
x = math.degrees(math.atan2(-rotation_matrix[1][2], rotation_matrix[1][1]))
y = math.degrees(math.atan2(rotation_matrix[1][0], sy))
z = 0
return [x, y, z]
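# Round-trip sanity check (a sketch, not part of the original module; valid
# while |y| stays below 90 degrees, away from the singular branch):
#     m = rotation_matrix_xyz([30.0, 45.0, 60.0])
#     rotation_matrix_to_xyz_euler(m)   # ~[30.0, 45.0, 60.0]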
| 33
| 120
| 0.625128
| 461
| 2,937
| 3.822126
| 0.156182
| 0.254257
| 0.093644
| 0.113507
| 0.820091
| 0.759932
| 0.741771
| 0.741203
| 0.728717
| 0.690125
| 0
| 0.062087
| 0.226762
| 2,937
| 88
| 121
| 33.375
| 0.713782
| 0.14811
| 0
| 0.66
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.08
| false
| 0
| 0.04
| 0
| 0.2
| 0.02
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7dd9aed7545d861b65f45f4241ecc3df22d76501
| 14,073
|
py
|
Python
|
python/tvm/topi/generic/conv2d.py
|
maxtnuk/incubator-tvm
|
050a836b18c419213f34b8ac76afced425d9d70e
|
[
"Apache-2.0"
] | 2
|
2019-12-10T02:11:26.000Z
|
2019-12-13T14:26:09.000Z
|
python/tvm/topi/generic/conv2d.py
|
maxtnuk/incubator-tvm
|
050a836b18c419213f34b8ac76afced425d9d70e
|
[
"Apache-2.0"
] | null | null | null |
python/tvm/topi/generic/conv2d.py
|
maxtnuk/incubator-tvm
|
050a836b18c419213f34b8ac76afced425d9d70e
|
[
"Apache-2.0"
] | 2
|
2020-11-26T00:35:02.000Z
|
2020-12-07T03:15:56.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name, unused-variable, too-many-locals
# pylint: disable=unused-argument, redefined-builtin
"""Generic convolution schedules"""
from tvm import te
from tvm import autotvm
from tvm.autotvm.task.space import SplitEntity, OtherOptionEntity
from ..utils import get_const_tuple
def fallback_schedule_cpu_common_int8(cfg, wkl, int32_lanes, num_int8_elements):
"""Fallback schedule for conv2d int8 on cpu.
Normally the innermost pattern takes two int8/uint8 tensors
data[num_int8_elements] and kernel[int32_lanes, num_int8_elements],
produces a dot product int32/uint32 output[int32_lanes].
Parameters
----------
int32_lanes : int
How many numbers of int32/uint32 will be produced using intrinsic.
This is related to output channel.
num_int8_elements : int
How many numbers of input int32/uint32 will be multiplied and reduced.
This is related to input channel.
"""
HPAD, WPAD = wkl.hpad, wkl.wpad
HSTR, WSTR = wkl.hstride, wkl.wstride
out_width = (wkl.width + 2 * WPAD - wkl.wkernel) // WSTR + 1
assert wkl.out_filter % int32_lanes == 0, "wkl.out_filter=%d, int32_lanes=%d" % (
wkl.out_filter,
int32_lanes,
)
assert wkl.in_filter % num_int8_elements == 0, "wkl.in_filter=%d, num_int8_elements=%d" % (
wkl.in_filter,
num_int8_elements,
)
oc_bn = int32_lanes if int32_lanes >= num_int8_elements else num_int8_elements
ic_bn = 1
for bn in range(oc_bn, 0, -4):
if wkl.in_filter % bn == 0:
ic_bn = bn
break
reg_n = 1
for n in range(31, 0, -1):
if out_width % n == 0:
reg_n = n
break
cfg["tile_ic"] = SplitEntity([wkl.in_filter // ic_bn, ic_bn])
cfg["tile_oc"] = SplitEntity([wkl.out_filter // oc_bn, oc_bn])
cfg["tile_ow"] = SplitEntity([out_width // reg_n, reg_n])
cfg["unroll_kw"] = OtherOptionEntity(False)
def fallback_schedule_cpu_1x1_int8(cfg, wkl, int32_lanes, num_int8_elements):
"""Fallback schedule for 1x1 conv2d int8 on cpu.
Normally the innermost pattern takes two int8/uint8 tensors
data[num_int8_elements] and kernel[int32_lanes, num_int8_elements],
produces a dot product int32/uint32 output[int32_lanes].
Parameters
----------
int32_lanes : int
How many numbers of int32/uint32 will be produced using intrinsic.
This is related to output channel.
num_int8_elements : int
How many numbers of input int32/uint32 will be multiplied and reduced.
This is related to input channel.
"""
HPAD, WPAD = wkl.hpad, wkl.wpad
HSTR, WSTR = wkl.hstride, wkl.wstride
out_height = (wkl.height + 2 * HPAD - wkl.hkernel) // HSTR + 1
out_width = (wkl.width + 2 * WPAD - wkl.wkernel) // WSTR + 1
assert wkl.out_filter % int32_lanes == 0, "wkl.out_filter=%d, int32_lanes=%d" % (
wkl.out_filter,
int32_lanes,
)
assert wkl.in_filter % num_int8_elements == 0, "wkl.in_filter=%d, num_int8_elements=%d" % (
wkl.in_filter,
num_int8_elements,
)
oc_bn = int32_lanes if int32_lanes >= num_int8_elements else num_int8_elements
ic_bn = 1
for bn in range(oc_bn, 0, -4):
if wkl.in_filter % bn == 0:
ic_bn = bn
break
for ow_factor in range(out_width, 0, -1):
if out_width % ow_factor == 0:
for oh_factor in range(out_height, 0, -1):
if out_height % oh_factor == 0 and ow_factor * oh_factor < 32:
cfg["tile_ic"] = SplitEntity([wkl.in_filter // ic_bn, ic_bn])
cfg["tile_oc"] = SplitEntity([wkl.out_filter // oc_bn, oc_bn])
cfg["tile_oh"] = OtherOptionEntity(oh_factor)
cfg["tile_ow"] = SplitEntity([out_width // ow_factor, ow_factor])
return
raise ValueError("cannot decide default schedule for workload: {}".format(wkl))
def schedule_conv_NCHWc_cpu_common_int8(
s, cfg, data_vec, kernel_vec, conv_out, last, int32_lanes=16, int8_elems=4, intrin=None
):
"""
Defines the schedule for INT8 for Intel and ARM machines.
Uses the Intel/ARM intrinsics to perform INT8 operations.
More details - https://software.intel.com/en-us/articles/lower-numerical-precision-deep-learning-inference-and-training
"""
reg_n, unroll_kw = cfg["tile_ow"].size[-1], cfg["unroll_kw"].val
_, _, _, _, ic_bn = get_const_tuple(data_vec.shape)
_, _, _, _, oc_bn = get_const_tuple(conv_out.shape)
# schedule pad
if isinstance(s[data_vec].op, te.tensor.ComputeOp) and "pad" in data_vec.op.tag:
batch, ic_chunk, ih, iw, ic_block = s[data_vec].op.axis
parallel_axis = s[data_vec].fuse(batch, ic_chunk, ih)
s[data_vec].parallel(parallel_axis)
data_vec = data_vec.op.input_tensors[0]
if autotvm.GLOBAL_SCOPE.in_tuning:
# only in autotuning, input data of conv2d_NCHWc will be 4-D.
# skip this part during tuning to make records accurate.
# this part will be folded during Relay fold_constant pass.
s[data_vec].pragma(s[data_vec].op.axis[0], "debug_skip_region")
s[kernel_vec].pragma(s[kernel_vec].op.axis[0], "debug_skip_region")
elif isinstance(kernel_vec.op, te.tensor.ComputeOp) and kernel_vec.name == "kernel_vec":
# data and kernel are not pre-computed, schedule layout transform here.
# this should only be used by x86 conv2d_nchw, which is for
# testing purpose.
batch, ic_chunk, ih, ic_block, iw = s[data_vec].op.axis
parallel_axis = s[data_vec].fuse(batch, ic_chunk, ih)
s[data_vec].parallel(parallel_axis)
# conv2d_nchwc_int8 has 7D kernel
oc_chunk, ic_chunk, oh, ow, ic_block, oc_block, _ = s[kernel_vec].op.axis
s[kernel_vec].reorder(oc_chunk, oh, ic_chunk, ow, ic_block, oc_block)
oc_bn = cfg["tile_oc"].size[-1]
if oc_bn > 1:
s[kernel_vec].vectorize(oc_block)
parallel_axis = s[kernel_vec].fuse(oc_chunk, oh)
s[kernel_vec].parallel(parallel_axis)
# schedule 5-D NCHW[x]c conv
C, O = conv_out, last
CC = s.cache_write(C, "global")
batch, oc_chunk, oh, ow, oc_block = s[C].op.axis
ow_chunk, ow_block = s[C].split(ow, factor=reg_n)
s[C].reorder(oc_chunk, oh, ow_chunk, ow_block, oc_block)
parallel_axis = s[C].fuse(batch, oc_chunk, oh)
s[C].vectorize(oc_block)
if C == O:
s[C].parallel(parallel_axis)
s[CC].compute_at(s[C], ow_chunk)
_, oc_chunk, oh, ow, oc_block = s[CC].op.axis
kh, kw, ic_outer, ic_f_inner, ic_s_inner = s[CC].op.reduce_axis
ow_chunk, ow_block = s[CC].split(ow, factor=reg_n)
assert oc_bn % int32_lanes == 0
assert ic_bn % int8_elems == 0 # (u)int8 elements in (u)int32
oc_f_inner, oc_s_inner = s[CC].split(oc_block, factor=int32_lanes)
if unroll_kw:
s[CC].reorder(
oc_chunk,
oh,
ow_chunk,
ic_outer,
kh,
ic_f_inner,
kw,
ow_block,
oc_f_inner,
oc_s_inner,
ic_s_inner,
)
s[CC].unroll(kw)
else:
s[CC].reorder(
oc_chunk,
oh,
ow_chunk,
ic_outer,
kh,
kw,
ic_f_inner,
ow_block,
oc_f_inner,
oc_s_inner,
ic_s_inner,
)
if intrin is not None:
s[CC].tensorize(oc_s_inner, intrin)
s[CC].unroll(ow_block)
s[CC].unroll(oc_f_inner)
if C != O:
out_ndim = len(s[O].op.axis)
if out_ndim == 5:
batch, oc_chunk, oh, ow, oc_block = s[O].op.axis
ow_chunk, ow_block = s[O].split(ow, factor=reg_n)
s[O].reorder(oc_chunk, oh, ow_chunk, ow_block, oc_block)
parallel_axis = s[O].fuse(batch, oc_chunk, oh)
s[C].compute_at(s[O], parallel_axis)
s[O].vectorize(oc_block)
s[O].parallel(parallel_axis)
elif out_ndim == 4:
batch, oc, oh, ow = s[O].op.axis
ow_chunk, ow_block = s[O].split(ow, factor=reg_n)
oc_chunk, oc_block = s[O].split(oc, factor=oc_bn)
s[O].reorder(oc_chunk, oh, ow_chunk, ow_block, oc_block)
parallel_axis = s[O].fuse(batch, oc_chunk, oh)
s[C].compute_at(s[O], parallel_axis)
s[O].vectorize(oc_block)
s[O].parallel(parallel_axis)
else:
raise ValueError("Unsupported output ndim: %s" % out_ndim)
return s
def schedule_conv_NCHWc_cpu_1x1_int8(
s, cfg, data_vec, kernel_vec, conv_out, last, int32_lanes=16, int8_elems=4, intrin=None
):
"""
Defines the 1x1 conv schedule for INT8 for Intel and ARM machines.
Uses the Intel/ARM intrinsics to perform INT8 operations.
More details - https://software.intel.com/en-us/articles/lower-numerical-precision-deep-learning-inference-and-training
"""
oh_factor, ow_factor = cfg["tile_oh"].val, cfg["tile_ow"].size[-1]
_, _, _, _, ic_bn = get_const_tuple(data_vec.shape)
_, _, _, _, oc_bn = get_const_tuple(conv_out.shape)
# schedule pad
if isinstance(s[data_vec].op, te.tensor.ComputeOp) and "pad" in data_vec.op.tag:
batch, ic_chunk, ih, iw, ic_block = s[data_vec].op.axis
parallel_axis = s[data_vec].fuse(batch, ic_chunk, ih)
s[data_vec].parallel(parallel_axis)
data_vec = data_vec.op.input_tensors[0]
if autotvm.GLOBAL_SCOPE.in_tuning:
# only in autotuning, input data of conv2d_NCHWc will be 4-D.
# skip this part during tuning to make records accurate.
# this part will be folded during Relay fold_constant pass.
s[data_vec].pragma(s[data_vec].op.axis[0], "debug_skip_region")
s[kernel_vec].pragma(s[kernel_vec].op.axis[0], "debug_skip_region")
elif isinstance(kernel_vec.op, te.tensor.ComputeOp) and kernel_vec.name == "kernel_vec":
# data and kernel are not pre-computed, schedule layout transform here.
# this should only be used by x86 conv2d_nchw, which is for
# testing purpose.
batch, ic_chunk, ih, ic_block, iw = s[data_vec].op.axis
parallel_axis = s[data_vec].fuse(batch, ic_chunk, ih)
s[data_vec].parallel(parallel_axis)
# Conv2d int8 schedule has 7D kernel
oc_chunk, ic_chunk, oh, ow, ic_block, oc_block, _ = s[kernel_vec].op.axis
s[kernel_vec].reorder(oc_chunk, oh, ic_chunk, ow, ic_block, oc_block)
oc_bn = cfg["tile_oc"].size[-1]
if oc_bn > 1:
s[kernel_vec].vectorize(oc_block)
parallel_axis = s[kernel_vec].fuse(oc_chunk, oh)
s[kernel_vec].parallel(parallel_axis)
C, O = conv_out, last
CC = s.cache_write(C, "global")
batch, oc_chunk, oh, ow, oc_block = s[C].op.axis
oh_outer, oh_inner = s[C].split(oh, factor=oh_factor)
ow_outer, ow_inner = s[C].split(ow, factor=ow_factor)
s[C].reorder(oc_chunk, oh_outer, ow_outer, oh_inner, ow_inner, oc_block)
s[C].vectorize(oc_block)
parallel_axis = s[C].fuse(batch, oc_chunk, oh_outer)
s[CC].compute_at(s[C], parallel_axis)
if C == O:
s[C].parallel(parallel_axis)
_, oc_chunk, oh, ow, oc_block = s[CC].op.axis
kh, kw, ic_outer, ic_f_inner, ic_s_inner = s[CC].op.reduce_axis
assert oc_bn % int32_lanes == 0
assert ic_bn % int8_elems == 0 # (u)int8 elements in (u)int32
oc_f_inner, oc_s_inner = s[CC].split(oc_block, factor=int32_lanes)
oh_outer, oh_inner = s[CC].split(oh, factor=oh_factor)
ow_outer, ow_inner = s[CC].split(ow, factor=ow_factor)
s[CC].reorder(
oc_chunk,
oh_outer,
ow_outer,
kh,
kw,
ic_outer,
ic_f_inner,
oh_inner,
ow_inner,
oc_f_inner,
oc_s_inner,
ic_s_inner,
)
s[CC].fuse(oc_chunk, oh_outer)
if intrin is not None:
s[CC].tensorize(oc_s_inner, intrin)
s[CC].unroll(ow_inner)
s[CC].unroll(oh_inner)
if C != O:
out_ndim = len(s[O].op.axis)
if out_ndim == 5:
batch, oc_chunk, oh, ow, oc_block = s[O].op.axis
oh_outer, oh_inner = s[O].split(oh, factor=oh_factor)
ow_outer, ow_inner = s[O].split(ow, factor=ow_factor)
s[O].reorder(oc_chunk, oh_outer, ow_outer, oh_inner, ow_inner, oc_block)
parallel_axis = s[O].fuse(batch, oc_chunk, oh_outer)
s[C].compute_at(s[O], parallel_axis)
s[O].vectorize(oc_block)
s[O].parallel(parallel_axis)
elif out_ndim == 4:
batch, oc, oh, ow = s[O].op.axis
oc_chunk, oc_block = s[O].split(oc, factor=oc_bn)
oh_outer, oh_inner = s[O].split(oh, factor=oh_factor)
ow_outer, ow_inner = s[O].split(ow, factor=ow_factor)
s[O].reorder(oc_chunk, oh_outer, ow_outer, oh_inner, ow_inner, oc_block)
parallel_axis = s[O].fuse(batch, oc_chunk, oh_outer)
s[C].compute_at(s[O], parallel_axis)
s[O].vectorize(oc_block)
s[O].parallel(parallel_axis)
else:
raise ValueError("Unsupported output ndim: %s" % out_ndim)
return s
| 38.768595
| 95
| 0.634335
| 2,184
| 14,073
| 3.846612
| 0.130952
| 0.008094
| 0.027854
| 0.02095
| 0.827163
| 0.807166
| 0.787406
| 0.777288
| 0.771099
| 0.771099
| 0
| 0.01933
| 0.253748
| 14,073
| 362
| 96
| 38.875691
| 0.780613
| 0.228665
| 0
| 0.772727
| 0
| 0
| 0.042343
| 0
| 0
| 0
| 0
| 0
| 0.033058
| 1
| 0.016529
| false
| 0
| 0.016529
| 0
| 0.045455
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7ddbdf636f7a8aee0e282908354355bf8a69143c
| 283,380
|
py
|
Python
|
protocols/reports_4_2_0.py
|
Lucioric2000/GelReportModels
|
1704cdea3242d5b46c8b81ef46553ccae2799435
|
[
"Apache-2.0"
] | null | null | null |
protocols/reports_4_2_0.py
|
Lucioric2000/GelReportModels
|
1704cdea3242d5b46c8b81ef46553ccae2799435
|
[
"Apache-2.0"
] | null | null | null |
protocols/reports_4_2_0.py
|
Lucioric2000/GelReportModels
|
1704cdea3242d5b46c8b81ef46553ccae2799435
|
[
"Apache-2.0"
] | null | null | null |
"""
DO NOT EDIT THIS FILE!!
This file is automatically generated by the process_schemas.py program
in the scripts directory. It is not intended to be edited directly. If
you need to update the GEL protocol classes, please run the script
on the appropriate schema version.
"""
from protocols.protocol import ProtocolElement
from protocols.protocol import SearchRequest
from protocols.protocol import SearchResponse
from protocols.protocol import avro_parse
import avro.schema
version = '4.2.0'
class ACMGClassification(object):
"""
No documentation
"""
pathogenic_variant = "pathogenic_variant"
likely_pathogenic_variant = "likely_pathogenic_variant"
variant_of_unknown_clinical_significance = "variant_of_unknown_clinical_significance"
likely_benign_variant = "likely_benign_variant"
benign_variant = "benign_variant"
not_assessed = "not_assessed"
def __hash__(self):
return str(self).__hash__()
class Action(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "Action", "namespace": "org.gel.models.report.avro", "fields": [{"name":
"actionType", "type": ["null", {"type": "enum", "name": "ActionType", "doc": "", "symbols":
["therapy", "therapeutic", "prognosis", "diagnosis"]}]}, {"name": "evidences", "type": ["null",
{"type": "array", "items": "string"}], "doc": ""}, {"name": "drug", "type": ["null", "string"],
"doc": ""}, {"name": "status", "type": ["null", {"type": "enum", "name": "ActionStatus", "doc": "",
"symbols": ["clinical", "pre_clinical"]}], "doc": ""}, {"name": "variantActionable", "type":
"boolean", "doc": ""}, {"name": "comments", "type": ["null", {"type": "array", "items": "string"}],
"doc": ""}, {"name": "url", "type": ["null", "string"], "doc": ""}, {"name": "evidenceType", "type":
["null", "string"], "doc": ""}, {"name": "source", "type": "string", "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"actionType",
"comments",
"drug",
"evidenceType",
"evidences",
"source",
"status",
"url",
"variantActionable",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'actionType', 'comments', 'drug', 'evidenceType', 'evidences',
'source', 'status', 'url', 'variantActionable'
]
def __init__(self, **kwargs):
self.actionType = kwargs.get(
'actionType', None)
self.comments = kwargs.get(
'comments', None)
self.drug = kwargs.get(
'drug', None)
self.evidenceType = kwargs.get(
'evidenceType', None)
self.evidences = kwargs.get(
'evidences', None)
self.source = kwargs.get(
'source', None)
self.status = kwargs.get(
'status', None)
self.url = kwargs.get(
'url', None)
self.variantActionable = kwargs.get(
'variantActionable', None)
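# Construction sketch (field values are hypothetical; Action reads its fields
# from keyword arguments and leaves anything omitted as None):
#     action = Action(actionType='therapy', drug='imatinib',
#                     variantActionable=True, source='CIViC')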
class ActionStatus(object):
"""
this is the type of actionability for the reported event
"""
clinical = "clinical"
pre_clinical = "pre_clinical"
def __hash__(self):
return str(self).__hash__()
class ActionType(object):
"""
this is the type of actionability for the reported event
"""
therapy = "therapy"
therapeutic = "therapeutic"
prognosis = "prognosis"
diagnosis = "diagnosis"
def __hash__(self):
return str(self).__hash__()
class Actionability(object):
"""
No documentation
"""
yes = "yes"
no = "no"
not_yet = "not_yet"
na = "na"
def __hash__(self):
return str(self).__hash__()
class AdditionalAnalysisPanel(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "AdditionalAnalysisPanel", "namespace": "org.gel.models.report.avro",
"fields": [{"name": "specificDisease", "type": "string"}, {"name": "panelName", "type": "string"},
{"name": "panelVersion", "type": ["null", "string"]}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"panelName",
"panelVersion",
"specificDisease",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'panelName', 'panelVersion', 'specificDisease'
]
def __init__(self, **kwargs):
self.panelName = kwargs.get(
'panelName', None)
self.panelVersion = kwargs.get(
'panelVersion', None)
self.specificDisease = kwargs.get(
'specificDisease', None)
class AdoptedStatus(object):
"""
adoptedin means adopted into the family; adoptedout means the child
belonged to the family and was adopted out
"""
notadopted = "notadopted"
adoptedin = "adoptedin"
adoptedout = "adoptedout"
def __hash__(self):
return str(self).__hash__()
class AffectionStatus(object):
"""
Affection Status
"""
UNAFFECTED = "UNAFFECTED"
AFFECTED = "AFFECTED"
UNCERTAIN = "UNCERTAIN"
def __hash__(self):
return str(self).__hash__()
class AgeOfOnset(object):
"""
No documentation
"""
EMBRYONAL_ONSET = "EMBRYONAL_ONSET"
FETAL_ONSET = "FETAL_ONSET"
NEONATAL_ONSET = "NEONATAL_ONSET"
INFANTILE_ONSET = "INFANTILE_ONSET"
CHILDHOOD_ONSET = "CHILDHOOD_ONSET"
JUVENILE_ONSET = "JUVENILE_ONSET"
YOUNG_ADULT_ONSET = "YOUNG_ADULT_ONSET"
LATE_ONSET = "LATE_ONSET"
MIDDLE_AGE_ONSET = "MIDDLE_AGE_ONSET"
def __hash__(self):
return str(self).__hash__()
class AlleleOrigin(object):
"""
Variant origin:
* `SO_0001781`: de novo variant. http://purl.obolibrary.org/obo/SO_0001781
* `SO_0001778`: germline variant. http://purl.obolibrary.org/obo/SO_0001778
* `SO_0001775`: maternal variant. http://purl.obolibrary.org/obo/SO_0001775
* `SO_0001776`: paternal variant. http://purl.obolibrary.org/obo/SO_0001776
* `SO_0001779`: pedigree specific variant. http://purl.obolibrary.org/obo/SO_0001779
* `SO_0001780`: population specific variant. http://purl.obolibrary.org/obo/SO_0001780
* `SO_0001777`: somatic variant. http://purl.obolibrary.org/obo/SO_0001777
"""
de_novo_variant = "de_novo_variant"
germline_variant = "germline_variant"
maternal_variant = "maternal_variant"
paternal_variant = "paternal_variant"
pedigree_specific_variant = "pedigree_specific_variant"
population_specific_variant = "population_specific_variant"
somatic_variant = "somatic_variant"
def __hash__(self):
return str(self).__hash__()
class AnalysisPanel(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "AnalysisPanel", "namespace": "org.gel.models.participant.avro",
"fields": [{"name": "specificDisease", "type": "string"}, {"name": "panelName", "type": "string"},
{"name": "panelVersion", "type": ["null", "string"]}, {"name": "reviewOutcome", "type": "string"},
{"name": "multipleGeneticOrigins", "type": "string"}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"multipleGeneticOrigins",
"panelName",
"panelVersion",
"reviewOutcome",
"specificDisease",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'multipleGeneticOrigins', 'panelName', 'panelVersion',
'reviewOutcome', 'specificDisease'
]
def __init__(self, **kwargs):
self.multipleGeneticOrigins = kwargs.get(
'multipleGeneticOrigins', None)
self.panelName = kwargs.get(
'panelName', None)
self.panelVersion = kwargs.get(
'panelVersion', None)
self.reviewOutcome = kwargs.get(
'reviewOutcome', None)
self.specificDisease = kwargs.get(
'specificDisease', None)
class AnalysisType(object):
"""
No documentation
"""
rare_disease = "rare_disease"
cancer = "cancer"
def __hash__(self):
return str(self).__hash__()
class Ancestries(ProtocolElement):
"""
Ancestries, defined as Ethnic category(ies) and Chi-square test
"""
_schemaSource = """
{"type": "record", "name": "Ancestries", "namespace": "org.gel.models.participant.avro", "doc": "",
"fields": [{"name": "mothersEthnicOrigin", "type": ["null", {"type": "enum", "name":
"EthnicCategory", "doc": "", "symbols": ["D", "E", "F", "G", "A", "B", "C", "L", "M", "N", "H", "J",
"K", "P", "S", "R", "Z"]}], "doc": ""}, {"name": "mothersOtherRelevantAncestry", "type": ["null",
"string"], "doc": ""}, {"name": "fathersEthnicOrigin", "type": ["null", "EthnicCategory"], "doc":
""}, {"name": "fathersOtherRelevantAncestry", "type": ["null", "string"], "doc": ""}, {"name":
"chiSquare1KGenomesPhase3Pop", "type": ["null", {"type": "array", "items": {"type": "record",
"name": "ChiSquare1KGenomesPhase3Pop", "doc": "", "fields": [{"name": "kgSuperPopCategory", "type":
{"type": "enum", "name": "KgSuperPopCategory", "doc": "", "symbols": ["AFR", "AMR", "EAS", "EUR",
"SAS"]}, "doc": ""}, {"name": "kgPopCategory", "type": ["null", {"type": "enum", "name":
"KgPopCategory", "doc": "", "symbols": ["ACB", "ASW", "BEB", "CDX", "CEU", "CHB", "CHS", "CLM",
"ESN", "FIN", "GBR", "GIH", "GWD", "IBS", "ITU", "JPT", "KHV", "LWK", "MSL", "MXL", "PEL", "PJL",
"PUR", "STU", "TSI", "YRI"]}], "doc": ""}, {"name": "chiSquare", "type": "double", "doc": ""}]}}],
"doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"chiSquare1KGenomesPhase3Pop",
"fathersEthnicOrigin",
"fathersOtherRelevantAncestry",
"mothersEthnicOrigin",
"mothersOtherRelevantAncestry",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {
'chiSquare1KGenomesPhase3Pop': ChiSquare1KGenomesPhase3Pop,
}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {
'chiSquare1KGenomesPhase3Pop': ChiSquare1KGenomesPhase3Pop,
}
return embeddedTypes[fieldName]
__slots__ = [
'chiSquare1KGenomesPhase3Pop', 'fathersEthnicOrigin',
'fathersOtherRelevantAncestry', 'mothersEthnicOrigin',
'mothersOtherRelevantAncestry'
]
def __init__(self, **kwargs):
self.chiSquare1KGenomesPhase3Pop = kwargs.get(
'chiSquare1KGenomesPhase3Pop', None)
self.fathersEthnicOrigin = kwargs.get(
'fathersEthnicOrigin', None)
self.fathersOtherRelevantAncestry = kwargs.get(
'fathersOtherRelevantAncestry', None)
self.mothersEthnicOrigin = kwargs.get(
'mothersEthnicOrigin', None)
self.mothersOtherRelevantAncestry = kwargs.get(
'mothersOtherRelevantAncestry', None)
class AuditLog(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "AuditLog", "namespace": "org.gel.models.report.avro", "fields":
[{"name": "interpretationRequestId", "type": "string", "doc": ""}, {"name":
"interpretationRequestVersion", "type": "string", "doc": ""}, {"name": "code", "type": {"type":
"enum", "name": "Code", "doc": "", "symbols": ["C0", "C1", "C2", "C3", "C4", "C5", "C6", "C7"]}},
{"name": "caseShared", "type": ["null", {"type": "record", "name": "CaseShared", "fields": [{"name":
"previousGroups", "type": {"type": "array", "items": "string"}}, {"name": "modifiedGroups", "type":
{"type": "array", "items": "string"}}]}]}, {"name": "supportingEvidences", "type": ["null", {"type":
"record", "name": "SupportingEvidences", "fields": [{"name": "previousSupportingEvidences", "type":
{"type": "array", "items": "string"}}, {"name": "modifiedSupportingEvidences", "type": {"type":
"array", "items": "string"}}]}]}, {"name": "modifiedVariants", "type": ["null", {"type": "array",
"items": {"type": "record", "name": "ModifiedVariant", "fields": [{"name": "previousVariant",
"type": {"type": "record", "name": "ReportedVariant", "fields": [{"name": "chromosome", "type":
"string", "doc": ""}, {"name": "dbSnpId", "type": ["null", "string"], "doc": ""}, {"name":
"position", "type": "int", "doc": ""}, {"name": "reference", "type": "string", "doc": ""}, {"name":
"alternate", "type": "string", "doc": ""}, {"name": "calledGenotypes", "type": {"type": "array",
"items": {"type": "record", "name": "CalledGenotype", "doc": "", "fields": [{"name": "gelId",
"type": "string", "doc": ""}, {"name": "sampleId", "type": "string", "doc": ""}, {"name":
"genotype", "type": {"type": "enum", "name": "Zygosity", "doc": "", "symbols":
["reference_homozygous", "heterozygous", "alternate_homozygous", "missing",
"half_missing_reference", "half_missing_alternate", "alternate_hemizigous", "reference_hemizigous",
"unk"]}, "doc": ""}, {"name": "phaseSet", "type": ["null", "int"], "doc": ""}, {"name":
"depthReference", "type": ["null", "int"], "doc": ""}, {"name": "depthAlternate", "type": ["null",
"int"], "doc": ""}, {"name": "copyNumber", "type": ["null", "int"], "doc": ""}]}}, "doc": ""},
{"name": "reportEvents", "type": {"type": "array", "items": {"type": "record", "name":
"ReportEvent", "fields": [{"name": "reportEventId", "type": "string", "doc": ""}, {"name":
"phenotype", "type": "string", "doc": ""}, {"name": "panelName", "type": ["null", "string"], "doc":
""}, {"name": "panelVersion", "type": ["null", "string"], "doc": ""}, {"name": "modeOfInheritance",
"type": {"type": "enum", "name": "ReportedModeOfInheritance", "doc": "", "symbols": ["monoallelic",
"monoallelic_not_imprinted", "monoallelic_maternally_imprinted", "monoallelic_paternally_imprinted",
"biallelic", "monoallelic_and_biallelic", "monoallelic_and_more_severe_biallelic",
"xlinked_biallelic", "xlinked_monoallelic", "mitochondrial", "unknown"]}, "doc": ""}, {"name":
"genomicFeature", "type": {"type": "record", "name": "GenomicFeature", "fields": [{"name":
"featureType", "type": {"type": "enum", "name": "FeatureTypes", "symbols": ["RegulatoryRegion",
"Gene", "Transcript"]}, "doc": ""}, {"name": "ensemblId", "type": "string", "doc": ""}, {"name":
"hgnc", "type": ["null", "string"], "doc": ""}, {"name": "otherIds", "type": ["null", {"type":
"map", "values": "string"}], "doc": ""}]}, "doc": ""}, {"name": "penetrance", "type": {"type":
"enum", "name": "Penetrance", "namespace": "org.gel.models.participant.avro", "doc": "", "symbols":
["complete", "incomplete"]}, "doc": ""}, {"name": "score", "type": "float", "doc": ""}, {"name":
"vendorSpecificScores", "type": ["null", {"type": "map", "values": "float"}], "doc": ""}, {"name":
"variantClassification", "type": ["null", {"type": "enum", "name": "VariantClassification", "doc":
"", "symbols": ["pathogenic_variant", "likely_pathogenic_variant",
"variant_of_unknown_clinical_significance", "likely_benign_variant", "benign_variant",
"not_assessed"]}], "doc": ""}, {"name": "fullyExplainsPhenotype", "type": ["null", "boolean"],
"doc": ""}, {"name": "groupOfVariants", "type": ["null", "int"], "doc": ""}, {"name":
"eventJustification", "type": ["null", "string"], "doc": ""}, {"name": "tier", "type": ["null",
{"type": "enum", "name": "Tier", "doc": "", "symbols": ["NONE", "TIER1", "TIER2", "TIER3"]}], "doc":
""}]}}, "doc": ""}, {"name": "additionalTextualVariantAnnotations", "type": ["null", {"type": "map",
"values": "string"}], "doc": ""}, {"name": "evidenceIds", "type": ["null", {"type": "map", "values":
"string"}], "doc": ""}, {"name": "additionalNumericVariantAnnotations", "type": ["null", {"type":
"map", "values": "float"}], "doc": ""}, {"name": "comments", "type": ["null", {"type": "array",
"items": "string"}], "doc": ""}]}}, {"name": "modifiedVariant", "type": "ReportedVariant"}]}}]},
{"name": "addedVariants", "type": ["null", {"type": "array", "items": "ReportedVariant"}]}, {"name":
"removedVariants", "type": ["null", {"type": "array", "items": "ReportedVariant"}]}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"addedVariants",
"caseShared",
"code",
"interpretationRequestId",
"interpretationRequestVersion",
"modifiedVariants",
"removedVariants",
"supportingEvidences",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {
'addedVariants': ReportedVariant,
'caseShared': CaseShared,
'modifiedVariants': ModifiedVariant,
'removedVariants': ReportedVariant,
'supportingEvidences': SupportingEvidences,
}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {
'addedVariants': ReportedVariant,
'caseShared': CaseShared,
'modifiedVariants': ModifiedVariant,
'removedVariants': ReportedVariant,
'supportingEvidences': SupportingEvidences,
}
return embeddedTypes[fieldName]
__slots__ = [
'addedVariants', 'caseShared', 'code',
'interpretationRequestId', 'interpretationRequestVersion',
'modifiedVariants', 'removedVariants', 'supportingEvidences'
]
def __init__(self, **kwargs):
self.addedVariants = kwargs.get(
'addedVariants', None)
self.caseShared = kwargs.get(
'caseShared', None)
self.code = kwargs.get(
'code', None)
self.interpretationRequestId = kwargs.get(
'interpretationRequestId', None)
self.interpretationRequestVersion = kwargs.get(
'interpretationRequestVersion', None)
self.modifiedVariants = kwargs.get(
'modifiedVariants', None)
self.removedVariants = kwargs.get(
'removedVariants', None)
self.supportingEvidences = kwargs.get(
'supportingEvidences', None)
class CalledGenotype(ProtocolElement):
"""
This is intended to hold the genotypes for the family members.
In principle it is a phased zygosity as in the VCF spec, called by
the analysis provider if further phasing is conducted.
"""
_schemaSource = """
{"type": "record", "name": "CalledGenotype", "namespace": "org.gel.models.report.avro", "doc": "",
"fields": [{"name": "gelId", "type": "string", "doc": ""}, {"name": "sampleId", "type": "string",
"doc": ""}, {"name": "genotype", "type": {"type": "enum", "name": "Zygosity", "doc": "", "symbols":
["reference_homozygous", "heterozygous", "alternate_homozygous", "missing",
"half_missing_reference", "half_missing_alternate", "alternate_hemizigous", "reference_hemizigous",
"unk"]}, "doc": ""}, {"name": "phaseSet", "type": ["null", "int"], "doc": ""}, {"name":
"depthReference", "type": ["null", "int"], "doc": ""}, {"name": "depthAlternate", "type": ["null",
"int"], "doc": ""}, {"name": "copyNumber", "type": ["null", "int"], "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"copyNumber",
"depthAlternate",
"depthReference",
"gelId",
"genotype",
"phaseSet",
"sampleId",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'copyNumber', 'depthAlternate', 'depthReference', 'gelId',
'genotype', 'phaseSet', 'sampleId'
]
def __init__(self, **kwargs):
self.copyNumber = kwargs.get(
'copyNumber', None)
self.depthAlternate = kwargs.get(
'depthAlternate', None)
self.depthReference = kwargs.get(
'depthReference', None)
self.gelId = kwargs.get(
'gelId', None)
self.genotype = kwargs.get(
'genotype', None)
self.phaseSet = kwargs.get(
'phaseSet', None)
self.sampleId = kwargs.get(
'sampleId', None)
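# Construction sketch (identifiers are hypothetical; `genotype` takes one of
# the Zygosity symbols embedded in the schema above):
#     cg = CalledGenotype(gelId='P0001', sampleId='S0001',
#                         genotype='heterozygous', depthReference=18,
#                         depthAlternate=22)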
class CancerActionability(object):
"""
An enumeration of Variant Actionability:
* `predicts_therapeutic_response`: Predicts therapeutic response
* `prognostic`: Prognostic
* `defines_diagnosis_group`: Defines diagnosis group
* `eligibility_for_trial`: Eligibility for trial
* `other`: Other (please specify)
"""
predicts_therapeutic_response = "predicts_therapeutic_response"
prognostic = "prognostic"
defines_diagnosis_group = "defines_diagnosis_group"
eligibility_for_trial = "eligibility_for_trial"
other = "other"
def __hash__(self):
return str(self).__hash__()
class CancerActionableVariants(object):
"""
* `yes`: yes
* `no`: no
"""
yes = "yes"
no = "no"
def __hash__(self):
return str(self).__hash__()
class CancerCaseLevelQuestions(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "CancerCaseLevelQuestions", "namespace": "org.gel.models.report.avro",
"fields": [{"name": "total_review_time", "type": "double", "doc": ""}, {"name": "mdt1_time", "type":
"double", "doc": ""}, {"name": "mdt2_time", "type": ["null", "double"], "doc": ""}, {"name":
"validation_assay_time", "type": ["null", "double"], "doc": ""}, {"name": "wet_validation_time",
"type": ["null", "double"], "doc": ""}, {"name": "analytical_validation_time", "type": ["null",
"double"], "doc": ""}, {"name": "primary_reporting_time", "type": "double", "doc": ""}, {"name":
"primary_authorisation_time", "type": "double", "doc": ""}, {"name": "report_distribution_time",
"type": "double", "doc": ""}, {"name": "total_time", "type": "double", "doc": ""}, {"name":
"reviewedInMdtWga", "type": {"type": "enum", "name": "ReviewedParts", "doc": "", "symbols":
["domain_1", "domain_1_and_2", "domain_1_2_and_suplementary"]}, "doc": ""}, {"name":
"actionableVariants", "type": {"type": "enum", "name": "CancerActionableVariants", "doc": "",
"symbols": ["yes", "no"]}, "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"actionableVariants",
"analytical_validation_time",
"mdt1_time",
"mdt2_time",
"primary_authorisation_time",
"primary_reporting_time",
"report_distribution_time",
"reviewedInMdtWga",
"total_review_time",
"total_time",
"validation_assay_time",
"wet_validation_time",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'actionableVariants', 'analytical_validation_time',
'mdt1_time', 'mdt2_time', 'primary_authorisation_time',
'primary_reporting_time', 'report_distribution_time',
'reviewedInMdtWga', 'total_review_time', 'total_time',
'validation_assay_time', 'wet_validation_time'
]
def __init__(self, **kwargs):
self.actionableVariants = kwargs.get(
'actionableVariants', None)
self.analytical_validation_time = kwargs.get(
'analytical_validation_time', None)
self.mdt1_time = kwargs.get(
'mdt1_time', None)
self.mdt2_time = kwargs.get(
'mdt2_time', None)
self.primary_authorisation_time = kwargs.get(
'primary_authorisation_time', None)
self.primary_reporting_time = kwargs.get(
'primary_reporting_time', None)
self.report_distribution_time = kwargs.get(
'report_distribution_time', None)
self.reviewedInMdtWga = kwargs.get(
'reviewedInMdtWga', None)
self.total_review_time = kwargs.get(
'total_review_time', None)
self.total_time = kwargs.get(
'total_time', None)
self.validation_assay_time = kwargs.get(
'validation_assay_time', None)
self.wet_validation_time = kwargs.get(
'wet_validation_time', None)
class CancerExitQuestionnaire(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "CancerExitQuestionnaire", "namespace": "org.gel.models.report.avro",
"fields": [{"name": "eventDate", "type": "string"}, {"name": "reporter", "type": "string"}, {"name":
"caseLevelQuestions", "type": {"type": "record", "name": "CancerCaseLevelQuestions", "fields":
[{"name": "total_review_time", "type": "double", "doc": ""}, {"name": "mdt1_time", "type": "double",
"doc": ""}, {"name": "mdt2_time", "type": ["null", "double"], "doc": ""}, {"name":
"validation_assay_time", "type": ["null", "double"], "doc": ""}, {"name": "wet_validation_time",
"type": ["null", "double"], "doc": ""}, {"name": "analytical_validation_time", "type": ["null",
"double"], "doc": ""}, {"name": "primary_reporting_time", "type": "double", "doc": ""}, {"name":
"primary_authorisation_time", "type": "double", "doc": ""}, {"name": "report_distribution_time",
"type": "double", "doc": ""}, {"name": "total_time", "type": "double", "doc": ""}, {"name":
"reviewedInMdtWga", "type": {"type": "enum", "name": "ReviewedParts", "doc": "", "symbols":
["domain_1", "domain_1_and_2", "domain_1_2_and_suplementary"]}, "doc": ""}, {"name":
"actionableVariants", "type": {"type": "enum", "name": "CancerActionableVariants", "doc": "",
"symbols": ["yes", "no"]}, "doc": ""}]}}, {"name": "somaticVariantLevelQuestions", "type": ["null",
{"type": "array", "items": {"type": "record", "name": "CancerSomaticVariantLevelQuestions",
"fields": [{"name": "variantDetails", "type": "string", "doc": ""}, {"name": "variantActionability",
"type": {"type": "enum", "name": "CancerActionability", "doc": "", "symbols":
["predicts_therapeutic_response", "prognostic", "defines_diagnosis_group", "eligibility_for_trial",
"other"]}, "doc": ""}, {"name": "otherVariantActionability", "type": ["null", "string"]}, {"name":
"variantUsability", "type": {"type": "enum", "name": "CancerUsabilitySomatic", "doc": "", "symbols":
["already_actioned", "actioned_result_of_this_wga", "not_yet_actioned"]}, "doc": ""}, {"name":
"variantTested", "type": {"type": "enum", "name": "CancerTested", "doc": "", "symbols":
["not_indicated_for_patient_care", "no_orthologous_test_available", "test_performed_prior_to_wga",
"technical_validation_following_wga"]}, "doc": ""}, {"name": "validationAssayType", "type":
"string", "doc": ""}]}}], "doc": ""}, {"name": "germlineVariantLevelQuestions", "type": ["null",
{"type": "array", "items": {"type": "record", "name": "CancerGermlineVariantLevelQuestions",
"fields": [{"name": "variantDetails", "type": "string", "doc": ""}, {"name": "variantUsability",
"type": {"type": "enum", "name": "CancerUsabilityGermline", "doc": "", "symbols":
["already_actioned", "actioned_result_of_this_wga"]}, "doc": ""}, {"name": "variantTested", "type":
"CancerTested", "doc": ""}, {"name": "validationAssayType", "type": "string", "doc": ""}]}}], "doc":
""}, {"name": "additionalComments", "type": ["null", "string"], "doc": ""}, {"name":
"otherActionableVariants", "type": ["null", "string"], "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"additionalComments",
"caseLevelQuestions",
"eventDate",
"germlineVariantLevelQuestions",
"otherActionableVariants",
"reporter",
"somaticVariantLevelQuestions",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {
'caseLevelQuestions': CancerCaseLevelQuestions,
'germlineVariantLevelQuestions': CancerGermlineVariantLevelQuestions,
'somaticVariantLevelQuestions': CancerSomaticVariantLevelQuestions,
}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {
'caseLevelQuestions': CancerCaseLevelQuestions,
'germlineVariantLevelQuestions': CancerGermlineVariantLevelQuestions,
'somaticVariantLevelQuestions': CancerSomaticVariantLevelQuestions,
}
return embeddedTypes[fieldName]
__slots__ = [
'additionalComments', 'caseLevelQuestions', 'eventDate',
'germlineVariantLevelQuestions', 'otherActionableVariants',
'reporter', 'somaticVariantLevelQuestions'
]
def __init__(self, **kwargs):
self.additionalComments = kwargs.get(
'additionalComments', None)
self.caseLevelQuestions = kwargs.get(
'caseLevelQuestions', CancerCaseLevelQuestions())
self.eventDate = kwargs.get(
'eventDate', None)
self.germlineVariantLevelQuestions = kwargs.get(
'germlineVariantLevelQuestions', None)
self.otherActionableVariants = kwargs.get(
'otherActionableVariants', None)
self.reporter = kwargs.get(
'reporter', None)
self.somaticVariantLevelQuestions = kwargs.get(
'somaticVariantLevelQuestions', None)
class CancerGermlineVariantLevelQuestions(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "CancerGermlineVariantLevelQuestions", "namespace":
"org.gel.models.report.avro", "fields": [{"name": "variantDetails", "type": "string", "doc": ""},
{"name": "variantUsability", "type": {"type": "enum", "name": "CancerUsabilityGermline", "doc": "",
"symbols": ["already_actioned", "actioned_result_of_this_wga"]}, "doc": ""}, {"name":
"variantTested", "type": {"type": "enum", "name": "CancerTested", "doc": "", "symbols":
["not_indicated_for_patient_care", "no_orthologous_test_available", "test_performed_prior_to_wga",
"technical_validation_following_wga"]}, "doc": ""}, {"name": "validationAssayType", "type":
"string", "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"validationAssayType",
"variantDetails",
"variantTested",
"variantUsability",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'validationAssayType', 'variantDetails', 'variantTested',
'variantUsability'
]
def __init__(self, **kwargs):
self.validationAssayType = kwargs.get(
'validationAssayType', None)
self.variantDetails = kwargs.get(
'variantDetails', None)
self.variantTested = kwargs.get(
'variantTested', None)
self.variantUsability = kwargs.get(
'variantUsability', None)
class CancerInterpretationRequest(ProtocolElement):
"""
This record represents basic information for this report
"""
_schemaSource = """
{"type": "record", "name": "CancerInterpretationRequest", "namespace": "org.gel.models.report.avro",
"doc": "", "fields": [{"name": "versionControl", "type": {"type": "record", "name":
"ReportVersionControl", "fields": [{"name": "gitVersionControl", "type": "string", "doc": "",
"default": "4.2.0"}]}, "doc": ""}, {"name": "reportRequestId", "type": "string", "doc": ""},
{"name": "reportVersion", "type": "int", "doc": ""}, {"name": "internalStudyId", "type": "string",
"doc": ""}, {"name": "interpretGenome", "type": "boolean", "doc": "", "default": false}, {"name":
"bams", "type": {"type": "array", "items": {"type": "record", "name": "File", "doc": "", "fields":
[{"name": "sampleId", "type": ["null", {"type": "array", "items": "string"}], "doc": ""}, {"name":
"uriFile", "type": "string", "doc": ""}, {"name": "fileType", "type": {"type": "enum", "name":
"FileType", "symbols": ["BAM", "gVCF", "VCF_small", "VCF_somatic_small", "VCF_CNV",
"VCF_somatic_CNV", "VCF_SV", "VCF_somatic_SV", "VCF_SV_CNV", "SVG", "ANN", "BigWig", "MD5Sum",
"ROH", "OTHER", "PARTITION", "VARIANT_FREQUENCIES", "COVERAGE"]}}, {"name": "md5Sum", "type":
["null", "string"]}]}}, "doc": ""}, {"name": "vcfs", "type": {"type": "array", "items": "File"},
"doc": ""}, {"name": "bigWigs", "type": {"type": "array", "items": "File"}, "doc": ""}, {"name":
"annotationFile", "type": ["null", "File"], "doc": ""}, {"name": "cancerParticipant", "type":
{"type": "record", "name": "CancerParticipant", "namespace": "org.gel.models.participant.avro",
"doc": "", "fields": [{"name": "yearOfBirth", "type": ["null", "int"]}, {"name": "morphology",
"type": ["null", {"type": "array", "items": "string"}]}, {"name": "readyForAnalysis", "type":
"boolean"}, {"name": "consentStatus", "type": ["null", {"type": "record", "name": "ConsentStatus",
"doc": "", "fields": [{"name": "programmeConsent", "type": "boolean", "doc": "", "default": false},
{"name": "primaryFindingConsent", "type": "boolean", "doc": "", "default": false}, {"name":
"secondaryFindingConsent", "type": "boolean", "doc": "", "default": false}, {"name":
"carrierStatusConsent", "type": "boolean", "doc": "", "default": false}]}], "doc": ""}, {"name":
"center", "type": ["null", "string"], "doc": ""}, {"name": "individualId", "type": "string", "doc":
""}, {"name": "primaryDiagnosisDisease", "type": ["null", {"type": "array", "items": "string"}],
"doc": ""}, {"name": "primaryDiagnosisSubDisease", "type": ["null", {"type": "array", "items":
"string"}], "doc": ""}, {"name": "sex", "type": {"type": "enum", "name": "Sex", "symbols":
["FEMALE", "MALE", "UNKNOWN"]}, "doc": ""}, {"name": "additionalInformation", "type": ["null",
{"type": "map", "values": "string"}], "doc": ""}, {"name": "assignedICD10", "type": ["null",
{"type": "array", "items": "string"}], "doc": ""}, {"name": "tumourSamples", "type": {"type":
"array", "items": {"type": "record", "name": "TumourSample", "fields": [{"name": "sampleId", "type":
"string", "doc": ""}, {"name": "labSampleId", "type": "int", "doc": ""}, {"name": "LDPCode", "type":
"string", "doc": ""}, {"name": "tumourId", "type": "string", "doc": ""}, {"name": "programmePhase",
"type": ["null", {"type": "enum", "name": "ProgrammePhase", "symbols": ["CRUK", "OXFORD", "CLL",
"IIP", "MAIN", "EXPT"]}], "doc": ""}, {"name": "diseaseType", "type": ["null", {"type": "enum",
"name": "diseaseType", "symbols": ["ADULT_GLIOMA", "BLADDER", "BREAST",
"CARCINOMA_OF_UNKNOWN_PRIMARY", "CHILDHOOD", "COLORECTAL", "ENDOMETRIAL_CARCINOMA", "HAEMONC",
"HEPATOPANCREATOBILIARY", "LUNG", "MALIGNANT_MELANOMA", "NASOPHARYNGEAL", "ORAL_OROPHARYNGEAL",
"OVARIAN", "PROSTATE", "RENAL", "SARCOMA", "SINONASAL", "TESTICULAR_GERM_CELL_TUMOURS",
"UPPER_GASTROINTESTINAL", "NON_HODGKINS_B_CELL_LYMPHOMA_LOW_MOD_GRADE", "CLASSICAL_HODGKINS",
"NODULAR_LYMPHOCYTE_PREDOMINANT_HODGKINS", "T_CELL_LYMPHOMA"]}], "doc": ""}, {"name":
"diseaseSubType", "type": ["null", "string"], "doc": ""}, {"name": "clinicalSampleDateTime", "type":
["null", "string"], "doc": ""}, {"name": "tumourType", "type": ["null", {"type": "enum", "name":
"TumourType", "symbols": ["PRIMARY", "METASTATIC_RECURRENCE", "RECURRENCE_OF_PRIMARY_TUMOUR",
"METASTASES"]}], "doc": ""}, {"name": "tumourContent", "type": ["null", {"type": "enum", "name":
"TumourContent", "symbols": ["High", "Medium", "Low"]}], "doc": ""}, {"name": "source", "type":
["null", {"type": "enum", "name": "SampleSource", "symbols": ["TUMOUR",
"BONE_MARROW_ASPIRATE_TUMOUR_SORTED_CELLS", "BONE_MARROW_ASPIRATE_TUMOUR_CELLS", "BLOOD", "SALIVA",
"FIBROBLAST", "TISSUE"]}], "doc": ""}, {"name": "preparationMethod", "type": ["null", {"type":
"enum", "name": "PreparationMethod", "symbols": ["EDTA", "ORAGENE", "FF", "FFPE",
"CD128_SORTED_CELLS", "ASPIRATE"]}], "doc": ""}, {"name": "tissueSource", "type": ["null", {"type":
"enum", "name": "TissueSource", "symbols": ["BMA_TUMOUR_SORTED_CELLS", "CT_GUIDED_BIOPSY",
"ENDOSCOPIC_BIOPSY", "ENDOSCOPIC_ULTRASOUND_GUIDED_BIOPSY", "ENDOSCOPIC_ULTRASOUND_GUIDED_FNA",
"LAPAROSCOPIC_BIOPSY", "LAPAROSCOPIC_EXCISION", "MRI_GUIDED_BIOPSY", "NON_GUIDED_BIOPSY",
"SURGICAL_RESECTION", "STEREOTACTICALLY_GUIDED_BIOPSY", "USS_GUIDED_BIOPSY",
"NON_STANDARD_BIOPSY"]}], "doc": ""}, {"name": "product", "type": ["null", {"type": "enum", "name":
"Product", "symbols": ["DNA", "RNA"]}], "doc": ""}, {"name": "morphologyICD", "type": ["null",
"string"], "doc": ""}, {"name": "morphologySnomedCT", "type": ["null", "string"], "doc": ""},
{"name": "morphologySnomedRT", "type": ["null", "string"], "doc": ""}, {"name": "topographyICD",
"type": ["null", "string"], "doc": ""}, {"name": "topographySnomedCT", "type": ["null", "string"],
"doc": ""}, {"name": "topographySnomedRT", "type": ["null", "string"], "doc": ""}]}}}, {"name":
"germlineSamples", "type": {"type": "array", "items": {"type": "record", "name": "GermlineSample",
"fields": [{"name": "sampleId", "type": "string", "doc": ""}, {"name": "labSampleId", "type": "int",
"doc": ""}, {"name": "LDPCode", "type": "string", "doc": ""}, {"name": "source", "type": ["null",
"SampleSource"], "doc": ""}, {"name": "product", "type": ["null", "Product"], "doc": ""}, {"name":
"preparationMethod", "type": ["null", "PreparationMethod"], "doc": ""}, {"name": "programmePhase",
"type": ["null", "ProgrammePhase"], "doc": ""}, {"name": "clinicalSampleDateTime", "type": ["null",
"string"], "doc": ""}]}}}, {"name": "matchedSamples", "type": {"type": "array", "items": {"type":
"record", "name": "MatchedSamples", "doc": "", "fields": [{"name": "germlineSampleId", "type":
["null", "string"], "doc": ""}, {"name": "tumourSampleId", "type": ["null", "string"], "doc":
""}]}}}, {"name": "versionControl", "type": ["null", {"type": "record", "name": "VersionControl",
"fields": [{"name": "GitVersionControl", "type": "string", "doc": "", "default": "1.0.3"}]}], "doc":
""}]}}, {"name": "analysisUri", "type": "string", "doc": ""}, {"name": "analysisVersion", "type":
"string", "doc": ""}, {"name": "tieredVariants", "type": {"type": "array", "items": {"type":
"record", "name": "ReportedVariantCancer", "fields": [{"name": "chromosome", "type": "string",
"doc": ""}, {"name": "position", "type": "int", "doc": ""}, {"name": "reference", "type": "string",
"doc": ""}, {"name": "alternate", "type": "string", "doc": ""}, {"name": "cosmicIds", "type":
["null", {"type": "array", "items": "string"}], "doc": ""}, {"name": "clinVarIds", "type": ["null",
{"type": "array", "items": "string"}], "doc": ""}, {"name": "dbSnpId", "type": ["null", "string"],
"doc": ""}, {"name": "cdnaChange", "type": ["null", "string"], "doc": ""}, {"name": "proteinChange",
"type": ["null", "string"], "doc": ""}, {"name": "commonAf", "type": ["null", "int"], "doc": ""},
{"name": "ihp", "type": ["null", "int"], "doc": ""}, {"name": "additionalTextualVariantAnnotations",
"type": ["null", {"type": "map", "values": "string"}], "doc": ""}, {"name":
"additionalNumericVariantAnnotations", "type": ["null", {"type": "map", "values": "float"}], "doc":
""}, {"name": "comments", "type": ["null", {"type": "array", "items": "string"}], "doc": ""},
{"name": "reportEvents", "type": {"type": "array", "items": {"type": "record", "name":
"ReportEventCancer", "fields": [{"name": "reportEventId", "type": "string", "doc": ""}, {"name":
"genomicFeatureCancer", "type": {"type": "record", "name": "GenomicFeatureCancer", "fields":
[{"name": "featureType", "type": {"type": "enum", "name": "FeatureTypeCancer", "doc": "", "symbols":
["regulatory_region", "gene", "transcript"]}, "doc": ""}, {"name": "ensemblId", "type": "string",
"doc": ""}, {"name": "refSeqTranscriptId", "type": "string", "doc": ""}, {"name": "refSeqProteinId",
"type": "string", "doc": ""}, {"name": "geneName", "type": "string", "doc": ""}, {"name":
"roleInCancer", "type": ["null", {"type": "enum", "name": "RoleInCancer", "doc": "", "symbols":
["oncogene", "tumor_suppressor_gene", "both"]}], "doc": ""}]}, "doc": ""}, {"name": "soTerms",
"type": {"type": "array", "items": {"type": "record", "name": "SoTerm", "doc": "", "fields":
[{"name": "id", "type": "string", "doc": ""}, {"name": "name", "type": "string", "doc": ""}]}},
"doc": ""}, {"name": "actions", "type": ["null", {"type": "array", "items": {"type": "record",
"name": "Action", "fields": [{"name": "actionType", "type": ["null", {"type": "enum", "name":
"ActionType", "doc": "", "symbols": ["therapy", "therapeutic", "prognosis", "diagnosis"]}]},
{"name": "evidences", "type": ["null", {"type": "array", "items": "string"}], "doc": ""}, {"name":
"drug", "type": ["null", "string"], "doc": ""}, {"name": "status", "type": ["null", {"type": "enum",
"name": "ActionStatus", "doc": "", "symbols": ["clinical", "pre_clinical"]}], "doc": ""}, {"name":
"variantActionable", "type": "boolean", "doc": ""}, {"name": "comments", "type": ["null", {"type":
"array", "items": "string"}], "doc": ""}, {"name": "url", "type": ["null", "string"], "doc": ""},
{"name": "evidenceType", "type": ["null", "string"], "doc": ""}, {"name": "source", "type":
"string", "doc": ""}]}}], "doc": ""}, {"name": "groupOfVariants", "type": ["null", "int"], "doc":
""}, {"name": "eventJustification", "type": ["null", "string"], "doc": ""}, {"name": "tier", "type":
["null", {"type": "enum", "name": "Tier", "doc": "", "symbols": ["NONE", "TIER1", "TIER2",
"TIER3"]}], "doc": ""}]}}, "doc": ""}, {"name": "variantCalls", "type": ["null", {"type": "array",
"items": {"type": "record", "name": "VariantCall", "fields": [{"name": "sampleId", "type": "string",
"doc": ""}, {"name": "depthReference", "type": ["null", "int"], "doc": ""}, {"name":
"depthAlternate", "type": ["null", "int"], "doc": ""}, {"name": "vaf", "type": ["null", "double"],
"doc": ""}]}}], "doc": ""}, {"name": "alleleOrigins", "type": {"type": "array", "items": {"type":
"enum", "name": "AlleleOrigin", "doc": "", "symbols": ["de_novo_variant", "germline_variant",
"maternal_variant", "paternal_variant", "pedigree_specific_variant", "population_specific_variant",
"somatic_variant"]}}, "doc": ""}]}}, "doc": ""}, {"name": "structuralTieredVariants", "type":
{"type": "array", "items": {"type": "record", "name": "ReportedStructuralVariantCancer", "fields":
[{"name": "chromosome", "type": "string", "doc": ""}, {"name": "start", "type": "int", "doc": ""},
{"name": "end", "type": "int", "doc": ""}, {"name": "type", "type": {"type": "record", "name":
"StructuralVariantType", "doc": "", "fields": [{"name": "firstLevelType", "type": {"type": "enum",
"name": "StructuralVariantFirstLevelType", "doc": "", "symbols": ["DEL", "INS", "DUP", "INV", "CNV",
"DUP_TANDEM", "DEL_ME", "INS_ME"]}}, {"name": "subtype", "type": ["null", "string"]}]}, "doc": ""},
{"name": "reference", "type": "string", "doc": ""}, {"name": "alternate", "type": "string", "doc":
""}, {"name": "additionalTextualVariantAnnotations", "type": ["null", {"type": "map", "values":
"string"}], "doc": ""}, {"name": "additionalNumericVariantAnnotations", "type": ["null", {"type":
"map", "values": "float"}], "doc": ""}, {"name": "comments", "type": ["null", {"type": "array",
"items": "string"}], "doc": ""}, {"name": "alleleOrigins", "type": {"type": "array", "items":
"AlleleOrigin"}, "doc": ""}]}}, "doc": ""}, {"name": "tieringVersion", "type": "string", "doc": ""},
{"name": "workspace", "type": {"type": "array", "items": "string"}, "doc": ""}, {"name":
"additionalInfo", "type": ["null", {"type": "map", "values": "string"}], "doc": ""}, {"name":
"otherFiles", "type": ["null", {"type": "map", "values": "File"}], "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"additionalInfo",
"analysisUri",
"analysisVersion",
"annotationFile",
"bams",
"bigWigs",
"cancerParticipant",
"internalStudyId",
"otherFiles",
"reportRequestId",
"reportVersion",
"structuralTieredVariants",
"tieredVariants",
"tieringVersion",
"vcfs",
"versionControl",
"workspace",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {
'annotationFile': File,
'bams': File,
'bigWigs': File,
'cancerParticipant': CancerParticipant,
'otherFiles': File,
'structuralTieredVariants': ReportedStructuralVariantCancer,
'tieredVariants': ReportedVariantCancer,
'vcfs': File,
'versionControl': ReportVersionControl,
}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {
'annotationFile': File,
'bams': File,
'bigWigs': File,
'cancerParticipant': CancerParticipant,
'otherFiles': File,
'structuralTieredVariants': ReportedStructuralVariantCancer,
'tieredVariants': ReportedVariantCancer,
'vcfs': File,
'versionControl': ReportVersionControl,
}
return embeddedTypes[fieldName]
__slots__ = [
'additionalInfo', 'analysisUri', 'analysisVersion',
'annotationFile', 'bams', 'bigWigs', 'cancerParticipant',
'internalStudyId', 'interpretGenome', 'otherFiles',
'reportRequestId', 'reportVersion',
'structuralTieredVariants', 'tieredVariants',
'tieringVersion', 'vcfs', 'versionControl', 'workspace'
]
def __init__(self, **kwargs):
self.additionalInfo = kwargs.get(
'additionalInfo', None)
self.analysisUri = kwargs.get(
'analysisUri', None)
self.analysisVersion = kwargs.get(
'analysisVersion', None)
self.annotationFile = kwargs.get(
'annotationFile', None)
self.bams = kwargs.get(
'bams', None)
self.bigWigs = kwargs.get(
'bigWigs', None)
self.cancerParticipant = kwargs.get(
'cancerParticipant', CancerParticipant())
self.internalStudyId = kwargs.get(
'internalStudyId', None)
self.interpretGenome = kwargs.get(
'interpretGenome', False)
self.otherFiles = kwargs.get(
'otherFiles', None)
self.reportRequestId = kwargs.get(
'reportRequestId', None)
self.reportVersion = kwargs.get(
'reportVersion', None)
self.structuralTieredVariants = kwargs.get(
'structuralTieredVariants', None)
self.tieredVariants = kwargs.get(
'tieredVariants', None)
self.tieringVersion = kwargs.get(
'tieringVersion', None)
self.vcfs = kwargs.get(
'vcfs', None)
self.versionControl = kwargs.get(
'versionControl', ReportVersionControl())
self.workspace = kwargs.get(
'workspace', None)
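# Construction sketch (illustrative, not part of the generated bindings):
# ProtocolElement subclasses take all of their fields as keyword
# arguments; anything omitted falls back to the defaults wired into
# __init__ above (None, False, or an empty embedded record). The
# identifiers below are hypothetical.
def _example_build_cancer_interpretation_request():
    """Build a minimal CancerInterpretationRequest for illustration."""
    return CancerInterpretationRequest(
        reportRequestId="example-request-1",
        reportVersion=1,
        internalStudyId="example-study",
        interpretGenome=False,
    )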
class CancerInterpretedGenome(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "CancerInterpretedGenome", "namespace": "org.gel.models.report.avro",
"fields": [{"name": "versionControl", "type": {"type": "record", "name": "ReportVersionControl",
"fields": [{"name": "gitVersionControl", "type": "string", "doc": "", "default": "4.2.0"}]}, "doc":
""}, {"name": "reportRequestId", "type": "string", "doc": ""}, {"name": "analysisId", "type":
"string", "doc": ""}, {"name": "reportUri", "type": "string", "doc": ""}, {"name":
"referenceDatabasesVersions", "type": {"type": "map", "values": "string"}, "doc": ""}, {"name":
"softwareVersions", "type": {"type": "map", "values": "string"}, "doc": ""}, {"name":
"reportedVariants", "type": {"type": "array", "items": {"type": "record", "name":
"ReportedVariantCancer", "fields": [{"name": "chromosome", "type": "string", "doc": ""}, {"name":
"position", "type": "int", "doc": ""}, {"name": "reference", "type": "string", "doc": ""}, {"name":
"alternate", "type": "string", "doc": ""}, {"name": "cosmicIds", "type": ["null", {"type": "array",
"items": "string"}], "doc": ""}, {"name": "clinVarIds", "type": ["null", {"type": "array", "items":
"string"}], "doc": ""}, {"name": "dbSnpId", "type": ["null", "string"], "doc": ""}, {"name":
"cdnaChange", "type": ["null", "string"], "doc": ""}, {"name": "proteinChange", "type": ["null",
"string"], "doc": ""}, {"name": "commonAf", "type": ["null", "int"], "doc": ""}, {"name": "ihp",
"type": ["null", "int"], "doc": ""}, {"name": "additionalTextualVariantAnnotations", "type":
["null", {"type": "map", "values": "string"}], "doc": ""}, {"name":
"additionalNumericVariantAnnotations", "type": ["null", {"type": "map", "values": "float"}], "doc":
""}, {"name": "comments", "type": ["null", {"type": "array", "items": "string"}], "doc": ""},
{"name": "reportEvents", "type": {"type": "array", "items": {"type": "record", "name":
"ReportEventCancer", "fields": [{"name": "reportEventId", "type": "string", "doc": ""}, {"name":
"genomicFeatureCancer", "type": {"type": "record", "name": "GenomicFeatureCancer", "fields":
[{"name": "featureType", "type": {"type": "enum", "name": "FeatureTypeCancer", "doc": "", "symbols":
["regulatory_region", "gene", "transcript"]}, "doc": ""}, {"name": "ensemblId", "type": "string",
"doc": ""}, {"name": "refSeqTranscriptId", "type": "string", "doc": ""}, {"name": "refSeqProteinId",
"type": "string", "doc": ""}, {"name": "geneName", "type": "string", "doc": ""}, {"name":
"roleInCancer", "type": ["null", {"type": "enum", "name": "RoleInCancer", "doc": "", "symbols":
["oncogene", "tumor_suppressor_gene", "both"]}], "doc": ""}]}, "doc": ""}, {"name": "soTerms",
"type": {"type": "array", "items": {"type": "record", "name": "SoTerm", "doc": "", "fields":
[{"name": "id", "type": "string", "doc": ""}, {"name": "name", "type": "string", "doc": ""}]}},
"doc": ""}, {"name": "actions", "type": ["null", {"type": "array", "items": {"type": "record",
"name": "Action", "fields": [{"name": "actionType", "type": ["null", {"type": "enum", "name":
"ActionType", "doc": "", "symbols": ["therapy", "therapeutic", "prognosis", "diagnosis"]}]},
{"name": "evidences", "type": ["null", {"type": "array", "items": "string"}], "doc": ""}, {"name":
"drug", "type": ["null", "string"], "doc": ""}, {"name": "status", "type": ["null", {"type": "enum",
"name": "ActionStatus", "doc": "", "symbols": ["clinical", "pre_clinical"]}], "doc": ""}, {"name":
"variantActionable", "type": "boolean", "doc": ""}, {"name": "comments", "type": ["null", {"type":
"array", "items": "string"}], "doc": ""}, {"name": "url", "type": ["null", "string"], "doc": ""},
{"name": "evidenceType", "type": ["null", "string"], "doc": ""}, {"name": "source", "type":
"string", "doc": ""}]}}], "doc": ""}, {"name": "groupOfVariants", "type": ["null", "int"], "doc":
""}, {"name": "eventJustification", "type": ["null", "string"], "doc": ""}, {"name": "tier", "type":
["null", {"type": "enum", "name": "Tier", "doc": "", "symbols": ["NONE", "TIER1", "TIER2",
"TIER3"]}], "doc": ""}]}}, "doc": ""}, {"name": "variantCalls", "type": ["null", {"type": "array",
"items": {"type": "record", "name": "VariantCall", "fields": [{"name": "sampleId", "type": "string",
"doc": ""}, {"name": "depthReference", "type": ["null", "int"], "doc": ""}, {"name":
"depthAlternate", "type": ["null", "int"], "doc": ""}, {"name": "vaf", "type": ["null", "double"],
"doc": ""}]}}], "doc": ""}, {"name": "alleleOrigins", "type": {"type": "array", "items": {"type":
"enum", "name": "AlleleOrigin", "doc": "", "symbols": ["de_novo_variant", "germline_variant",
"maternal_variant", "paternal_variant", "pedigree_specific_variant", "population_specific_variant",
"somatic_variant"]}}, "doc": ""}]}}, "doc": ""}, {"name": "reportedStructuralVariants", "type":
{"type": "array", "items": {"type": "record", "name": "ReportedStructuralVariantCancer", "fields":
[{"name": "chromosome", "type": "string", "doc": ""}, {"name": "start", "type": "int", "doc": ""},
{"name": "end", "type": "int", "doc": ""}, {"name": "type", "type": {"type": "record", "name":
"StructuralVariantType", "doc": "", "fields": [{"name": "firstLevelType", "type": {"type": "enum",
"name": "StructuralVariantFirstLevelType", "doc": "", "symbols": ["DEL", "INS", "DUP", "INV", "CNV",
"DUP_TANDEM", "DEL_ME", "INS_ME"]}}, {"name": "subtype", "type": ["null", "string"]}]}, "doc": ""},
{"name": "reference", "type": "string", "doc": ""}, {"name": "alternate", "type": "string", "doc":
""}, {"name": "additionalTextualVariantAnnotations", "type": ["null", {"type": "map", "values":
"string"}], "doc": ""}, {"name": "additionalNumericVariantAnnotations", "type": ["null", {"type":
"map", "values": "float"}], "doc": ""}, {"name": "comments", "type": ["null", {"type": "array",
"items": "string"}], "doc": ""}, {"name": "alleleOrigins", "type": {"type": "array", "items":
"AlleleOrigin"}, "doc": ""}]}}, "doc": ""}, {"name": "comments", "type": ["null", {"type": "array",
"items": "string"}], "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"analysisId",
"comments",
"referenceDatabasesVersions",
"reportRequestId",
"reportUri",
"reportedStructuralVariants",
"reportedVariants",
"softwareVersions",
"versionControl",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {
'reportedStructuralVariants': ReportedStructuralVariantCancer,
'reportedVariants': ReportedVariantCancer,
'versionControl': ReportVersionControl,
}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {
'reportedStructuralVariants': ReportedStructuralVariantCancer,
'reportedVariants': ReportedVariantCancer,
'versionControl': ReportVersionControl,
}
return embeddedTypes[fieldName]
__slots__ = [
'analysisId', 'comments', 'referenceDatabasesVersions',
'reportRequestId', 'reportUri', 'reportedStructuralVariants',
'reportedVariants', 'softwareVersions', 'versionControl'
]
def __init__(self, **kwargs):
self.analysisId = kwargs.get(
'analysisId', None)
self.comments = kwargs.get(
'comments', None)
self.referenceDatabasesVersions = kwargs.get(
'referenceDatabasesVersions', None)
self.reportRequestId = kwargs.get(
'reportRequestId', None)
self.reportUri = kwargs.get(
'reportUri', None)
self.reportedStructuralVariants = kwargs.get(
'reportedStructuralVariants', None)
self.reportedVariants = kwargs.get(
'reportedVariants', None)
self.softwareVersions = kwargs.get(
'softwareVersions', None)
self.versionControl = kwargs.get(
'versionControl', ReportVersionControl())
class CancerParticipant(ProtocolElement):
"""
This defines a Cancer Participant
"""
_schemaSource = """
{"type": "record", "name": "CancerParticipant", "namespace": "org.gel.models.participant.avro",
"doc": "", "fields": [{"name": "yearOfBirth", "type": ["null", "int"]}, {"name": "morphology",
"type": ["null", {"type": "array", "items": "string"}]}, {"name": "readyForAnalysis", "type":
"boolean"}, {"name": "consentStatus", "type": ["null", {"type": "record", "name": "ConsentStatus",
"doc": "", "fields": [{"name": "programmeConsent", "type": "boolean", "doc": "", "default": false},
{"name": "primaryFindingConsent", "type": "boolean", "doc": "", "default": false}, {"name":
"secondaryFindingConsent", "type": "boolean", "doc": "", "default": false}, {"name":
"carrierStatusConsent", "type": "boolean", "doc": "", "default": false}]}], "doc": ""}, {"name":
"center", "type": ["null", "string"], "doc": ""}, {"name": "individualId", "type": "string", "doc":
""}, {"name": "primaryDiagnosisDisease", "type": ["null", {"type": "array", "items": "string"}],
"doc": ""}, {"name": "primaryDiagnosisSubDisease", "type": ["null", {"type": "array", "items":
"string"}], "doc": ""}, {"name": "sex", "type": {"type": "enum", "name": "Sex", "symbols":
["FEMALE", "MALE", "UNKNOWN"]}, "doc": ""}, {"name": "additionalInformation", "type": ["null",
{"type": "map", "values": "string"}], "doc": ""}, {"name": "assignedICD10", "type": ["null",
{"type": "array", "items": "string"}], "doc": ""}, {"name": "tumourSamples", "type": {"type":
"array", "items": {"type": "record", "name": "TumourSample", "fields": [{"name": "sampleId", "type":
"string", "doc": ""}, {"name": "labSampleId", "type": "int", "doc": ""}, {"name": "LDPCode", "type":
"string", "doc": ""}, {"name": "tumourId", "type": "string", "doc": ""}, {"name": "programmePhase",
"type": ["null", {"type": "enum", "name": "ProgrammePhase", "symbols": ["CRUK", "OXFORD", "CLL",
"IIP", "MAIN", "EXPT"]}], "doc": ""}, {"name": "diseaseType", "type": ["null", {"type": "enum",
"name": "diseaseType", "symbols": ["ADULT_GLIOMA", "BLADDER", "BREAST",
"CARCINOMA_OF_UNKNOWN_PRIMARY", "CHILDHOOD", "COLORECTAL", "ENDOMETRIAL_CARCINOMA", "HAEMONC",
"HEPATOPANCREATOBILIARY", "LUNG", "MALIGNANT_MELANOMA", "NASOPHARYNGEAL", "ORAL_OROPHARYNGEAL",
"OVARIAN", "PROSTATE", "RENAL", "SARCOMA", "SINONASAL", "TESTICULAR_GERM_CELL_TUMOURS",
"UPPER_GASTROINTESTINAL", "NON_HODGKINS_B_CELL_LYMPHOMA_LOW_MOD_GRADE", "CLASSICAL_HODGKINS",
"NODULAR_LYMPHOCYTE_PREDOMINANT_HODGKINS", "T_CELL_LYMPHOMA"]}], "doc": ""}, {"name":
"diseaseSubType", "type": ["null", "string"], "doc": ""}, {"name": "clinicalSampleDateTime", "type":
["null", "string"], "doc": ""}, {"name": "tumourType", "type": ["null", {"type": "enum", "name":
"TumourType", "symbols": ["PRIMARY", "METASTATIC_RECURRENCE", "RECURRENCE_OF_PRIMARY_TUMOUR",
"METASTASES"]}], "doc": ""}, {"name": "tumourContent", "type": ["null", {"type": "enum", "name":
"TumourContent", "symbols": ["High", "Medium", "Low"]}], "doc": ""}, {"name": "source", "type":
["null", {"type": "enum", "name": "SampleSource", "symbols": ["TUMOUR",
"BONE_MARROW_ASPIRATE_TUMOUR_SORTED_CELLS", "BONE_MARROW_ASPIRATE_TUMOUR_CELLS", "BLOOD", "SALIVA",
"FIBROBLAST", "TISSUE"]}], "doc": ""}, {"name": "preparationMethod", "type": ["null", {"type":
"enum", "name": "PreparationMethod", "symbols": ["EDTA", "ORAGENE", "FF", "FFPE",
"CD128_SORTED_CELLS", "ASPIRATE"]}], "doc": ""}, {"name": "tissueSource", "type": ["null", {"type":
"enum", "name": "TissueSource", "symbols": ["BMA_TUMOUR_SORTED_CELLS", "CT_GUIDED_BIOPSY",
"ENDOSCOPIC_BIOPSY", "ENDOSCOPIC_ULTRASOUND_GUIDED_BIOPSY", "ENDOSCOPIC_ULTRASOUND_GUIDED_FNA",
"LAPAROSCOPIC_BIOPSY", "LAPAROSCOPIC_EXCISION", "MRI_GUIDED_BIOPSY", "NON_GUIDED_BIOPSY",
"SURGICAL_RESECTION", "STEREOTACTICALLY_GUIDED_BIOPSY", "USS_GUIDED_BIOPSY",
"NON_STANDARD_BIOPSY"]}], "doc": ""}, {"name": "product", "type": ["null", {"type": "enum", "name":
"Product", "symbols": ["DNA", "RNA"]}], "doc": ""}, {"name": "morphologyICD", "type": ["null",
"string"], "doc": ""}, {"name": "morphologySnomedCT", "type": ["null", "string"], "doc": ""},
{"name": "morphologySnomedRT", "type": ["null", "string"], "doc": ""}, {"name": "topographyICD",
"type": ["null", "string"], "doc": ""}, {"name": "topographySnomedCT", "type": ["null", "string"],
"doc": ""}, {"name": "topographySnomedRT", "type": ["null", "string"], "doc": ""}]}}}, {"name":
"germlineSamples", "type": {"type": "array", "items": {"type": "record", "name": "GermlineSample",
"fields": [{"name": "sampleId", "type": "string", "doc": ""}, {"name": "labSampleId", "type": "int",
"doc": ""}, {"name": "LDPCode", "type": "string", "doc": ""}, {"name": "source", "type": ["null",
"SampleSource"], "doc": ""}, {"name": "product", "type": ["null", "Product"], "doc": ""}, {"name":
"preparationMethod", "type": ["null", "PreparationMethod"], "doc": ""}, {"name": "programmePhase",
"type": ["null", "ProgrammePhase"], "doc": ""}, {"name": "clinicalSampleDateTime", "type": ["null",
"string"], "doc": ""}]}}}, {"name": "matchedSamples", "type": {"type": "array", "items": {"type":
"record", "name": "MatchedSamples", "doc": "", "fields": [{"name": "germlineSampleId", "type":
["null", "string"], "doc": ""}, {"name": "tumourSampleId", "type": ["null", "string"], "doc":
""}]}}}, {"name": "versionControl", "type": ["null", {"type": "record", "name": "VersionControl",
"fields": [{"name": "GitVersionControl", "type": "string", "doc": "", "default": "1.0.3"}]}], "doc":
""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"additionalInformation",
"assignedICD10",
"center",
"consentStatus",
"germlineSamples",
"individualId",
"matchedSamples",
"morphology",
"primaryDiagnosisDisease",
"primaryDiagnosisSubDisease",
"readyForAnalysis",
"sex",
"tumourSamples",
"versionControl",
"yearOfBirth",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {
'consentStatus': ConsentStatus,
'germlineSamples': GermlineSample,
'matchedSamples': MatchedSamples,
'tumourSamples': TumourSample,
'versionControl': VersionControl,
}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {
'consentStatus': ConsentStatus,
'germlineSamples': GermlineSample,
'matchedSamples': MatchedSamples,
'tumourSamples': TumourSample,
'versionControl': VersionControl,
}
return embeddedTypes[fieldName]
__slots__ = [
'additionalInformation', 'assignedICD10', 'center',
'consentStatus', 'germlineSamples', 'individualId',
'matchedSamples', 'morphology', 'primaryDiagnosisDisease',
'primaryDiagnosisSubDisease', 'readyForAnalysis', 'sex',
'tumourSamples', 'versionControl', 'yearOfBirth'
]
def __init__(self, **kwargs):
self.additionalInformation = kwargs.get(
'additionalInformation', None)
self.assignedICD10 = kwargs.get(
'assignedICD10', None)
self.center = kwargs.get(
'center', None)
self.consentStatus = kwargs.get(
'consentStatus', None)
self.germlineSamples = kwargs.get(
'germlineSamples', None)
self.individualId = kwargs.get(
'individualId', None)
self.matchedSamples = kwargs.get(
'matchedSamples', None)
self.morphology = kwargs.get(
'morphology', None)
self.primaryDiagnosisDisease = kwargs.get(
'primaryDiagnosisDisease', None)
self.primaryDiagnosisSubDisease = kwargs.get(
'primaryDiagnosisSubDisease', None)
self.readyForAnalysis = kwargs.get(
'readyForAnalysis', None)
self.sex = kwargs.get(
'sex', None)
self.tumourSamples = kwargs.get(
'tumourSamples', None)
self.versionControl = kwargs.get(
'versionControl', None)
self.yearOfBirth = kwargs.get(
'yearOfBirth', None)
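# Embedded-type sketch (illustrative): isEmbeddedType/getEmbeddedType
# expose which fields hold nested ProtocolElement records, which is how
# generic (de)serialisation code can recurse into a CancerParticipant.
def _example_resolve_embedded_type(fieldName):
    """Return the embedded type for a CancerParticipant field, or None."""
    if CancerParticipant.isEmbeddedType(fieldName):
        return CancerParticipant.getEmbeddedType(fieldName)
    return None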
class CancerSomaticVariantLevelQuestions(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "CancerSomaticVariantLevelQuestions", "namespace":
"org.gel.models.report.avro", "fields": [{"name": "variantDetails", "type": "string", "doc": ""},
{"name": "variantActionability", "type": {"type": "enum", "name": "CancerActionability", "doc": "",
"symbols": ["predicts_therapeutic_response", "prognostic", "defines_diagnosis_group",
"eligibility_for_trial", "other"]}, "doc": ""}, {"name": "otherVariantActionability", "type":
["null", "string"]}, {"name": "variantUsability", "type": {"type": "enum", "name":
"CancerUsabilitySomatic", "doc": "", "symbols": ["already_actioned", "actioned_result_of_this_wga",
"not_yet_actioned"]}, "doc": ""}, {"name": "variantTested", "type": {"type": "enum", "name":
"CancerTested", "doc": "", "symbols": ["not_indicated_for_patient_care",
"no_orthologous_test_available", "test_performed_prior_to_wga",
"technical_validation_following_wga"]}, "doc": ""}, {"name": "validationAssayType", "type":
"string", "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"otherVariantActionability",
"validationAssayType",
"variantActionability",
"variantDetails",
"variantTested",
"variantUsability",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'otherVariantActionability', 'validationAssayType',
'variantActionability', 'variantDetails', 'variantTested',
'variantUsability'
]
def __init__(self, **kwargs):
self.otherVariantActionability = kwargs.get(
'otherVariantActionability', None)
self.validationAssayType = kwargs.get(
'validationAssayType', None)
self.variantActionability = kwargs.get(
'variantActionability', None)
self.variantDetails = kwargs.get(
'variantDetails', None)
self.variantTested = kwargs.get(
'variantTested', None)
self.variantUsability = kwargs.get(
'variantUsability', None)
class CancerTested(object):
"""
    An enumeration of Variant tested statuses: *
    `not_indicated_for_patient_care`: No: not indicated for patient
    care at this time * `no_orthologous_test_available`: No:
    no orthologous test available *
    `test_performed_prior_to_wga`: Yes: test performed prior to
    receiving WGA (e.g. using a standard-of-care assay such as panel
    testing or Sanger sequencing) *
    `technical_validation_following_wga`: Yes: technical validation
    performed/planned following receipt of this WGA
"""
not_indicated_for_patient_care = "not_indicated_for_patient_care"
no_orthologous_test_available = "no_orthologous_test_available"
test_performed_prior_to_wga = "test_performed_prior_to_wga"
technical_validation_following_wga = "technical_validation_following_wga"
def __hash__(self):
return str(self).__hash__()
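# Usage sketch (illustrative, not part of the generated bindings): the
# enumeration classes in this module are plain namespaces of string
# constants, so a value can be validated by membership against the
# class attributes.
def _example_is_valid_cancer_tested(value):
    """Return True if ``value`` is one of the CancerTested symbols."""
    symbols = {
        CancerTested.not_indicated_for_patient_care,
        CancerTested.no_orthologous_test_available,
        CancerTested.test_performed_prior_to_wga,
        CancerTested.technical_validation_following_wga,
    }
    return value in symbols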
class CancerUsabilityGermline(object):
"""
    An enumeration of Variant Usability values: * `already_actioned`:
Already actioned (i.e. prior to receiving this WGA) *
`actioned_result_of_this_wga`: actioned as a result of receiving
this WGA
"""
already_actioned = "already_actioned"
actioned_result_of_this_wga = "actioned_result_of_this_wga"
def __hash__(self):
return str(self).__hash__()
class CancerUsabilitySomatic(object):
"""
    An enumeration of Variant Usability values: * `already_actioned`:
Already actioned (i.e. prior to receiving this WGA) *
`actioned_result_of_this_wga`: actioned as a result of receiving
this WGA * `not_yet_actioned`: not yet actioned, but
potentially actionable in the future
"""
already_actioned = "already_actioned"
actioned_result_of_this_wga = "actioned_result_of_this_wga"
not_yet_actioned = "not_yet_actioned"
def __hash__(self):
return str(self).__hash__()
class CaseShared(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "CaseShared", "namespace": "org.gel.models.report.avro", "fields":
[{"name": "previousGroups", "type": {"type": "array", "items": "string"}}, {"name":
"modifiedGroups", "type": {"type": "array", "items": "string"}}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"modifiedGroups",
"previousGroups",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'modifiedGroups', 'previousGroups'
]
def __init__(self, **kwargs):
self.modifiedGroups = kwargs.get(
'modifiedGroups', None)
self.previousGroups = kwargs.get(
'previousGroups', None)
class CaseSolvedFamily(object):
"""
No documentation
"""
yes = "yes"
no = "no"
partially = "partially"
unknown = "unknown"
def __hash__(self):
return str(self).__hash__()
class ChiSquare1KGenomesPhase3Pop(ProtocolElement):
"""
Chi-square test for goodness of fit of this sample to 1000 Genomes
Phase 3 populations
"""
_schemaSource = """
{"type": "record", "name": "ChiSquare1KGenomesPhase3Pop", "namespace":
"org.gel.models.participant.avro", "doc": "", "fields": [{"name": "kgSuperPopCategory", "type":
{"type": "enum", "name": "KgSuperPopCategory", "doc": "", "symbols": ["AFR", "AMR", "EAS", "EUR",
"SAS"]}, "doc": ""}, {"name": "kgPopCategory", "type": ["null", {"type": "enum", "name":
"KgPopCategory", "doc": "", "symbols": ["ACB", "ASW", "BEB", "CDX", "CEU", "CHB", "CHS", "CLM",
"ESN", "FIN", "GBR", "GIH", "GWD", "IBS", "ITU", "JPT", "KHV", "LWK", "MSL", "MXL", "PEL", "PJL",
"PUR", "STU", "TSI", "YRI"]}], "doc": ""}, {"name": "chiSquare", "type": "double", "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"chiSquare",
"kgPopCategory",
"kgSuperPopCategory",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'chiSquare', 'kgPopCategory', 'kgSuperPopCategory'
]
def __init__(self, **kwargs):
self.chiSquare = kwargs.get(
'chiSquare', None)
self.kgPopCategory = kwargs.get(
'kgPopCategory', None)
self.kgSuperPopCategory = kwargs.get(
'kgSuperPopCategory', None)
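# Construction sketch (illustrative): kgPopCategory is nullable in the
# schema above, so it may be omitted; the enum symbols are plain strings
# and the chi-square value below is made up.
def _example_chi_square_record():
    """Build a ChiSquare1KGenomesPhase3Pop record for illustration."""
    return ChiSquare1KGenomesPhase3Pop(
        kgSuperPopCategory="EUR",
        kgPopCategory="GBR",
        chiSquare=0.05,
    )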
class ClinicalReportCancer(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "ClinicalReportCancer", "namespace": "org.gel.models.report.avro",
"fields": [{"name": "interpretationRequestId", "type": "string", "doc": ""}, {"name":
"interpretationRequestVersion", "type": "string", "doc": ""}, {"name": "reportingDate", "type":
"string", "doc": ""}, {"name": "user", "type": "string", "doc": ""}, {"name": "candidateVariants",
"type": ["null", {"type": "array", "items": {"type": "record", "name": "ReportedVariantCancer",
"fields": [{"name": "chromosome", "type": "string", "doc": ""}, {"name": "position", "type": "int",
"doc": ""}, {"name": "reference", "type": "string", "doc": ""}, {"name": "alternate", "type":
"string", "doc": ""}, {"name": "cosmicIds", "type": ["null", {"type": "array", "items": "string"}],
"doc": ""}, {"name": "clinVarIds", "type": ["null", {"type": "array", "items": "string"}], "doc":
""}, {"name": "dbSnpId", "type": ["null", "string"], "doc": ""}, {"name": "cdnaChange", "type":
["null", "string"], "doc": ""}, {"name": "proteinChange", "type": ["null", "string"], "doc": ""},
{"name": "commonAf", "type": ["null", "int"], "doc": ""}, {"name": "ihp", "type": ["null", "int"],
"doc": ""}, {"name": "additionalTextualVariantAnnotations", "type": ["null", {"type": "map",
"values": "string"}], "doc": ""}, {"name": "additionalNumericVariantAnnotations", "type": ["null",
{"type": "map", "values": "float"}], "doc": ""}, {"name": "comments", "type": ["null", {"type":
"array", "items": "string"}], "doc": ""}, {"name": "reportEvents", "type": {"type": "array",
"items": {"type": "record", "name": "ReportEventCancer", "fields": [{"name": "reportEventId",
"type": "string", "doc": ""}, {"name": "genomicFeatureCancer", "type": {"type": "record", "name":
"GenomicFeatureCancer", "fields": [{"name": "featureType", "type": {"type": "enum", "name":
"FeatureTypeCancer", "doc": "", "symbols": ["regulatory_region", "gene", "transcript"]}, "doc": ""},
{"name": "ensemblId", "type": "string", "doc": ""}, {"name": "refSeqTranscriptId", "type": "string",
"doc": ""}, {"name": "refSeqProteinId", "type": "string", "doc": ""}, {"name": "geneName", "type":
"string", "doc": ""}, {"name": "roleInCancer", "type": ["null", {"type": "enum", "name":
"RoleInCancer", "doc": "", "symbols": ["oncogene", "tumor_suppressor_gene", "both"]}], "doc": ""}]},
"doc": ""}, {"name": "soTerms", "type": {"type": "array", "items": {"type": "record", "name":
"SoTerm", "doc": "", "fields": [{"name": "id", "type": "string", "doc": ""}, {"name": "name",
"type": "string", "doc": ""}]}}, "doc": ""}, {"name": "actions", "type": ["null", {"type": "array",
"items": {"type": "record", "name": "Action", "fields": [{"name": "actionType", "type": ["null",
{"type": "enum", "name": "ActionType", "doc": "", "symbols": ["therapy", "therapeutic", "prognosis",
"diagnosis"]}]}, {"name": "evidences", "type": ["null", {"type": "array", "items": "string"}],
"doc": ""}, {"name": "drug", "type": ["null", "string"], "doc": ""}, {"name": "status", "type":
["null", {"type": "enum", "name": "ActionStatus", "doc": "", "symbols": ["clinical",
"pre_clinical"]}], "doc": ""}, {"name": "variantActionable", "type": "boolean", "doc": ""}, {"name":
"comments", "type": ["null", {"type": "array", "items": "string"}], "doc": ""}, {"name": "url",
"type": ["null", "string"], "doc": ""}, {"name": "evidenceType", "type": ["null", "string"], "doc":
""}, {"name": "source", "type": "string", "doc": ""}]}}], "doc": ""}, {"name": "groupOfVariants",
"type": ["null", "int"], "doc": ""}, {"name": "eventJustification", "type": ["null", "string"],
"doc": ""}, {"name": "tier", "type": ["null", {"type": "enum", "name": "Tier", "doc": "", "symbols":
["NONE", "TIER1", "TIER2", "TIER3"]}], "doc": ""}]}}, "doc": ""}, {"name": "variantCalls", "type":
["null", {"type": "array", "items": {"type": "record", "name": "VariantCall", "fields": [{"name":
"sampleId", "type": "string", "doc": ""}, {"name": "depthReference", "type": ["null", "int"], "doc":
""}, {"name": "depthAlternate", "type": ["null", "int"], "doc": ""}, {"name": "vaf", "type":
["null", "double"], "doc": ""}]}}], "doc": ""}, {"name": "alleleOrigins", "type": {"type": "array",
"items": {"type": "enum", "name": "AlleleOrigin", "doc": "", "symbols": ["de_novo_variant",
"germline_variant", "maternal_variant", "paternal_variant", "pedigree_specific_variant",
"population_specific_variant", "somatic_variant"]}}, "doc": ""}]}}], "doc": ""}, {"name":
"candidateStructuralVariants", "type": ["null", {"type": "array", "items": {"type": "record",
"name": "ReportedStructuralVariantCancer", "fields": [{"name": "chromosome", "type": "string",
"doc": ""}, {"name": "start", "type": "int", "doc": ""}, {"name": "end", "type": "int", "doc": ""},
{"name": "type", "type": {"type": "record", "name": "StructuralVariantType", "doc": "", "fields":
[{"name": "firstLevelType", "type": {"type": "enum", "name": "StructuralVariantFirstLevelType",
"doc": "", "symbols": ["DEL", "INS", "DUP", "INV", "CNV", "DUP_TANDEM", "DEL_ME", "INS_ME"]}},
{"name": "subtype", "type": ["null", "string"]}]}, "doc": ""}, {"name": "reference", "type":
"string", "doc": ""}, {"name": "alternate", "type": "string", "doc": ""}, {"name":
"additionalTextualVariantAnnotations", "type": ["null", {"type": "map", "values": "string"}], "doc":
""}, {"name": "additionalNumericVariantAnnotations", "type": ["null", {"type": "map", "values":
"float"}], "doc": ""}, {"name": "comments", "type": ["null", {"type": "array", "items": "string"}],
"doc": ""}, {"name": "alleleOrigins", "type": {"type": "array", "items": "AlleleOrigin"}, "doc":
""}]}}], "doc": ""}, {"name": "genomicInterpretation", "type": "string", "doc": ""}, {"name":
"references", "type": ["null", {"type": "array", "items": "string"}], "doc": ""}, {"name":
"referenceDatabasesVersions", "type": {"type": "map", "values": "string"}, "doc": ""}, {"name":
"softwareVersions", "type": {"type": "map", "values": "string"}, "doc": ""}, {"name":
"genePanelsCoverage", "type": {"type": "map", "values": {"type": "array", "items": {"type": "map",
"values": "string"}}}, "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"candidateStructuralVariants",
"candidateVariants",
"genePanelsCoverage",
"genomicInterpretation",
"interpretationRequestId",
"interpretationRequestVersion",
"referenceDatabasesVersions",
"references",
"reportingDate",
"softwareVersions",
"user",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {
'candidateStructuralVariants': ReportedStructuralVariantCancer,
'candidateVariants': ReportedVariantCancer,
}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {
'candidateStructuralVariants': ReportedStructuralVariantCancer,
'candidateVariants': ReportedVariantCancer,
}
return embeddedTypes[fieldName]
__slots__ = [
'candidateStructuralVariants', 'candidateVariants',
'genePanelsCoverage', 'genomicInterpretation',
'interpretationRequestId', 'interpretationRequestVersion',
'referenceDatabasesVersions', 'references', 'reportingDate',
'softwareVersions', 'user'
]
def __init__(self, **kwargs):
self.candidateStructuralVariants = kwargs.get(
'candidateStructuralVariants', None)
self.candidateVariants = kwargs.get(
'candidateVariants', None)
self.genePanelsCoverage = kwargs.get(
'genePanelsCoverage', None)
self.genomicInterpretation = kwargs.get(
'genomicInterpretation', None)
self.interpretationRequestId = kwargs.get(
'interpretationRequestId', None)
self.interpretationRequestVersion = kwargs.get(
'interpretationRequestVersion', None)
self.referenceDatabasesVersions = kwargs.get(
'referenceDatabasesVersions', None)
self.references = kwargs.get(
'references', None)
self.reportingDate = kwargs.get(
'reportingDate', None)
self.softwareVersions = kwargs.get(
'softwareVersions', None)
self.user = kwargs.get(
'user', None)
class ClinicalReportRD(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "ClinicalReportRD", "namespace": "org.gel.models.report.avro", "fields":
[{"name": "interpretationRequestId", "type": "string", "doc": ""}, {"name":
"interpretationRequestVersion", "type": "string", "doc": ""}, {"name":
"interpretationRequestAnalysisVersion", "type": ["null", "string"], "doc": ""}, {"name":
"reportingDate", "type": "string", "doc": ""}, {"name": "user", "type": "string", "doc": ""},
{"name": "candidateVariants", "type": ["null", {"type": "array", "items": {"type": "record", "name":
"ReportedVariant", "fields": [{"name": "chromosome", "type": "string", "doc": ""}, {"name":
"dbSnpId", "type": ["null", "string"], "doc": ""}, {"name": "position", "type": "int", "doc": ""},
{"name": "reference", "type": "string", "doc": ""}, {"name": "alternate", "type": "string", "doc":
""}, {"name": "calledGenotypes", "type": {"type": "array", "items": {"type": "record", "name":
"CalledGenotype", "doc": "", "fields": [{"name": "gelId", "type": "string", "doc": ""}, {"name":
"sampleId", "type": "string", "doc": ""}, {"name": "genotype", "type": {"type": "enum", "name":
"Zygosity", "doc": "", "symbols": ["reference_homozygous", "heterozygous", "alternate_homozygous",
"missing", "half_missing_reference", "half_missing_alternate", "alternate_hemizigous",
"reference_hemizigous", "unk"]}, "doc": ""}, {"name": "phaseSet", "type": ["null", "int"], "doc":
""}, {"name": "depthReference", "type": ["null", "int"], "doc": ""}, {"name": "depthAlternate",
"type": ["null", "int"], "doc": ""}, {"name": "copyNumber", "type": ["null", "int"], "doc": ""}]}},
"doc": ""}, {"name": "reportEvents", "type": {"type": "array", "items": {"type": "record", "name":
"ReportEvent", "fields": [{"name": "reportEventId", "type": "string", "doc": ""}, {"name":
"phenotype", "type": "string", "doc": ""}, {"name": "panelName", "type": ["null", "string"], "doc":
""}, {"name": "panelVersion", "type": ["null", "string"], "doc": ""}, {"name": "modeOfInheritance",
"type": {"type": "enum", "name": "ReportedModeOfInheritance", "doc": "", "symbols": ["monoallelic",
"monoallelic_not_imprinted", "monoallelic_maternally_imprinted", "monoallelic_paternally_imprinted",
"biallelic", "monoallelic_and_biallelic", "monoallelic_and_more_severe_biallelic",
"xlinked_biallelic", "xlinked_monoallelic", "mitochondrial", "unknown"]}, "doc": ""}, {"name":
"genomicFeature", "type": {"type": "record", "name": "GenomicFeature", "fields": [{"name":
"featureType", "type": {"type": "enum", "name": "FeatureTypes", "symbols": ["RegulatoryRegion",
"Gene", "Transcript"]}, "doc": ""}, {"name": "ensemblId", "type": "string", "doc": ""}, {"name":
"hgnc", "type": ["null", "string"], "doc": ""}, {"name": "otherIds", "type": ["null", {"type":
"map", "values": "string"}], "doc": ""}]}, "doc": ""}, {"name": "penetrance", "type": {"type":
"enum", "name": "Penetrance", "namespace": "org.gel.models.participant.avro", "doc": "", "symbols":
["complete", "incomplete"]}, "doc": ""}, {"name": "score", "type": "float", "doc": ""}, {"name":
"vendorSpecificScores", "type": ["null", {"type": "map", "values": "float"}], "doc": ""}, {"name":
"variantClassification", "type": ["null", {"type": "enum", "name": "VariantClassification", "doc":
"", "symbols": ["pathogenic_variant", "likely_pathogenic_variant",
"variant_of_unknown_clinical_significance", "likely_benign_variant", "benign_variant",
"not_assessed"]}], "doc": ""}, {"name": "fullyExplainsPhenotype", "type": ["null", "boolean"],
"doc": ""}, {"name": "groupOfVariants", "type": ["null", "int"], "doc": ""}, {"name":
"eventJustification", "type": ["null", "string"], "doc": ""}, {"name": "tier", "type": ["null",
{"type": "enum", "name": "Tier", "doc": "", "symbols": ["NONE", "TIER1", "TIER2", "TIER3"]}], "doc":
""}]}}, "doc": ""}, {"name": "additionalTextualVariantAnnotations", "type": ["null", {"type": "map",
"values": "string"}], "doc": ""}, {"name": "evidenceIds", "type": ["null", {"type": "map", "values":
"string"}], "doc": ""}, {"name": "additionalNumericVariantAnnotations", "type": ["null", {"type":
"map", "values": "float"}], "doc": ""}, {"name": "comments", "type": ["null", {"type": "array",
"items": "string"}], "doc": ""}]}}], "doc": ""}, {"name": "candidateStructuralVariants", "type":
["null", {"type": "array", "items": {"type": "record", "name": "ReportedStructuralVariant",
"fields": [{"name": "chromosome", "type": "string", "doc": ""}, {"name": "start", "type": "int",
"doc": ""}, {"name": "end", "type": "int", "doc": ""}, {"name": "type", "type": "string", "doc":
""}, {"name": "reference", "type": "string", "doc": ""}, {"name": "alternate", "type": "string",
"doc": ""}, {"name": "calledGenotypes", "type": {"type": "array", "items": "CalledGenotype"}},
{"name": "reportEvents", "type": {"type": "array", "items": "ReportEvent"}, "doc": ""}, {"name":
"additionalTextualVariantAnnotations", "type": ["null", {"type": "map", "values": "string"}], "doc":
""}, {"name": "evidenceIds", "type": ["null", {"type": "map", "values": "string"}], "doc": ""},
{"name": "additionalNumericVariantAnnotations", "type": ["null", {"type": "map", "values":
"float"}], "doc": ""}, {"name": "comments", "type": ["null", {"type": "array", "items": "string"}],
"doc": ""}]}}], "doc": ""}, {"name": "genomicInterpretation", "type": "string", "doc": ""}, {"name":
"additionalAnalysisPanels", "type": ["null", {"type": "array", "items": {"type": "record", "name":
"AdditionalAnalysisPanel", "fields": [{"name": "specificDisease", "type": "string"}, {"name":
"panelName", "type": "string"}, {"name": "panelVersion", "type": ["null", "string"]}]}}]}, {"name":
"supportingEvidence", "type": ["null", {"type": "array", "items": "string"}], "doc": ""}, {"name":
"referenceDatabasesVersions", "type": {"type": "map", "values": "string"}, "doc": ""}, {"name":
"softwareVersions", "type": {"type": "map", "values": "string"}, "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"additionalAnalysisPanels",
"candidateStructuralVariants",
"candidateVariants",
"genomicInterpretation",
"interpretationRequestAnalysisVersion",
"interpretationRequestId",
"interpretationRequestVersion",
"referenceDatabasesVersions",
"reportingDate",
"softwareVersions",
"supportingEvidence",
"user",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {
'additionalAnalysisPanels': AdditionalAnalysisPanel,
'candidateStructuralVariants': ReportedStructuralVariant,
'candidateVariants': ReportedVariant,
}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {
'additionalAnalysisPanels': AdditionalAnalysisPanel,
'candidateStructuralVariants': ReportedStructuralVariant,
'candidateVariants': ReportedVariant,
}
return embeddedTypes[fieldName]
__slots__ = [
'additionalAnalysisPanels', 'candidateStructuralVariants',
'candidateVariants', 'genomicInterpretation',
'interpretationRequestAnalysisVersion',
'interpretationRequestId', 'interpretationRequestVersion',
'referenceDatabasesVersions', 'reportingDate',
'softwareVersions', 'supportingEvidence', 'user'
]
def __init__(self, **kwargs):
self.additionalAnalysisPanels = kwargs.get(
'additionalAnalysisPanels', None)
self.candidateStructuralVariants = kwargs.get(
'candidateStructuralVariants', None)
self.candidateVariants = kwargs.get(
'candidateVariants', None)
self.genomicInterpretation = kwargs.get(
'genomicInterpretation', None)
self.interpretationRequestAnalysisVersion = kwargs.get(
'interpretationRequestAnalysisVersion', None)
self.interpretationRequestId = kwargs.get(
'interpretationRequestId', None)
self.interpretationRequestVersion = kwargs.get(
'interpretationRequestVersion', None)
self.referenceDatabasesVersions = kwargs.get(
'referenceDatabasesVersions', None)
self.reportingDate = kwargs.get(
'reportingDate', None)
self.softwareVersions = kwargs.get(
'softwareVersions', None)
self.supportingEvidence = kwargs.get(
'supportingEvidence', None)
self.user = kwargs.get(
'user', None)
class ClinicalUtility(object):
"""
No documentation
"""
none = "none"
change_in_medication = "change_in_medication"
surgical_option = "surgical_option"
additional_surveillance_for_proband_or_relatives = "additional_surveillance_for_proband_or_relatives"
clinical_trial_eligibility = "clinical_trial_eligibility"
informs_reproductive_choice = "informs_reproductive_choice"
unknown = "unknown"
other = "other"
def __hash__(self):
return str(self).__hash__()
class Code(object):
"""
    This code defines the change type; it can describe a general
    change to the case, such as CLOSED, or a change to one or more
    variants: * `C0`: **Case Closed successfully**: Clinical Report
    was generated with **one or more Candidate Variants**. * `C1`:
    **Case Closed unsuccessfully**: Clinical Report couldn't be
    generated because **no Candidate Variants were found**. * `C2`:
    **Case Blocked**: Errors were found in this case and it was sent
    to quarantine for further investigation. * `C3`: **Case Shared**:
    This case was shared with another group of users. * `C4`:
    **Supporting evidence change**: One or more supporting evidence
    items were modified for the case __(See ClinicalReport)__. *
    `C5`: **Variant added**: One or more variants were selected as
    Candidate Variants. * `C6`: **Variant removed**: One or more
    variants were removed as Candidate Variants. * `C7`: **Variant
    modified**: One or more Candidate Variants were modified __(any
    change or comment on these variants should be captured)__.
"""
C0 = "C0"
C1 = "C1"
C2 = "C2"
C3 = "C3"
C4 = "C4"
C5 = "C5"
C6 = "C6"
C7 = "C7"
def __hash__(self):
return str(self).__hash__()
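# Classification sketch (illustrative), following the docstring above:
# C0-C3 describe case-level changes, while C4-C7 describe changes to the
# case's supporting evidence or candidate variants.
def _example_is_case_level_change(code):
    """Return True if ``code`` denotes a case-level change (C0-C3)."""
    return code in {Code.C0, Code.C1, Code.C2, Code.C3}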
class ComplexGeneticPhenomena(object):
"""
No documentation
"""
mosaicism = "mosaicism"
monosomy = "monosomy"
disomy = "disomy"
uniparental_disomy = "uniparental_disomy"
trisomy = "trisomy"
other_aneuploidy = "other_aneuploidy"
def __hash__(self):
return str(self).__hash__()
class ConfirmationDecision(object):
"""
No documentation
"""
yes = "yes"
no = "no"
na = "na"
def __hash__(self):
return str(self).__hash__()
class ConfirmationOutcome(object):
"""
No documentation
"""
yes = "yes"
no = "no"
na = "na"
def __hash__(self):
return str(self).__hash__()
class ConsentStatus(ProtocolElement):
"""
Consent Status
"""
_schemaSource = """
{"type": "record", "name": "ConsentStatus", "namespace": "org.gel.models.participant.avro", "doc":
"", "fields": [{"name": "programmeConsent", "type": "boolean", "doc": "", "default": false},
{"name": "primaryFindingConsent", "type": "boolean", "doc": "", "default": false}, {"name":
"secondaryFindingConsent", "type": "boolean", "doc": "", "default": false}, {"name":
"carrierStatusConsent", "type": "boolean", "doc": "", "default": false}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'carrierStatusConsent', 'primaryFindingConsent',
'programmeConsent', 'secondaryFindingConsent'
]
def __init__(self, **kwargs):
self.carrierStatusConsent = kwargs.get(
'carrierStatusConsent', False)
self.primaryFindingConsent = kwargs.get(
'primaryFindingConsent', False)
self.programmeConsent = kwargs.get(
'programmeConsent', False)
self.secondaryFindingConsent = kwargs.get(
'secondaryFindingConsent', False)
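# Illustrative sketch: unlike most records here, ConsentStatus has
# boolean schema defaults, so a bare constructor yields an explicit
# "no consent" record rather than None-valued fields:
#
#     consent = ConsentStatus(programmeConsent=True)
#     assert consent.programmeConsent is True
#     assert consent.carrierStatusConsent is False   # schema default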
class DeliveryTask(ProtocolElement):
"""
Defines a delivery task. A delivery task identifies an instance of
the whole delivery interaction cycle, whether successful or
not. This allows traceability and identification of failed
deliveries.
"""
_schemaSource = """
{"type": "record", "name": "DeliveryTask", "namespace": "org.gel.models.report.avro", "doc": "",
"fields": [{"name": "tieringResultId", "type": "int", "doc": ""}, {"name": "basePath", "type":
"string", "doc": ""}, {"name": "protocolVersion", "type": "string"}, {"name": "analysisType",
"type": {"type": "enum", "name": "AnalysisType", "symbols": ["rare_disease", "cancer"]}}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"analysisType",
"basePath",
"protocolVersion",
"tieringResultId",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'analysisType', 'basePath', 'protocolVersion',
'tieringResultId'
]
def __init__(self, **kwargs):
self.analysisType = kwargs.get(
'analysisType', None)
self.basePath = kwargs.get(
'basePath', None)
self.protocolVersion = kwargs.get(
'protocolVersion', None)
self.tieringResultId = kwargs.get(
'tieringResultId', None)
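# Illustrative sketch: requiredFields lists the Avro fields with no
# default; a hypothetical pre-send check could verify they are all
# populated (field values below are hypothetical):
#
#     task = DeliveryTask(tieringResultId=1, basePath="/data/run1",
#                         protocolVersion="4.2.0",
#                         analysisType="rare_disease")
#     missing = [f for f in DeliveryTask.requiredFields
#                if getattr(task, f) is None]
#     assert not missing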
class DiseasePenetrance(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "DiseasePenetrance", "namespace": "org.gel.models.participant.avro",
"fields": [{"name": "specificDisease", "type": "string"}, {"name": "penetrance", "type": {"type":
"enum", "name": "Penetrance", "doc": "", "symbols": ["complete", "incomplete"]}}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"penetrance",
"specificDisease",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'penetrance', 'specificDisease'
]
def __init__(self, **kwargs):
self.penetrance = kwargs.get(
'penetrance', None)
self.specificDisease = kwargs.get(
'specificDisease', None)
class Disorder(ProtocolElement):
"""
This is quite GEL specific. This is the way it is stored in
ModelCatalogue and PanelApp. Currently all specific disease
titles are assigned to a disease subgroup, so really only
specificDisease needs to be completed, but we add the others
for generality.
"""
_schemaSource = """
{"type": "record", "name": "Disorder", "namespace": "org.gel.models.participant.avro", "doc": "",
"fields": [{"name": "diseaseGroup", "type": ["null", "string"], "doc": ""}, {"name":
"diseaseSubGroup", "type": ["null", "string"], "doc": ""}, {"name": "specificDisease", "type":
["null", "string"], "doc": ""}, {"name": "ageOfOnset", "type": ["null", "float"], "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"ageOfOnset",
"diseaseGroup",
"diseaseSubGroup",
"specificDisease",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'ageOfOnset', 'diseaseGroup', 'diseaseSubGroup',
'specificDisease'
]
def __init__(self, **kwargs):
self.ageOfOnset = kwargs.get(
'ageOfOnset', None)
self.diseaseGroup = kwargs.get(
'diseaseGroup', None)
self.diseaseSubGroup = kwargs.get(
'diseaseSubGroup', None)
self.specificDisease = kwargs.get(
'specificDisease', None)
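# Illustrative sketch: per the docstring above, usually only
# specificDisease needs to be completed; the remaining fields default
# to None (the disease name below is hypothetical):
#
#     disorder = Disorder(specificDisease="Intellectual disability")
#     assert disorder.specificDisease == "Intellectual disability"
#     assert disorder.diseaseGroup is None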
class EthnicCategory(object):
"""
This is the list of ethnicities in ONS16:
* `D`: Mixed: White and Black Caribbean
* `E`: Mixed: White and Black African
* `F`: Mixed: White and Asian
* `G`: Mixed: Any other mixed background
* `A`: White: British
* `B`: White: Irish
* `C`: White: Any other White background
* `L`: Asian or Asian British: Any other Asian background
* `M`: Black or Black British: Caribbean
* `N`: Black or Black British: African
* `H`: Asian or Asian British: Indian
* `J`: Asian or Asian British: Pakistani
* `K`: Asian or Asian British: Bangladeshi
* `P`: Black or Black British: Any other Black background
* `S`: Other Ethnic Groups: Any other ethnic group
* `R`: Other Ethnic Groups: Chinese
* `Z`: Not stated
"""
D = "D"
E = "E"
F = "F"
G = "G"
A = "A"
B = "B"
C = "C"
L = "L"
M = "M"
N = "N"
H = "H"
J = "J"
K = "K"
P = "P"
S = "S"
R = "R"
Z = "Z"
def __hash__(self):
return str(self).__hash__()
class FamilyLevelQuestions(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "FamilyLevelQuestions", "namespace": "org.gel.models.report.avro",
"fields": [{"name": "caseSolvedFamily", "type": {"type": "enum", "name": "CaseSolvedFamily",
"symbols": ["yes", "no", "partially", "unknown"]}, "doc": ""}, {"name": "segregationQuestion",
"type": {"type": "enum", "name": "SegregationQuestion", "symbols": ["yes", "no"]}, "doc": ""},
{"name": "additionalComments", "type": "string", "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"additionalComments",
"caseSolvedFamily",
"segregationQuestion",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'additionalComments', 'caseSolvedFamily',
'segregationQuestion'
]
def __init__(self, **kwargs):
self.additionalComments = kwargs.get(
'additionalComments', None)
self.caseSolvedFamily = kwargs.get(
'caseSolvedFamily', None)
self.segregationQuestion = kwargs.get(
'segregationQuestion', None)
class FamilyQCState(object):
"""
FamilyQCState
"""
noState = "noState"
passedMedicalReviewReadyForInterpretation = "passedMedicalReviewReadyForInterpretation"
passedMedicalReviewNotReadyForInterpretation = "passedMedicalReviewNotReadyForInterpretation"
queryToGel = "queryToGel"
queryToGMC = "queryToGMC"
failed = "failed"
def __hash__(self):
return str(self).__hash__()
class FeatureTypeCancer(object):
"""
The genomic feature type:
* regulatory_region: A region of sequence that is involved in the
control of a biological process. SO:0005836
* gene: A region (or regions) that includes all of the sequence
elements necessary to encode a functional transcript. A gene may
include regulatory regions, transcribed regions and/or other
functional sequence regions. SO:0000704
* transcript: An RNA synthesized on a DNA or RNA template by an
RNA polymerase. SO:0000673
"""
regulatory_region = "regulatory_region"
gene = "gene"
transcript = "transcript"
def __hash__(self):
return str(self).__hash__()
class FeatureTypes(object):
"""
No documentation
"""
RegulatoryRegion = "RegulatoryRegion"
Gene = "Gene"
Transcript = "Transcript"
def __hash__(self):
return str(self).__hash__()
class File(ProtocolElement):
"""
This defines a file. This record is defined by the sampleId and
a URI. Currently sampleId can be a single string or an array of
strings if multiple samples are associated with the same file.
"""
_schemaSource = """
{"type": "record", "name": "File", "namespace": "org.gel.models.report.avro", "doc": "", "fields":
[{"name": "sampleId", "type": ["null", {"type": "array", "items": "string"}], "doc": ""}, {"name":
"uriFile", "type": "string", "doc": ""}, {"name": "fileType", "type": {"type": "enum", "name":
"FileType", "symbols": ["BAM", "gVCF", "VCF_small", "VCF_somatic_small", "VCF_CNV",
"VCF_somatic_CNV", "VCF_SV", "VCF_somatic_SV", "VCF_SV_CNV", "SVG", "ANN", "BigWig", "MD5Sum",
"ROH", "OTHER", "PARTITION", "VARIANT_FREQUENCIES", "COVERAGE"]}}, {"name": "md5Sum", "type":
["null", "string"]}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"fileType",
"md5Sum",
"sampleId",
"uriFile",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'fileType', 'md5Sum', 'sampleId', 'uriFile'
]
def __init__(self, **kwargs):
self.fileType = kwargs.get(
'fileType', None)
self.md5Sum = kwargs.get(
'md5Sum', None)
self.sampleId = kwargs.get(
'sampleId', None)
self.uriFile = kwargs.get(
'uriFile', None)
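# Illustrative sketch: sampleId may be None or a list of sample
# identifiers when several samples share one file, and fileType takes
# one of the FileType symbols (identifiers below are hypothetical):
#
#     bam = File(sampleId=["S1", "S2"],
#                uriFile="file:///data/merged.bam",
#                fileType=FileType.BAM, md5Sum=None)
#     assert bam.fileType == "BAM"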
class FileType(object):
"""
No documentation
"""
BAM = "BAM"
gVCF = "gVCF"
VCF_small = "VCF_small"
VCF_somatic_small = "VCF_somatic_small"
VCF_CNV = "VCF_CNV"
VCF_somatic_CNV = "VCF_somatic_CNV"
VCF_SV = "VCF_SV"
VCF_somatic_SV = "VCF_somatic_SV"
VCF_SV_CNV = "VCF_SV_CNV"
SVG = "SVG"
ANN = "ANN"
BigWig = "BigWig"
MD5Sum = "MD5Sum"
ROH = "ROH"
OTHER = "OTHER"
PARTITION = "PARTITION"
VARIANT_FREQUENCIES = "VARIANT_FREQUENCIES"
COVERAGE = "COVERAGE"
def __hash__(self):
return str(self).__hash__()
class GenomicFeature(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "GenomicFeature", "namespace": "org.gel.models.report.avro", "fields":
[{"name": "featureType", "type": {"type": "enum", "name": "FeatureTypes", "symbols":
["RegulatoryRegion", "Gene", "Transcript"]}, "doc": ""}, {"name": "ensemblId", "type": "string",
"doc": ""}, {"name": "hgnc", "type": ["null", "string"], "doc": ""}, {"name": "otherIds", "type":
["null", {"type": "map", "values": "string"}], "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"ensemblId",
"featureType",
"hgnc",
"otherIds",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'ensemblId', 'featureType', 'hgnc', 'otherIds'
]
def __init__(self, **kwargs):
self.ensemblId = kwargs.get(
'ensemblId', None)
self.featureType = kwargs.get(
'featureType', None)
self.hgnc = kwargs.get(
'hgnc', None)
self.otherIds = kwargs.get(
'otherIds', None)
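# Illustrative sketch: featureType takes one of the FeatureTypes
# symbols, and otherIds is an optional map of additional identifiers
# (the example gene is BRCA2; the otherIds content is hypothetical):
#
#     feature = GenomicFeature(featureType=FeatureTypes.Gene,
#                              ensemblId="ENSG00000139618",
#                              hgnc="BRCA2",
#                              otherIds={"entrez": "675"})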
class GenomicFeatureCancer(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "GenomicFeatureCancer", "namespace": "org.gel.models.report.avro",
"fields": [{"name": "featureType", "type": {"type": "enum", "name": "FeatureTypeCancer", "doc": "",
"symbols": ["regulatory_region", "gene", "transcript"]}, "doc": ""}, {"name": "ensemblId", "type":
"string", "doc": ""}, {"name": "refSeqTranscriptId", "type": "string", "doc": ""}, {"name":
"refSeqProteinId", "type": "string", "doc": ""}, {"name": "geneName", "type": "string", "doc": ""},
{"name": "roleInCancer", "type": ["null", {"type": "enum", "name": "RoleInCancer", "doc": "",
"symbols": ["oncogene", "tumor_suppressor_gene", "both"]}], "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"ensemblId",
"featureType",
"geneName",
"refSeqProteinId",
"refSeqTranscriptId",
"roleInCancer",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'ensemblId', 'featureType', 'geneName', 'refSeqProteinId',
'refSeqTranscriptId', 'roleInCancer'
]
def __init__(self, **kwargs):
self.ensemblId = kwargs.get(
'ensemblId', None)
self.featureType = kwargs.get(
'featureType', None)
self.geneName = kwargs.get(
'geneName', None)
self.refSeqProteinId = kwargs.get(
'refSeqProteinId', None)
self.refSeqTranscriptId = kwargs.get(
'refSeqTranscriptId', None)
self.roleInCancer = kwargs.get(
'roleInCancer', None)
class GermlineSample(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "GermlineSample", "namespace": "org.gel.models.participant.avro",
"fields": [{"name": "sampleId", "type": "string", "doc": ""}, {"name": "labSampleId", "type": "int",
"doc": ""}, {"name": "LDPCode", "type": "string", "doc": ""}, {"name": "source", "type": ["null",
{"type": "enum", "name": "SampleSource", "symbols": ["TUMOUR",
"BONE_MARROW_ASPIRATE_TUMOUR_SORTED_CELLS", "BONE_MARROW_ASPIRATE_TUMOUR_CELLS", "BLOOD", "SALIVA",
"FIBROBLAST", "TISSUE"]}], "doc": ""}, {"name": "product", "type": ["null", {"type": "enum", "name":
"Product", "symbols": ["DNA", "RNA"]}], "doc": ""}, {"name": "preparationMethod", "type": ["null",
{"type": "enum", "name": "PreparationMethod", "symbols": ["EDTA", "ORAGENE", "FF", "FFPE",
"CD128_SORTED_CELLS", "ASPIRATE"]}], "doc": ""}, {"name": "programmePhase", "type": ["null",
{"type": "enum", "name": "ProgrammePhase", "symbols": ["CRUK", "OXFORD", "CLL", "IIP", "MAIN",
"EXPT"]}], "doc": ""}, {"name": "clinicalSampleDateTime", "type": ["null", "string"], "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"LDPCode",
"clinicalSampleDateTime",
"labSampleId",
"preparationMethod",
"product",
"programmePhase",
"sampleId",
"source",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'LDPCode', 'clinicalSampleDateTime', 'labSampleId',
'preparationMethod', 'product', 'programmePhase', 'sampleId',
'source'
]
def __init__(self, **kwargs):
self.LDPCode = kwargs.get(
'LDPCode', None)
self.clinicalSampleDateTime = kwargs.get(
'clinicalSampleDateTime', None)
self.labSampleId = kwargs.get(
'labSampleId', None)
self.preparationMethod = kwargs.get(
'preparationMethod', None)
self.product = kwargs.get(
'product', None)
self.programmePhase = kwargs.get(
'programmePhase', None)
self.sampleId = kwargs.get(
'sampleId', None)
self.source = kwargs.get(
'source', None)
class HpoTerm(ProtocolElement):
"""
This defines an HPO term and its modifiers (possibly multiple).
If the presence of an HPO term is unknown, there is no entry for
it in the list.
"""
_schemaSource = """
{"type": "record", "name": "HpoTerm", "namespace": "org.gel.models.participant.avro", "doc": "",
"fields": [{"name": "term", "type": "string", "doc": ""}, {"name": "termPresence", "type": ["null",
{"type": "enum", "name": "TernaryOption", "doc": "", "symbols": ["yes", "no", "unknown"]}], "doc":
""}, {"name": "hpoBuildNumber", "type": ["null", "string"], "doc": ""}, {"name": "modifiers",
"type": ["null", {"type": "record", "name": "HpoTermModifiers", "fields": [{"name": "laterality",
"type": ["null", {"type": "enum", "name": "Laterality", "symbols": ["RIGHT", "UNILATERAL",
"BILATERAL", "LEFT"]}]}, {"name": "progression", "type": ["null", {"type": "enum", "name":
"Progression", "symbols": ["PROGRESSIVE", "NONPROGRESSIVE"]}]}, {"name": "severity", "type":
["null", {"type": "enum", "name": "Severity", "symbols": ["BORDERLINE", "MILD", "MODERATE",
"SEVERE", "PROFOUND"]}]}, {"name": "spatialPattern", "type": ["null", {"type": "enum", "name":
"SpatialPattern", "symbols": ["DISTAL", "GENERALIZED", "LOCALIZED", "PROXIMAL"]}]}]}], "doc": ""},
{"name": "ageOfOnset", "type": ["null", {"type": "enum", "name": "AgeOfOnset", "symbols":
["EMBRYONAL_ONSET", "FETAL_ONSET", "NEONATAL_ONSET", "INFANTILE_ONSET", "CHILDHOOD_ONSET",
"JUVENILE_ONSET", "YOUNG_ADULT_ONSET", "LATE_ONSET", "MIDDLE_AGE_ONSET"]}], "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"ageOfOnset",
"hpoBuildNumber",
"modifiers",
"term",
"termPresence",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {
'modifiers': HpoTermModifiers,
}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {
'modifiers': HpoTermModifiers,
}
return embeddedTypes[fieldName]
__slots__ = [
'ageOfOnset', 'hpoBuildNumber', 'modifiers', 'term',
'termPresence'
]
def __init__(self, **kwargs):
self.ageOfOnset = kwargs.get(
'ageOfOnset', None)
self.hpoBuildNumber = kwargs.get(
'hpoBuildNumber', None)
self.modifiers = kwargs.get(
'modifiers', None)
self.term = kwargs.get(
'term', None)
self.termPresence = kwargs.get(
'termPresence', None)
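# Illustrative sketch: modifiers is an embedded record, which is what
# isEmbeddedType/getEmbeddedType report; generic (de)serialisers can
# use them to discover that the field decodes as HpoTermModifiers
# (HP:0001250 is the HPO term for seizure):
#
#     assert HpoTerm.isEmbeddedType("modifiers")
#     assert HpoTerm.getEmbeddedType("modifiers") is HpoTermModifiers
#     term = HpoTerm(term="HP:0001250",
#                    modifiers=HpoTermModifiers(laterality="LEFT"))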
class HpoTermModifiers(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "HpoTermModifiers", "namespace": "org.gel.models.participant.avro",
"fields": [{"name": "laterality", "type": ["null", {"type": "enum", "name": "Laterality", "symbols":
["RIGHT", "UNILATERAL", "BILATERAL", "LEFT"]}]}, {"name": "progression", "type": ["null", {"type":
"enum", "name": "Progression", "symbols": ["PROGRESSIVE", "NONPROGRESSIVE"]}]}, {"name": "severity",
"type": ["null", {"type": "enum", "name": "Severity", "symbols": ["BORDERLINE", "MILD", "MODERATE",
"SEVERE", "PROFOUND"]}]}, {"name": "spatialPattern", "type": ["null", {"type": "enum", "name":
"SpatialPattern", "symbols": ["DISTAL", "GENERALIZED", "LOCALIZED", "PROXIMAL"]}]}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"laterality",
"progression",
"severity",
"spatialPattern",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'laterality', 'progression', 'severity', 'spatialPattern'
]
def __init__(self, **kwargs):
self.laterality = kwargs.get(
'laterality', None)
self.progression = kwargs.get(
'progression', None)
self.severity = kwargs.get(
'severity', None)
self.spatialPattern = kwargs.get(
'spatialPattern', None)
class InbreedingCoefficient(ProtocolElement):
"""
Inbreeding coefficient
"""
_schemaSource = """
{"type": "record", "name": "InbreedingCoefficient", "namespace": "org.gel.models.participant.avro",
"doc": "", "fields": [{"name": "sampleId", "type": "string", "doc": ""}, {"name": "program", "type":
"string", "doc": ""}, {"name": "version", "type": "string", "doc": ""}, {"name": "estimationMethod",
"type": "string", "doc": ""}, {"name": "coefficient", "type": "double", "doc": ""}, {"name":
"standardError", "type": ["null", "double"], "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"coefficient",
"estimationMethod",
"program",
"sampleId",
"standardError",
"version",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'coefficient', 'estimationMethod', 'program', 'sampleId',
'standardError', 'version'
]
def __init__(self, **kwargs):
self.coefficient = kwargs.get(
'coefficient', None)
self.estimationMethod = kwargs.get(
'estimationMethod', None)
self.program = kwargs.get(
'program', None)
self.sampleId = kwargs.get(
'sampleId', None)
self.standardError = kwargs.get(
'standardError', None)
self.version = kwargs.get(
'version', None)
class InterpretationData(ProtocolElement):
"""
Represents the set of all interpretation data (excluding file
contents) to be stored in MDT for one TieringResult.
Semantic restrictions (not automatically verifiable):
- All InterpretedGenomeRD records in interpretationResults refer
to the TieringResult tieringResult.
- All InterpretedGenomeRD records in interpretationResults have
passed the QC stage and have been approved by the originating
GMCs.
"""
_schemaSource = """
{"type": "record", "name": "InterpretationData", "namespace": "org.gel.models.report.avro", "doc":
"", "fields": [{"name": "tieringResult", "type": {"type": "record", "name": "TieringResult", "doc":
"", "fields": [{"name": "versionControl", "type": {"type": "record", "name": "ReportVersionControl",
"fields": [{"name": "gitVersionControl", "type": "string", "doc": "", "default": "4.2.0"}]}, "doc":
""}, {"name": "genomeAssemblyVersion", "type": "string", "doc": "", "default": "GRCh37.p13"},
{"name": "cellbaseVersion", "type": "string", "doc": "", "default": "4.0"}, {"name": "workspace",
"type": {"type": "array", "items": "string"}, "doc": ""}, {"name": "bams", "type": {"type": "array",
"items": {"type": "record", "name": "File", "doc": "", "fields": [{"name": "sampleId", "type":
["null", {"type": "array", "items": "string"}], "doc": ""}, {"name": "uriFile", "type": "string",
"doc": ""}, {"name": "fileType", "type": {"type": "enum", "name": "FileType", "symbols": ["BAM",
"gVCF", "VCF_small", "VCF_somatic_small", "VCF_CNV", "VCF_somatic_CNV", "VCF_SV", "VCF_somatic_SV",
"VCF_SV_CNV", "SVG", "ANN", "BigWig", "MD5Sum", "ROH", "OTHER", "PARTITION", "VARIANT_FREQUENCIES",
"COVERAGE"]}}, {"name": "md5Sum", "type": ["null", "string"]}]}}, "doc": ""}, {"name": "vcfs",
"type": {"type": "array", "items": "File"}, "doc": ""}, {"name": "bigWigs", "type": ["null",
{"type": "array", "items": "File"}], "doc": ""}, {"name": "pedigreeDiagram", "type": ["null",
"File"], "doc": ""}, {"name": "annotationFile", "type": ["null", "File"], "doc": ""}, {"name":
"otherFiles", "type": ["null", {"type": "map", "values": "File"}], "doc": ""}, {"name": "pedigree",
"type": {"type": "record", "name": "Pedigree", "namespace": "org.gel.models.participant.avro",
"doc": "", "fields": [{"name": "versionControl", "type": ["null", {"type": "record", "name":
"VersionControl", "fields": [{"name": "GitVersionControl", "type": "string", "doc": "", "default":
"1.0.3"}]}], "doc": ""}, {"name": "LDPCode", "type": ["null", "string"]}, {"name": "familyId",
"type": "string", "doc": ""}, {"name": "members", "type": {"type": "array", "items": {"type":
"record", "name": "PedigreeMember", "doc": "", "fields": [{"name": "pedigreeId", "type": ["null",
"int"], "doc": ""}, {"name": "isProband", "type": ["null", "boolean"], "doc": ""}, {"name":
"participantId", "type": ["null", "string"], "doc": ""}, {"name": "participantQCState", "type":
["null", {"type": "enum", "name": "ParticipantQCState", "doc": "", "symbols": ["noState",
"passedMedicalReviewReadyForInterpretation", "passedMedicalReviewNotReadyForInterpretation",
"queryToGel", "queryToGMC", "failed"]}], "doc": ""}, {"name": "gelSuperFamilyId", "type": ["null",
"string"], "doc": ""}, {"name": "sex", "type": {"type": "enum", "name": "Sex", "doc": "", "symbols":
["MALE", "FEMALE", "UNKNOWN"]}, "doc": ""}, {"name": "personKaryotypicSex", "type": ["null",
{"type": "enum", "name": "PersonKaryotipicSex", "doc": "", "symbols": ["UNKNOWN", "XX", "XY", "XO",
"XXY", "XXX", "XXYY", "XXXY", "XXXX", "XYY", "OTHER"]}], "doc": ""}, {"name": "yearOfBirth", "type":
["null", "int"], "doc": ""}, {"name": "fatherId", "type": ["null", "int"], "doc": ""}, {"name":
"motherId", "type": ["null", "int"], "doc": ""}, {"name": "superFatherId", "type": ["null", "int"],
"doc": ""}, {"name": "superMotherId", "type": ["null", "int"], "doc": ""}, {"name": "twinGroup",
"type": ["null", "int"], "doc": ""}, {"name": "monozygotic", "type": ["null", {"type": "enum",
"name": "TernaryOption", "doc": "", "symbols": ["yes", "no", "unknown"]}], "doc": ""}, {"name":
"adoptedStatus", "type": ["null", {"type": "enum", "name": "AdoptedStatus", "doc": "", "symbols":
["notadopted", "adoptedin", "adoptedout"]}], "doc": ""}, {"name": "lifeStatus", "type": ["null",
{"type": "enum", "name": "LifeStatus", "doc": "", "symbols": ["ALIVE", "ABORTED", "DECEASED",
"UNBORN", "STILLBORN", "MISCARRIAGE"]}], "doc": ""}, {"name": "consanguineousParents", "type":
["null", "TernaryOption"], "doc": ""}, {"name": "affectionStatus", "type": ["null", {"type": "enum",
"name": "AffectionStatus", "doc": "", "symbols": ["UNAFFECTED", "AFFECTED", "UNCERTAIN"]}], "doc":
""}, {"name": "disorderList", "type": ["null", {"type": "array", "items": {"type": "record", "name":
"Disorder", "doc": "", "fields": [{"name": "diseaseGroup", "type": ["null", "string"], "doc": ""},
{"name": "diseaseSubGroup", "type": ["null", "string"], "doc": ""}, {"name": "specificDisease",
"type": ["null", "string"], "doc": ""}, {"name": "ageOfOnset", "type": ["null", "float"], "doc":
""}]}}], "doc": ""}, {"name": "hpoTermList", "type": ["null", {"type": "array", "items": {"type":
"record", "name": "HpoTerm", "doc": "", "fields": [{"name": "term", "type": "string", "doc": ""},
{"name": "termPresence", "type": ["null", "TernaryOption"], "doc": ""}, {"name": "hpoBuildNumber",
"type": ["null", "string"], "doc": ""}, {"name": "modifiers", "type": ["null", {"type": "record",
"name": "HpoTermModifiers", "fields": [{"name": "laterality", "type": ["null", {"type": "enum",
"name": "Laterality", "symbols": ["RIGHT", "UNILATERAL", "BILATERAL", "LEFT"]}]}, {"name":
"progression", "type": ["null", {"type": "enum", "name": "Progression", "symbols": ["PROGRESSIVE",
"NONPROGRESSIVE"]}]}, {"name": "severity", "type": ["null", {"type": "enum", "name": "Severity",
"symbols": ["BORDERLINE", "MILD", "MODERATE", "SEVERE", "PROFOUND"]}]}, {"name": "spatialPattern",
"type": ["null", {"type": "enum", "name": "SpatialPattern", "symbols": ["DISTAL", "GENERALIZED",
"LOCALIZED", "PROXIMAL"]}]}]}], "doc": ""}, {"name": "ageOfOnset", "type": ["null", {"type": "enum",
"name": "AgeOfOnset", "symbols": ["EMBRYONAL_ONSET", "FETAL_ONSET", "NEONATAL_ONSET",
"INFANTILE_ONSET", "CHILDHOOD_ONSET", "JUVENILE_ONSET", "YOUNG_ADULT_ONSET", "LATE_ONSET",
"MIDDLE_AGE_ONSET"]}], "doc": ""}]}}], "doc": ""}, {"name": "ancestries", "type": ["null", {"type":
"record", "name": "Ancestries", "doc": "", "fields": [{"name": "mothersEthnicOrigin", "type":
["null", {"type": "enum", "name": "EthnicCategory", "doc": "", "symbols": ["D", "E", "F", "G", "A",
"B", "C", "L", "M", "N", "H", "J", "K", "P", "S", "R", "Z"]}], "doc": ""}, {"name":
"mothersOtherRelevantAncestry", "type": ["null", "string"], "doc": ""}, {"name":
"fathersEthnicOrigin", "type": ["null", "EthnicCategory"], "doc": ""}, {"name":
"fathersOtherRelevantAncestry", "type": ["null", "string"], "doc": ""}, {"name":
"chiSquare1KGenomesPhase3Pop", "type": ["null", {"type": "array", "items": {"type": "record",
"name": "ChiSquare1KGenomesPhase3Pop", "doc": "", "fields": [{"name": "kgSuperPopCategory", "type":
{"type": "enum", "name": "KgSuperPopCategory", "doc": "", "symbols": ["AFR", "AMR", "EAS", "EUR",
"SAS"]}, "doc": ""}, {"name": "kgPopCategory", "type": ["null", {"type": "enum", "name":
"KgPopCategory", "doc": "", "symbols": ["ACB", "ASW", "BEB", "CDX", "CEU", "CHB", "CHS", "CLM",
"ESN", "FIN", "GBR", "GIH", "GWD", "IBS", "ITU", "JPT", "KHV", "LWK", "MSL", "MXL", "PEL", "PJL",
"PUR", "STU", "TSI", "YRI"]}], "doc": ""}, {"name": "chiSquare", "type": "double", "doc": ""}]}}],
"doc": ""}]}], "doc": ""}, {"name": "consentStatus", "type": ["null", {"type": "record", "name":
"ConsentStatus", "doc": "", "fields": [{"name": "programmeConsent", "type": "boolean", "doc": "",
"default": false}, {"name": "primaryFindingConsent", "type": "boolean", "doc": "", "default":
false}, {"name": "secondaryFindingConsent", "type": "boolean", "doc": "", "default": false},
{"name": "carrierStatusConsent", "type": "boolean", "doc": "", "default": false}]}], "doc": ""},
{"name": "samples", "type": ["null", {"type": "array", "items": {"type": "record", "name": "Sample",
"fields": [{"name": "sampleId", "type": "string", "doc": ""}, {"name": "labSampleId", "type": "int",
"doc": ""}, {"name": "source", "type": ["null", {"type": "enum", "name": "SampleSource", "symbols":
["BLOOD", "SALIVA", "FIBROBLAST", "TISSUE"]}], "doc": ""}, {"name": "product", "type": ["null",
{"type": "enum", "name": "Product", "symbols": ["DNA", "RNA"]}], "doc": ""}, {"name":
"preparationMethod", "type": ["null", {"type": "enum", "name": "PreparationMethod", "symbols":
["EDTA", "ORAGENE", "FF", "FFPE", "CD128_SORTED_CELLS", "ASPIRATE"]}], "doc": ""}]}}], "doc": ""},
{"name": "inbreedingCoefficient", "type": ["null", {"type": "record", "name":
"InbreedingCoefficient", "doc": "", "fields": [{"name": "sampleId", "type": "string", "doc": ""},
{"name": "program", "type": "string", "doc": ""}, {"name": "version", "type": "string", "doc": ""},
{"name": "estimationMethod", "type": "string", "doc": ""}, {"name": "coefficient", "type": "double",
"doc": ""}, {"name": "standardError", "type": ["null", "double"], "doc": ""}]}], "doc": ""},
{"name": "additionalInformation", "type": ["null", {"type": "map", "values": "string"}], "doc":
""}]}}}, {"name": "analysisPanels", "type": ["null", {"type": "array", "items": {"type": "record",
"name": "AnalysisPanel", "fields": [{"name": "specificDisease", "type": "string"}, {"name":
"panelName", "type": "string"}, {"name": "panelVersion", "type": ["null", "string"]}, {"name":
"reviewOutcome", "type": "string"}, {"name": "multipleGeneticOrigins", "type": "string"}]}}]},
{"name": "diseasePenetrances", "type": ["null", {"type": "array", "items": {"type": "record",
"name": "DiseasePenetrance", "fields": [{"name": "specificDisease", "type": "string"}, {"name":
"penetrance", "type": {"type": "enum", "name": "Penetrance", "doc": "", "symbols": ["complete",
"incomplete"]}}]}}]}, {"name": "readyForAnalysis", "type": "boolean"}, {"name": "familyQCState",
"type": ["null", {"type": "enum", "name": "FamilyQCState", "doc": "", "symbols": ["noState",
"passedMedicalReviewReadyForInterpretation", "passedMedicalReviewNotReadyForInterpretation",
"queryToGel", "queryToGMC", "failed"]}]}]}, "doc": ""}, {"name": "tieredVariants", "type": {"type":
"array", "items": {"type": "record", "name": "ReportedVariant", "fields": [{"name": "chromosome",
"type": "string", "doc": ""}, {"name": "dbSnpId", "type": ["null", "string"], "doc": ""}, {"name":
"position", "type": "int", "doc": ""}, {"name": "reference", "type": "string", "doc": ""}, {"name":
"alternate", "type": "string", "doc": ""}, {"name": "calledGenotypes", "type": {"type": "array",
"items": {"type": "record", "name": "CalledGenotype", "doc": "", "fields": [{"name": "gelId",
"type": "string", "doc": ""}, {"name": "sampleId", "type": "string", "doc": ""}, {"name":
"genotype", "type": {"type": "enum", "name": "Zygosity", "doc": "", "symbols":
["reference_homozygous", "heterozygous", "alternate_homozygous", "missing",
"half_missing_reference", "half_missing_alternate", "alternate_hemizigous", "reference_hemizigous",
"unk"]}, "doc": ""}, {"name": "phaseSet", "type": ["null", "int"], "doc": ""}, {"name":
"depthReference", "type": ["null", "int"], "doc": ""}, {"name": "depthAlternate", "type": ["null",
"int"], "doc": ""}, {"name": "copyNumber", "type": ["null", "int"], "doc": ""}]}}, "doc": ""},
{"name": "reportEvents", "type": {"type": "array", "items": {"type": "record", "name":
"ReportEvent", "fields": [{"name": "reportEventId", "type": "string", "doc": ""}, {"name":
"phenotype", "type": "string", "doc": ""}, {"name": "panelName", "type": ["null", "string"], "doc":
""}, {"name": "panelVersion", "type": ["null", "string"], "doc": ""}, {"name": "modeOfInheritance",
"type": {"type": "enum", "name": "ReportedModeOfInheritance", "doc": "", "symbols": ["monoallelic",
"monoallelic_not_imprinted", "monoallelic_maternally_imprinted", "monoallelic_paternally_imprinted",
"biallelic", "monoallelic_and_biallelic", "monoallelic_and_more_severe_biallelic",
"xlinked_biallelic", "xlinked_monoallelic", "mitochondrial", "unknown"]}, "doc": ""}, {"name":
"genomicFeature", "type": {"type": "record", "name": "GenomicFeature", "fields": [{"name":
"featureType", "type": {"type": "enum", "name": "FeatureTypes", "symbols": ["RegulatoryRegion",
"Gene", "Transcript"]}, "doc": ""}, {"name": "ensemblId", "type": "string", "doc": ""}, {"name":
"hgnc", "type": ["null", "string"], "doc": ""}, {"name": "otherIds", "type": ["null", {"type":
"map", "values": "string"}], "doc": ""}]}, "doc": ""}, {"name": "penetrance", "type":
"org.gel.models.participant.avro.Penetrance", "doc": ""}, {"name": "score", "type": "float", "doc":
""}, {"name": "vendorSpecificScores", "type": ["null", {"type": "map", "values": "float"}], "doc":
""}, {"name": "variantClassification", "type": ["null", {"type": "enum", "name":
"VariantClassification", "doc": "", "symbols": ["pathogenic_variant", "likely_pathogenic_variant",
"variant_of_unknown_clinical_significance", "likely_benign_variant", "benign_variant",
"not_assessed"]}], "doc": ""}, {"name": "fullyExplainsPhenotype", "type": ["null", "boolean"],
"doc": ""}, {"name": "groupOfVariants", "type": ["null", "int"], "doc": ""}, {"name":
"eventJustification", "type": ["null", "string"], "doc": ""}, {"name": "tier", "type": ["null",
{"type": "enum", "name": "Tier", "doc": "", "symbols": ["NONE", "TIER1", "TIER2", "TIER3"]}], "doc":
""}]}}, "doc": ""}, {"name": "additionalTextualVariantAnnotations", "type": ["null", {"type": "map",
"values": "string"}], "doc": ""}, {"name": "evidenceIds", "type": ["null", {"type": "map", "values":
"string"}], "doc": ""}, {"name": "additionalNumericVariantAnnotations", "type": ["null", {"type":
"map", "values": "float"}], "doc": ""}, {"name": "comments", "type": ["null", {"type": "array",
"items": "string"}], "doc": ""}]}}, "doc": ""}, {"name": "tieringVersion", "type": "string", "doc":
""}, {"name": "internalStudyId", "type": "string", "doc": ""}, {"name": "complexGeneticPhenomena",
"type": ["null", {"type": "enum", "name": "ComplexGeneticPhenomena", "symbols": ["mosaicism",
"monosomy", "disomy", "uniparental_disomy", "trisomy", "other_aneuploidy"]}], "doc": ""}, {"name":
"otherFamilyHistory", "type": ["null", {"type": "record", "name": "OtherFamilyHistory", "doc": "",
"fields": [{"name": "maternalFamilyHistory", "type": ["null", {"type": "array", "items": "string"}],
"doc": ""}, {"name": "paternalFamilyHistory", "type": ["null", {"type": "array", "items":
"string"}], "doc": ""}]}], "doc": ""}, {"name": "genePanelsCoverage", "type": ["null", {"type":
"map", "values": {"type": "map", "values": {"type": "map", "values": "float"}}}], "doc": ""},
{"name": "additionalInfo", "type": ["null", {"type": "map", "values": "string"}], "doc": ""}]}},
{"name": "interpretationResults", "type": {"type": "array", "items": {"type": "record", "name":
"InterpretedGenomeRD", "fields": [{"name": "versionControl", "type": "ReportVersionControl", "doc":
""}, {"name": "interpretationRequestId", "type": "string", "doc": ""}, {"name": "analysisId",
"type": "string", "doc": ""}, {"name": "companyName", "type": "string", "doc": ""}, {"name":
"reportUri", "type": "string", "doc": ""}, {"name": "reportUrl", "type": "string", "doc": ""},
{"name": "reportedVariants", "type": {"type": "array", "items": "ReportedVariant"}, "doc": ""},
{"name": "referenceDatabasesVersions", "type": {"type": "map", "values": "string"}, "doc": ""},
{"name": "softwareVersions", "type": {"type": "map", "values": "string"}, "doc": ""}, {"name":
"reportedStructuralVariants", "type": ["null", {"type": "array", "items": {"type": "record", "name":
"ReportedStructuralVariant", "fields": [{"name": "chromosome", "type": "string", "doc": ""},
{"name": "start", "type": "int", "doc": ""}, {"name": "end", "type": "int", "doc": ""}, {"name":
"type", "type": "string", "doc": ""}, {"name": "reference", "type": "string", "doc": ""}, {"name":
"alternate", "type": "string", "doc": ""}, {"name": "calledGenotypes", "type": {"type": "array",
"items": "CalledGenotype"}}, {"name": "reportEvents", "type": {"type": "array", "items":
"ReportEvent"}, "doc": ""}, {"name": "additionalTextualVariantAnnotations", "type": ["null",
{"type": "map", "values": "string"}], "doc": ""}, {"name": "evidenceIds", "type": ["null", {"type":
"map", "values": "string"}], "doc": ""}, {"name": "additionalNumericVariantAnnotations", "type":
["null", {"type": "map", "values": "float"}], "doc": ""}, {"name": "comments", "type": ["null",
{"type": "array", "items": "string"}], "doc": ""}]}}], "doc": ""}, {"name": "comments", "type":
["null", {"type": "array", "items": "string"}], "doc": ""}]}}}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"interpretationResults",
"tieringResult",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {
'interpretationResults': InterpretedGenomeRD,
'tieringResult': TieringResult,
}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {
'interpretationResults': InterpretedGenomeRD,
'tieringResult': TieringResult,
}
return embeddedTypes[fieldName]
__slots__ = [
'interpretationResults', 'tieringResult'
]
def __init__(self, **kwargs):
self.interpretationResults = kwargs.get(
'interpretationResults', None)
self.tieringResult = kwargs.get(
'tieringResult', TieringResult())
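# Illustrative sketch: tieringResult defaults to an empty
# TieringResult (defined elsewhere in this module), while
# interpretationResults is expected to hold InterpretedGenomeRD
# records, as the embedded-type helpers report:
#
#     data = InterpretationData()
#     assert isinstance(data.tieringResult, TieringResult)
#     assert InterpretationData.getEmbeddedType(
#         "interpretationResults") is InterpretedGenomeRD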
class InterpretationRequestRD(ProtocolElement):
"""
This record represents basic information for this report
"""
_schemaSource = """
{"type": "record", "name": "InterpretationRequestRD", "namespace": "org.gel.models.report.avro",
"doc": "", "fields": [{"name": "versionControl", "type": {"type": "record", "name":
"ReportVersionControl", "fields": [{"name": "gitVersionControl", "type": "string", "doc": "",
"default": "4.2.0"}]}, "doc": ""}, {"name": "interpretationRequestId", "type": "string", "doc": ""},
{"name": "internalStudyId", "type": "string", "doc": ""}, {"name": "genomeAssemblyVersion", "type":
"string", "doc": "", "default": "GRCh37.p13"}, {"name": "cellbaseVersion", "type": "string", "doc":
"", "default": "4.0"}, {"name": "interpretationRequestVersion", "type": "int", "doc": ""}, {"name":
"interpretGenome", "type": "boolean", "doc": "", "default": false}, {"name": "workspace", "type":
{"type": "array", "items": "string"}, "doc": ""}, {"name": "bams", "type": {"type": "array",
"items": {"type": "record", "name": "File", "doc": "", "fields": [{"name": "sampleId", "type":
["null", {"type": "array", "items": "string"}], "doc": ""}, {"name": "uriFile", "type": "string",
"doc": ""}, {"name": "fileType", "type": {"type": "enum", "name": "FileType", "symbols": ["BAM",
"gVCF", "VCF_small", "VCF_somatic_small", "VCF_CNV", "VCF_somatic_CNV", "VCF_SV", "VCF_somatic_SV",
"VCF_SV_CNV", "SVG", "ANN", "BigWig", "MD5Sum", "ROH", "OTHER", "PARTITION", "VARIANT_FREQUENCIES",
"COVERAGE"]}}, {"name": "md5Sum", "type": ["null", "string"]}]}}, "doc": ""}, {"name": "vcfs",
"type": {"type": "array", "items": "File"}, "doc": ""}, {"name": "bigWigs", "type": ["null",
{"type": "array", "items": "File"}], "doc": ""}, {"name": "pedigreeDiagram", "type": ["null",
"File"], "doc": ""}, {"name": "annotationFile", "type": ["null", "File"], "doc": ""}, {"name":
"otherFiles", "type": ["null", {"type": "map", "values": "File"}], "doc": ""}, {"name": "pedigree",
"type": {"type": "record", "name": "Pedigree", "namespace": "org.gel.models.participant.avro",
"doc": "", "fields": [{"name": "versionControl", "type": ["null", {"type": "record", "name":
"VersionControl", "fields": [{"name": "GitVersionControl", "type": "string", "doc": "", "default":
"1.0.3"}]}], "doc": ""}, {"name": "LDPCode", "type": ["null", "string"]}, {"name": "familyId",
"type": "string", "doc": ""}, {"name": "members", "type": {"type": "array", "items": {"type":
"record", "name": "PedigreeMember", "doc": "", "fields": [{"name": "pedigreeId", "type": ["null",
"int"], "doc": ""}, {"name": "isProband", "type": ["null", "boolean"], "doc": ""}, {"name":
"participantId", "type": ["null", "string"], "doc": ""}, {"name": "participantQCState", "type":
["null", {"type": "enum", "name": "ParticipantQCState", "doc": "", "symbols": ["noState",
"passedMedicalReviewReadyForInterpretation", "passedMedicalReviewNotReadyForInterpretation",
"queryToGel", "queryToGMC", "failed"]}], "doc": ""}, {"name": "gelSuperFamilyId", "type": ["null",
"string"], "doc": ""}, {"name": "sex", "type": {"type": "enum", "name": "Sex", "doc": "", "symbols":
["MALE", "FEMALE", "UNKNOWN"]}, "doc": ""}, {"name": "personKaryotypicSex", "type": ["null",
{"type": "enum", "name": "PersonKaryotipicSex", "doc": "", "symbols": ["UNKNOWN", "XX", "XY", "XO",
"XXY", "XXX", "XXYY", "XXXY", "XXXX", "XYY", "OTHER"]}], "doc": ""}, {"name": "yearOfBirth", "type":
["null", "int"], "doc": ""}, {"name": "fatherId", "type": ["null", "int"], "doc": ""}, {"name":
"motherId", "type": ["null", "int"], "doc": ""}, {"name": "superFatherId", "type": ["null", "int"],
"doc": ""}, {"name": "superMotherId", "type": ["null", "int"], "doc": ""}, {"name": "twinGroup",
"type": ["null", "int"], "doc": ""}, {"name": "monozygotic", "type": ["null", {"type": "enum",
"name": "TernaryOption", "doc": "", "symbols": ["yes", "no", "unknown"]}], "doc": ""}, {"name":
"adoptedStatus", "type": ["null", {"type": "enum", "name": "AdoptedStatus", "doc": "", "symbols":
["notadopted", "adoptedin", "adoptedout"]}], "doc": ""}, {"name": "lifeStatus", "type": ["null",
{"type": "enum", "name": "LifeStatus", "doc": "", "symbols": ["ALIVE", "ABORTED", "DECEASED",
"UNBORN", "STILLBORN", "MISCARRIAGE"]}], "doc": ""}, {"name": "consanguineousParents", "type":
["null", "TernaryOption"], "doc": ""}, {"name": "affectionStatus", "type": ["null", {"type": "enum",
"name": "AffectionStatus", "doc": "", "symbols": ["UNAFFECTED", "AFFECTED", "UNCERTAIN"]}], "doc":
""}, {"name": "disorderList", "type": ["null", {"type": "array", "items": {"type": "record", "name":
"Disorder", "doc": "", "fields": [{"name": "diseaseGroup", "type": ["null", "string"], "doc": ""},
{"name": "diseaseSubGroup", "type": ["null", "string"], "doc": ""}, {"name": "specificDisease",
"type": ["null", "string"], "doc": ""}, {"name": "ageOfOnset", "type": ["null", "float"], "doc":
""}]}}], "doc": ""}, {"name": "hpoTermList", "type": ["null", {"type": "array", "items": {"type":
"record", "name": "HpoTerm", "doc": "", "fields": [{"name": "term", "type": "string", "doc": ""},
{"name": "termPresence", "type": ["null", "TernaryOption"], "doc": ""}, {"name": "hpoBuildNumber",
"type": ["null", "string"], "doc": ""}, {"name": "modifiers", "type": ["null", {"type": "record",
"name": "HpoTermModifiers", "fields": [{"name": "laterality", "type": ["null", {"type": "enum",
"name": "Laterality", "symbols": ["RIGHT", "UNILATERAL", "BILATERAL", "LEFT"]}]}, {"name":
"progression", "type": ["null", {"type": "enum", "name": "Progression", "symbols": ["PROGRESSIVE",
"NONPROGRESSIVE"]}]}, {"name": "severity", "type": ["null", {"type": "enum", "name": "Severity",
"symbols": ["BORDERLINE", "MILD", "MODERATE", "SEVERE", "PROFOUND"]}]}, {"name": "spatialPattern",
"type": ["null", {"type": "enum", "name": "SpatialPattern", "symbols": ["DISTAL", "GENERALIZED",
"LOCALIZED", "PROXIMAL"]}]}]}], "doc": ""}, {"name": "ageOfOnset", "type": ["null", {"type": "enum",
"name": "AgeOfOnset", "symbols": ["EMBRYONAL_ONSET", "FETAL_ONSET", "NEONATAL_ONSET",
"INFANTILE_ONSET", "CHILDHOOD_ONSET", "JUVENILE_ONSET", "YOUNG_ADULT_ONSET", "LATE_ONSET",
"MIDDLE_AGE_ONSET"]}], "doc": ""}]}}], "doc": ""}, {"name": "ancestries", "type": ["null", {"type":
"record", "name": "Ancestries", "doc": "", "fields": [{"name": "mothersEthnicOrigin", "type":
["null", {"type": "enum", "name": "EthnicCategory", "doc": "", "symbols": ["D", "E", "F", "G", "A",
"B", "C", "L", "M", "N", "H", "J", "K", "P", "S", "R", "Z"]}], "doc": ""}, {"name":
"mothersOtherRelevantAncestry", "type": ["null", "string"], "doc": ""}, {"name":
"fathersEthnicOrigin", "type": ["null", "EthnicCategory"], "doc": ""}, {"name":
"fathersOtherRelevantAncestry", "type": ["null", "string"], "doc": ""}, {"name":
"chiSquare1KGenomesPhase3Pop", "type": ["null", {"type": "array", "items": {"type": "record",
"name": "ChiSquare1KGenomesPhase3Pop", "doc": "", "fields": [{"name": "kgSuperPopCategory", "type":
{"type": "enum", "name": "KgSuperPopCategory", "doc": "", "symbols": ["AFR", "AMR", "EAS", "EUR",
"SAS"]}, "doc": ""}, {"name": "kgPopCategory", "type": ["null", {"type": "enum", "name":
"KgPopCategory", "doc": "", "symbols": ["ACB", "ASW", "BEB", "CDX", "CEU", "CHB", "CHS", "CLM",
"ESN", "FIN", "GBR", "GIH", "GWD", "IBS", "ITU", "JPT", "KHV", "LWK", "MSL", "MXL", "PEL", "PJL",
"PUR", "STU", "TSI", "YRI"]}], "doc": ""}, {"name": "chiSquare", "type": "double", "doc": ""}]}}],
"doc": ""}]}], "doc": ""}, {"name": "consentStatus", "type": ["null", {"type": "record", "name":
"ConsentStatus", "doc": "", "fields": [{"name": "programmeConsent", "type": "boolean", "doc": "",
"default": false}, {"name": "primaryFindingConsent", "type": "boolean", "doc": "", "default":
false}, {"name": "secondaryFindingConsent", "type": "boolean", "doc": "", "default": false},
{"name": "carrierStatusConsent", "type": "boolean", "doc": "", "default": false}]}], "doc": ""},
{"name": "samples", "type": ["null", {"type": "array", "items": {"type": "record", "name": "Sample",
"fields": [{"name": "sampleId", "type": "string", "doc": ""}, {"name": "labSampleId", "type": "int",
"doc": ""}, {"name": "source", "type": ["null", {"type": "enum", "name": "SampleSource", "symbols":
["BLOOD", "SALIVA", "FIBROBLAST", "TISSUE"]}], "doc": ""}, {"name": "product", "type": ["null",
{"type": "enum", "name": "Product", "symbols": ["DNA", "RNA"]}], "doc": ""}, {"name":
"preparationMethod", "type": ["null", {"type": "enum", "name": "PreparationMethod", "symbols":
["EDTA", "ORAGENE", "FF", "FFPE", "CD128_SORTED_CELLS", "ASPIRATE"]}], "doc": ""}]}}], "doc": ""},
{"name": "inbreedingCoefficient", "type": ["null", {"type": "record", "name":
"InbreedingCoefficient", "doc": "", "fields": [{"name": "sampleId", "type": "string", "doc": ""},
{"name": "program", "type": "string", "doc": ""}, {"name": "version", "type": "string", "doc": ""},
{"name": "estimationMethod", "type": "string", "doc": ""}, {"name": "coefficient", "type": "double",
"doc": ""}, {"name": "standardError", "type": ["null", "double"], "doc": ""}]}], "doc": ""},
{"name": "additionalInformation", "type": ["null", {"type": "map", "values": "string"}], "doc":
""}]}}}, {"name": "analysisPanels", "type": ["null", {"type": "array", "items": {"type": "record",
"name": "AnalysisPanel", "fields": [{"name": "specificDisease", "type": "string"}, {"name":
"panelName", "type": "string"}, {"name": "panelVersion", "type": ["null", "string"]}, {"name":
"reviewOutcome", "type": "string"}, {"name": "multipleGeneticOrigins", "type": "string"}]}}]},
{"name": "diseasePenetrances", "type": ["null", {"type": "array", "items": {"type": "record",
"name": "DiseasePenetrance", "fields": [{"name": "specificDisease", "type": "string"}, {"name":
"penetrance", "type": {"type": "enum", "name": "Penetrance", "doc": "", "symbols": ["complete",
"incomplete"]}}]}}]}, {"name": "readyForAnalysis", "type": "boolean"}, {"name": "familyQCState",
"type": ["null", {"type": "enum", "name": "FamilyQCState", "doc": "", "symbols": ["noState",
"passedMedicalReviewReadyForInterpretation", "passedMedicalReviewNotReadyForInterpretation",
"queryToGel", "queryToGMC", "failed"]}]}]}, "doc": ""}, {"name": "tieredVariants", "type": {"type":
"array", "items": {"type": "record", "name": "ReportedVariant", "fields": [{"name": "chromosome",
"type": "string", "doc": ""}, {"name": "dbSnpId", "type": ["null", "string"], "doc": ""}, {"name":
"position", "type": "int", "doc": ""}, {"name": "reference", "type": "string", "doc": ""}, {"name":
"alternate", "type": "string", "doc": ""}, {"name": "calledGenotypes", "type": {"type": "array",
"items": {"type": "record", "name": "CalledGenotype", "doc": "", "fields": [{"name": "gelId",
"type": "string", "doc": ""}, {"name": "sampleId", "type": "string", "doc": ""}, {"name":
"genotype", "type": {"type": "enum", "name": "Zygosity", "doc": "", "symbols":
["reference_homozygous", "heterozygous", "alternate_homozygous", "missing",
"half_missing_reference", "half_missing_alternate", "alternate_hemizigous", "reference_hemizigous",
"unk"]}, "doc": ""}, {"name": "phaseSet", "type": ["null", "int"], "doc": ""}, {"name":
"depthReference", "type": ["null", "int"], "doc": ""}, {"name": "depthAlternate", "type": ["null",
"int"], "doc": ""}, {"name": "copyNumber", "type": ["null", "int"], "doc": ""}]}}, "doc": ""},
{"name": "reportEvents", "type": {"type": "array", "items": {"type": "record", "name":
"ReportEvent", "fields": [{"name": "reportEventId", "type": "string", "doc": ""}, {"name":
"phenotype", "type": "string", "doc": ""}, {"name": "panelName", "type": ["null", "string"], "doc":
""}, {"name": "panelVersion", "type": ["null", "string"], "doc": ""}, {"name": "modeOfInheritance",
"type": {"type": "enum", "name": "ReportedModeOfInheritance", "doc": "", "symbols": ["monoallelic",
"monoallelic_not_imprinted", "monoallelic_maternally_imprinted", "monoallelic_paternally_imprinted",
"biallelic", "monoallelic_and_biallelic", "monoallelic_and_more_severe_biallelic",
"xlinked_biallelic", "xlinked_monoallelic", "mitochondrial", "unknown"]}, "doc": ""}, {"name":
"genomicFeature", "type": {"type": "record", "name": "GenomicFeature", "fields": [{"name":
"featureType", "type": {"type": "enum", "name": "FeatureTypes", "symbols": ["RegulatoryRegion",
"Gene", "Transcript"]}, "doc": ""}, {"name": "ensemblId", "type": "string", "doc": ""}, {"name":
"hgnc", "type": ["null", "string"], "doc": ""}, {"name": "otherIds", "type": ["null", {"type":
"map", "values": "string"}], "doc": ""}]}, "doc": ""}, {"name": "penetrance", "type":
"org.gel.models.participant.avro.Penetrance", "doc": ""}, {"name": "score", "type": "float", "doc":
""}, {"name": "vendorSpecificScores", "type": ["null", {"type": "map", "values": "float"}], "doc":
""}, {"name": "variantClassification", "type": ["null", {"type": "enum", "name":
"VariantClassification", "doc": "", "symbols": ["pathogenic_variant", "likely_pathogenic_variant",
"variant_of_unknown_clinical_significance", "likely_benign_variant", "benign_variant",
"not_assessed"]}], "doc": ""}, {"name": "fullyExplainsPhenotype", "type": ["null", "boolean"],
"doc": ""}, {"name": "groupOfVariants", "type": ["null", "int"], "doc": ""}, {"name":
"eventJustification", "type": ["null", "string"], "doc": ""}, {"name": "tier", "type": ["null",
{"type": "enum", "name": "Tier", "doc": "", "symbols": ["NONE", "TIER1", "TIER2", "TIER3"]}], "doc":
""}]}}, "doc": ""}, {"name": "additionalTextualVariantAnnotations", "type": ["null", {"type": "map",
"values": "string"}], "doc": ""}, {"name": "evidenceIds", "type": ["null", {"type": "map", "values":
"string"}], "doc": ""}, {"name": "additionalNumericVariantAnnotations", "type": ["null", {"type":
"map", "values": "float"}], "doc": ""}, {"name": "comments", "type": ["null", {"type": "array",
"items": "string"}], "doc": ""}]}}, "doc": ""}, {"name": "tieringVersion", "type": "string", "doc":
""}, {"name": "complexGeneticPhenomena", "type": ["null", {"type": "enum", "name":
"ComplexGeneticPhenomena", "symbols": ["mosaicism", "monosomy", "disomy", "uniparental_disomy",
"trisomy", "other_aneuploidy"]}], "doc": ""}, {"name": "otherFamilyHistory", "type": ["null",
{"type": "record", "name": "OtherFamilyHistory", "doc": "", "fields": [{"name":
"maternalFamilyHistory", "type": ["null", {"type": "array", "items": "string"}], "doc": ""},
{"name": "paternalFamilyHistory", "type": ["null", {"type": "array", "items": "string"}], "doc":
""}]}], "doc": ""}, {"name": "analysisReturnUri", "type": "string", "doc": ""}, {"name":
"analysisVersion", "type": ["null", "string"], "doc": ""}, {"name": "genePanelsCoverage", "type":
["null", {"type": "map", "values": {"type": "map", "values": {"type": "map", "values": "float"}}}],
"doc": ""}, {"name": "additionalInfo", "type": ["null", {"type": "map", "values": "string"}], "doc":
""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"additionalInfo",
"analysisReturnUri",
"analysisVersion",
"annotationFile",
"bams",
"bigWigs",
"complexGeneticPhenomena",
"genePanelsCoverage",
"internalStudyId",
"interpretationRequestId",
"interpretationRequestVersion",
"otherFamilyHistory",
"otherFiles",
"pedigree",
"pedigreeDiagram",
"tieredVariants",
"tieringVersion",
"vcfs",
"versionControl",
"workspace",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {
'annotationFile': File,
'bams': File,
'bigWigs': File,
'otherFamilyHistory': OtherFamilyHistory,
'otherFiles': File,
'pedigree': Pedigree,
'pedigreeDiagram': File,
'tieredVariants': ReportedVariant,
'vcfs': File,
'versionControl': ReportVersionControl,
}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {
'annotationFile': File,
'bams': File,
'bigWigs': File,
'otherFamilyHistory': OtherFamilyHistory,
'otherFiles': File,
'pedigree': Pedigree,
'pedigreeDiagram': File,
'tieredVariants': ReportedVariant,
'vcfs': File,
'versionControl': ReportVersionControl,
}
return embeddedTypes[fieldName]
__slots__ = [
'additionalInfo', 'analysisReturnUri', 'analysisVersion',
'annotationFile', 'bams', 'bigWigs', 'cellbaseVersion',
'complexGeneticPhenomena', 'genePanelsCoverage',
'genomeAssemblyVersion', 'internalStudyId', 'interpretGenome',
'interpretationRequestId', 'interpretationRequestVersion',
'otherFamilyHistory', 'otherFiles', 'pedigree',
'pedigreeDiagram', 'tieredVariants', 'tieringVersion', 'vcfs',
'versionControl', 'workspace'
]
def __init__(self, **kwargs):
self.additionalInfo = kwargs.get(
'additionalInfo', None)
self.analysisReturnUri = kwargs.get(
'analysisReturnUri', None)
self.analysisVersion = kwargs.get(
'analysisVersion', None)
self.annotationFile = kwargs.get(
'annotationFile', None)
self.bams = kwargs.get(
'bams', None)
self.bigWigs = kwargs.get(
'bigWigs', None)
self.cellbaseVersion = kwargs.get(
'cellbaseVersion', '4.0')
self.complexGeneticPhenomena = kwargs.get(
'complexGeneticPhenomena', None)
self.genePanelsCoverage = kwargs.get(
'genePanelsCoverage', None)
self.genomeAssemblyVersion = kwargs.get(
'genomeAssemblyVersion', 'GRCh37.p13')
self.internalStudyId = kwargs.get(
'internalStudyId', None)
self.interpretGenome = kwargs.get(
'interpretGenome', False)
self.interpretationRequestId = kwargs.get(
'interpretationRequestId', None)
self.interpretationRequestVersion = kwargs.get(
'interpretationRequestVersion', None)
self.otherFamilyHistory = kwargs.get(
'otherFamilyHistory', None)
self.otherFiles = kwargs.get(
'otherFiles', None)
self.pedigree = kwargs.get(
'pedigree', Pedigree())
self.pedigreeDiagram = kwargs.get(
'pedigreeDiagram', None)
self.tieredVariants = kwargs.get(
'tieredVariants', None)
self.tieringVersion = kwargs.get(
'tieringVersion', None)
self.vcfs = kwargs.get(
'vcfs', None)
self.versionControl = kwargs.get(
'versionControl', ReportVersionControl())
self.workspace = kwargs.get(
'workspace', None)
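# Illustrative sketch: fields with Avro defaults are pre-populated, so
# a bare constructor already carries the pipeline versions baked into
# the schema, while required inputs such as bams and vcfs start as
# None and must be supplied by the caller:
#
#     request = InterpretationRequestRD()
#     assert request.genomeAssemblyVersion == "GRCh37.p13"
#     assert request.cellbaseVersion == "4.0"
#     assert request.interpretGenome is False
#     assert request.bams is None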
class InterpretedGenomeRD(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "InterpretedGenomeRD", "namespace": "org.gel.models.report.avro",
"fields": [{"name": "versionControl", "type": {"type": "record", "name": "ReportVersionControl",
"fields": [{"name": "gitVersionControl", "type": "string", "doc": "", "default": "4.2.0"}]}, "doc":
""}, {"name": "interpretationRequestId", "type": "string", "doc": ""}, {"name": "analysisId",
"type": "string", "doc": ""}, {"name": "companyName", "type": "string", "doc": ""}, {"name":
"reportUri", "type": "string", "doc": ""}, {"name": "reportUrl", "type": "string", "doc": ""},
{"name": "reportedVariants", "type": {"type": "array", "items": {"type": "record", "name":
"ReportedVariant", "fields": [{"name": "chromosome", "type": "string", "doc": ""}, {"name":
"dbSnpId", "type": ["null", "string"], "doc": ""}, {"name": "position", "type": "int", "doc": ""},
{"name": "reference", "type": "string", "doc": ""}, {"name": "alternate", "type": "string", "doc":
""}, {"name": "calledGenotypes", "type": {"type": "array", "items": {"type": "record", "name":
"CalledGenotype", "doc": "", "fields": [{"name": "gelId", "type": "string", "doc": ""}, {"name":
"sampleId", "type": "string", "doc": ""}, {"name": "genotype", "type": {"type": "enum", "name":
"Zygosity", "doc": "", "symbols": ["reference_homozygous", "heterozygous", "alternate_homozygous",
"missing", "half_missing_reference", "half_missing_alternate", "alternate_hemizigous",
"reference_hemizigous", "unk"]}, "doc": ""}, {"name": "phaseSet", "type": ["null", "int"], "doc":
""}, {"name": "depthReference", "type": ["null", "int"], "doc": ""}, {"name": "depthAlternate",
"type": ["null", "int"], "doc": ""}, {"name": "copyNumber", "type": ["null", "int"], "doc": ""}]}},
"doc": ""}, {"name": "reportEvents", "type": {"type": "array", "items": {"type": "record", "name":
"ReportEvent", "fields": [{"name": "reportEventId", "type": "string", "doc": ""}, {"name":
"phenotype", "type": "string", "doc": ""}, {"name": "panelName", "type": ["null", "string"], "doc":
""}, {"name": "panelVersion", "type": ["null", "string"], "doc": ""}, {"name": "modeOfInheritance",
"type": {"type": "enum", "name": "ReportedModeOfInheritance", "doc": "", "symbols": ["monoallelic",
"monoallelic_not_imprinted", "monoallelic_maternally_imprinted", "monoallelic_paternally_imprinted",
"biallelic", "monoallelic_and_biallelic", "monoallelic_and_more_severe_biallelic",
"xlinked_biallelic", "xlinked_monoallelic", "mitochondrial", "unknown"]}, "doc": ""}, {"name":
"genomicFeature", "type": {"type": "record", "name": "GenomicFeature", "fields": [{"name":
"featureType", "type": {"type": "enum", "name": "FeatureTypes", "symbols": ["RegulatoryRegion",
"Gene", "Transcript"]}, "doc": ""}, {"name": "ensemblId", "type": "string", "doc": ""}, {"name":
"hgnc", "type": ["null", "string"], "doc": ""}, {"name": "otherIds", "type": ["null", {"type":
"map", "values": "string"}], "doc": ""}]}, "doc": ""}, {"name": "penetrance", "type": {"type":
"enum", "name": "Penetrance", "namespace": "org.gel.models.participant.avro", "doc": "", "symbols":
["complete", "incomplete"]}, "doc": ""}, {"name": "score", "type": "float", "doc": ""}, {"name":
"vendorSpecificScores", "type": ["null", {"type": "map", "values": "float"}], "doc": ""}, {"name":
"variantClassification", "type": ["null", {"type": "enum", "name": "VariantClassification", "doc":
"", "symbols": ["pathogenic_variant", "likely_pathogenic_variant",
"variant_of_unknown_clinical_significance", "likely_benign_variant", "benign_variant",
"not_assessed"]}], "doc": ""}, {"name": "fullyExplainsPhenotype", "type": ["null", "boolean"],
"doc": ""}, {"name": "groupOfVariants", "type": ["null", "int"], "doc": ""}, {"name":
"eventJustification", "type": ["null", "string"], "doc": ""}, {"name": "tier", "type": ["null",
{"type": "enum", "name": "Tier", "doc": "", "symbols": ["NONE", "TIER1", "TIER2", "TIER3"]}], "doc":
""}]}}, "doc": ""}, {"name": "additionalTextualVariantAnnotations", "type": ["null", {"type": "map",
"values": "string"}], "doc": ""}, {"name": "evidenceIds", "type": ["null", {"type": "map", "values":
"string"}], "doc": ""}, {"name": "additionalNumericVariantAnnotations", "type": ["null", {"type":
"map", "values": "float"}], "doc": ""}, {"name": "comments", "type": ["null", {"type": "array",
"items": "string"}], "doc": ""}]}}, "doc": ""}, {"name": "referenceDatabasesVersions", "type":
{"type": "map", "values": "string"}, "doc": ""}, {"name": "softwareVersions", "type": {"type":
"map", "values": "string"}, "doc": ""}, {"name": "reportedStructuralVariants", "type": ["null",
{"type": "array", "items": {"type": "record", "name": "ReportedStructuralVariant", "fields":
[{"name": "chromosome", "type": "string", "doc": ""}, {"name": "start", "type": "int", "doc": ""},
{"name": "end", "type": "int", "doc": ""}, {"name": "type", "type": "string", "doc": ""}, {"name":
"reference", "type": "string", "doc": ""}, {"name": "alternate", "type": "string", "doc": ""},
{"name": "calledGenotypes", "type": {"type": "array", "items": "CalledGenotype"}}, {"name":
"reportEvents", "type": {"type": "array", "items": "ReportEvent"}, "doc": ""}, {"name":
"additionalTextualVariantAnnotations", "type": ["null", {"type": "map", "values": "string"}], "doc":
""}, {"name": "evidenceIds", "type": ["null", {"type": "map", "values": "string"}], "doc": ""},
{"name": "additionalNumericVariantAnnotations", "type": ["null", {"type": "map", "values":
"float"}], "doc": ""}, {"name": "comments", "type": ["null", {"type": "array", "items": "string"}],
"doc": ""}]}}], "doc": ""}, {"name": "comments", "type": ["null", {"type": "array", "items":
"string"}], "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"analysisId",
"comments",
"companyName",
"interpretationRequestId",
"referenceDatabasesVersions",
"reportUri",
"reportUrl",
"reportedStructuralVariants",
"reportedVariants",
"softwareVersions",
"versionControl",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {
'reportedStructuralVariants': ReportedStructuralVariant,
'reportedVariants': ReportedVariant,
'versionControl': ReportVersionControl,
}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {
'reportedStructuralVariants': ReportedStructuralVariant,
'reportedVariants': ReportedVariant,
'versionControl': ReportVersionControl,
}
return embeddedTypes[fieldName]
__slots__ = [
'analysisId', 'comments', 'companyName',
'interpretationRequestId', 'referenceDatabasesVersions',
'reportUri', 'reportUrl', 'reportedStructuralVariants',
'reportedVariants', 'softwareVersions', 'versionControl'
]
def __init__(self, **kwargs):
self.analysisId = kwargs.get(
'analysisId', None)
self.comments = kwargs.get(
'comments', None)
self.companyName = kwargs.get(
'companyName', None)
self.interpretationRequestId = kwargs.get(
'interpretationRequestId', None)
self.referenceDatabasesVersions = kwargs.get(
'referenceDatabasesVersions', None)
self.reportUri = kwargs.get(
'reportUri', None)
self.reportUrl = kwargs.get(
'reportUrl', None)
self.reportedStructuralVariants = kwargs.get(
'reportedStructuralVariants', None)
self.reportedVariants = kwargs.get(
'reportedVariants', None)
self.softwareVersions = kwargs.get(
'softwareVersions', None)
self.versionControl = kwargs.get(
'versionControl', ReportVersionControl())
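# Illustrative sketch (not part of the generated schema): the embedded-type
# helpers map field names to the nested ProtocolElement classes, and the
# constructor accepts any subset of fields as keyword arguments. The IDs
# below are hypothetical placeholders.
def _example_interpreted_genome_rd():
    assert InterpretedGenomeRD.isEmbeddedType('reportedVariants')
    assert InterpretedGenomeRD.getEmbeddedType('versionControl') is ReportVersionControl
    return InterpretedGenomeRD(
        interpretationRequestId='IR-1',  # hypothetical identifier
        analysisId='ANALYSIS-1',         # hypothetical identifier
    )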
class KgPopCategory(object):
"""
    1000 Genomes (1K) population
"""
ACB = "ACB"
ASW = "ASW"
BEB = "BEB"
CDX = "CDX"
CEU = "CEU"
CHB = "CHB"
CHS = "CHS"
CLM = "CLM"
ESN = "ESN"
FIN = "FIN"
GBR = "GBR"
GIH = "GIH"
GWD = "GWD"
IBS = "IBS"
ITU = "ITU"
JPT = "JPT"
KHV = "KHV"
LWK = "LWK"
MSL = "MSL"
MXL = "MXL"
PEL = "PEL"
PJL = "PJL"
PUR = "PUR"
STU = "STU"
TSI = "TSI"
YRI = "YRI"
def __hash__(self):
return str(self).__hash__()
class KgSuperPopCategory(object):
"""
    1000 Genomes (1K) super population
"""
AFR = "AFR"
AMR = "AMR"
EAS = "EAS"
EUR = "EUR"
SAS = "SAS"
def __hash__(self):
return str(self).__hash__()
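# Illustrative sketch: these enum-like classes expose each symbol as a plain
# string constant, so values compare and serialise as ordinary strings.
def _example_super_populations():
    assert KgSuperPopCategory.EUR == "EUR"
    return [KgSuperPopCategory.AFR, KgSuperPopCategory.AMR,
            KgSuperPopCategory.EAS, KgSuperPopCategory.EUR,
            KgSuperPopCategory.SAS]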
class Laterality(object):
"""
No documentation
"""
RIGHT = "RIGHT"
UNILATERAL = "UNILATERAL"
BILATERAL = "BILATERAL"
LEFT = "LEFT"
def __hash__(self):
return str(self).__hash__()
class LifeStatus(object):
"""
Life Status
"""
ALIVE = "ALIVE"
ABORTED = "ABORTED"
DECEASED = "DECEASED"
UNBORN = "UNBORN"
STILLBORN = "STILLBORN"
MISCARRIAGE = "MISCARRIAGE"
def __hash__(self):
return str(self).__hash__()
class MatchedSamples(ProtocolElement):
"""
    This defines a pair of germline and tumour samples; this pair
    should/must be analysed together
"""
_schemaSource = """
{"type": "record", "name": "MatchedSamples", "namespace": "org.gel.models.participant.avro", "doc":
"", "fields": [{"name": "germlineSampleId", "type": ["null", "string"], "doc": ""}, {"name":
"tumourSampleId", "type": ["null", "string"], "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"germlineSampleId",
"tumourSampleId",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'germlineSampleId', 'tumourSampleId'
]
def __init__(self, **kwargs):
self.germlineSampleId = kwargs.get(
'germlineSampleId', None)
self.tumourSampleId = kwargs.get(
'tumourSampleId', None)
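# Illustrative sketch: pairing a germline and a tumour sample. The sample
# identifiers are hypothetical placeholders; MatchedSamples embeds no other
# ProtocolElement types.
def _example_matched_samples():
    assert not MatchedSamples.isEmbeddedType('germlineSampleId')
    return MatchedSamples(
        germlineSampleId='GERMLINE-1',  # hypothetical sample ID
        tumourSampleId='TUMOUR-1',      # hypothetical sample ID
    )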
class Method(object):
"""
No documentation
"""
RESECTION = "RESECTION"
BIOPSY = "BIOPSY"
BLOOD = "BLOOD"
def __hash__(self):
return str(self).__hash__()
class ModifiedVariant(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "ModifiedVariant", "namespace": "org.gel.models.report.avro", "fields":
[{"name": "previousVariant", "type": {"type": "record", "name": "ReportedVariant", "fields":
[{"name": "chromosome", "type": "string", "doc": ""}, {"name": "dbSnpId", "type": ["null",
"string"], "doc": ""}, {"name": "position", "type": "int", "doc": ""}, {"name": "reference", "type":
"string", "doc": ""}, {"name": "alternate", "type": "string", "doc": ""}, {"name":
"calledGenotypes", "type": {"type": "array", "items": {"type": "record", "name": "CalledGenotype",
"doc": "", "fields": [{"name": "gelId", "type": "string", "doc": ""}, {"name": "sampleId", "type":
"string", "doc": ""}, {"name": "genotype", "type": {"type": "enum", "name": "Zygosity", "doc": "",
"symbols": ["reference_homozygous", "heterozygous", "alternate_homozygous", "missing",
"half_missing_reference", "half_missing_alternate", "alternate_hemizigous", "reference_hemizigous",
"unk"]}, "doc": ""}, {"name": "phaseSet", "type": ["null", "int"], "doc": ""}, {"name":
"depthReference", "type": ["null", "int"], "doc": ""}, {"name": "depthAlternate", "type": ["null",
"int"], "doc": ""}, {"name": "copyNumber", "type": ["null", "int"], "doc": ""}]}}, "doc": ""},
{"name": "reportEvents", "type": {"type": "array", "items": {"type": "record", "name":
"ReportEvent", "fields": [{"name": "reportEventId", "type": "string", "doc": ""}, {"name":
"phenotype", "type": "string", "doc": ""}, {"name": "panelName", "type": ["null", "string"], "doc":
""}, {"name": "panelVersion", "type": ["null", "string"], "doc": ""}, {"name": "modeOfInheritance",
"type": {"type": "enum", "name": "ReportedModeOfInheritance", "doc": "", "symbols": ["monoallelic",
"monoallelic_not_imprinted", "monoallelic_maternally_imprinted", "monoallelic_paternally_imprinted",
"biallelic", "monoallelic_and_biallelic", "monoallelic_and_more_severe_biallelic",
"xlinked_biallelic", "xlinked_monoallelic", "mitochondrial", "unknown"]}, "doc": ""}, {"name":
"genomicFeature", "type": {"type": "record", "name": "GenomicFeature", "fields": [{"name":
"featureType", "type": {"type": "enum", "name": "FeatureTypes", "symbols": ["RegulatoryRegion",
"Gene", "Transcript"]}, "doc": ""}, {"name": "ensemblId", "type": "string", "doc": ""}, {"name":
"hgnc", "type": ["null", "string"], "doc": ""}, {"name": "otherIds", "type": ["null", {"type":
"map", "values": "string"}], "doc": ""}]}, "doc": ""}, {"name": "penetrance", "type": {"type":
"enum", "name": "Penetrance", "namespace": "org.gel.models.participant.avro", "doc": "", "symbols":
["complete", "incomplete"]}, "doc": ""}, {"name": "score", "type": "float", "doc": ""}, {"name":
"vendorSpecificScores", "type": ["null", {"type": "map", "values": "float"}], "doc": ""}, {"name":
"variantClassification", "type": ["null", {"type": "enum", "name": "VariantClassification", "doc":
"", "symbols": ["pathogenic_variant", "likely_pathogenic_variant",
"variant_of_unknown_clinical_significance", "likely_benign_variant", "benign_variant",
"not_assessed"]}], "doc": ""}, {"name": "fullyExplainsPhenotype", "type": ["null", "boolean"],
"doc": ""}, {"name": "groupOfVariants", "type": ["null", "int"], "doc": ""}, {"name":
"eventJustification", "type": ["null", "string"], "doc": ""}, {"name": "tier", "type": ["null",
{"type": "enum", "name": "Tier", "doc": "", "symbols": ["NONE", "TIER1", "TIER2", "TIER3"]}], "doc":
""}]}}, "doc": ""}, {"name": "additionalTextualVariantAnnotations", "type": ["null", {"type": "map",
"values": "string"}], "doc": ""}, {"name": "evidenceIds", "type": ["null", {"type": "map", "values":
"string"}], "doc": ""}, {"name": "additionalNumericVariantAnnotations", "type": ["null", {"type":
"map", "values": "float"}], "doc": ""}, {"name": "comments", "type": ["null", {"type": "array",
"items": "string"}], "doc": ""}]}}, {"name": "modifiedVariant", "type": "ReportedVariant"}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"modifiedVariant",
"previousVariant",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {
'modifiedVariant': ReportedVariant,
'previousVariant': ReportedVariant,
}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {
'modifiedVariant': ReportedVariant,
'previousVariant': ReportedVariant,
}
return embeddedTypes[fieldName]
__slots__ = [
'modifiedVariant', 'previousVariant'
]
def __init__(self, **kwargs):
self.modifiedVariant = kwargs.get(
'modifiedVariant', ReportedVariant())
self.previousVariant = kwargs.get(
'previousVariant', ReportedVariant())
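# Illustrative sketch: ModifiedVariant pairs a variant as previously reported
# with its modified form. The coordinates and alleles are hypothetical;
# ReportedVariant is generated elsewhere in this module.
def _example_modified_variant():
    before = ReportedVariant(chromosome='1', position=1000,
                             reference='A', alternate='T')
    after = ReportedVariant(chromosome='1', position=1000,
                            reference='A', alternate='G')
    return ModifiedVariant(previousVariant=before, modifiedVariant=after)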
class OtherFamilyHistory(ProtocolElement):
"""
    Family history for secondary findings: arrays of strings
    describing discrete family history phenotypes. Usually
    `EndocrineTumours`, `colorectal`, `BreastOvarian` and
    `HDOrStroke`, but other values may be used
"""
_schemaSource = """
{"type": "record", "name": "OtherFamilyHistory", "namespace": "org.gel.models.report.avro", "doc":
"", "fields": [{"name": "maternalFamilyHistory", "type": ["null", {"type": "array", "items":
"string"}], "doc": ""}, {"name": "paternalFamilyHistory", "type": ["null", {"type": "array",
"items": "string"}], "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"maternalFamilyHistory",
"paternalFamilyHistory",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'maternalFamilyHistory', 'paternalFamilyHistory'
]
def __init__(self, **kwargs):
self.maternalFamilyHistory = kwargs.get(
'maternalFamilyHistory', None)
self.paternalFamilyHistory = kwargs.get(
'paternalFamilyHistory', None)
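# Illustrative sketch: each side of the family history is either None or an
# array of free-text phenotype labels; the labels used here are the usual
# ones named in the docstring above.
def _example_other_family_history():
    return OtherFamilyHistory(
        maternalFamilyHistory=['EndocrineTumours', 'BreastOvarian'],
        paternalFamilyHistory=None,  # no paternal history recorded
    )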
class ParticipantQCState(object):
"""
    Participant QC state
"""
noState = "noState"
passedMedicalReviewReadyForInterpretation = "passedMedicalReviewReadyForInterpretation"
passedMedicalReviewNotReadyForInterpretation = "passedMedicalReviewNotReadyForInterpretation"
queryToGel = "queryToGel"
queryToGMC = "queryToGMC"
failed = "failed"
def __hash__(self):
return str(self).__hash__()
class Pedigree(ProtocolElement):
"""
    This represents a family with its associated phenotypes, as
    present in the record RDParticipant
"""
_schemaSource = """
{"type": "record", "name": "Pedigree", "namespace": "org.gel.models.participant.avro", "doc": "",
"fields": [{"name": "versionControl", "type": ["null", {"type": "record", "name": "VersionControl",
"fields": [{"name": "GitVersionControl", "type": "string", "doc": "", "default": "1.0.3"}]}], "doc":
""}, {"name": "LDPCode", "type": ["null", "string"]}, {"name": "familyId", "type": "string", "doc":
""}, {"name": "members", "type": {"type": "array", "items": {"type": "record", "name":
"PedigreeMember", "doc": "", "fields": [{"name": "pedigreeId", "type": ["null", "int"], "doc": ""},
{"name": "isProband", "type": ["null", "boolean"], "doc": ""}, {"name": "participantId", "type":
["null", "string"], "doc": ""}, {"name": "participantQCState", "type": ["null", {"type": "enum",
"name": "ParticipantQCState", "doc": "", "symbols": ["noState",
"passedMedicalReviewReadyForInterpretation", "passedMedicalReviewNotReadyForInterpretation",
"queryToGel", "queryToGMC", "failed"]}], "doc": ""}, {"name": "gelSuperFamilyId", "type": ["null",
"string"], "doc": ""}, {"name": "sex", "type": {"type": "enum", "name": "Sex", "doc": "", "symbols":
["MALE", "FEMALE", "UNKNOWN"]}, "doc": ""}, {"name": "personKaryotypicSex", "type": ["null",
{"type": "enum", "name": "PersonKaryotipicSex", "doc": "", "symbols": ["UNKNOWN", "XX", "XY", "XO",
"XXY", "XXX", "XXYY", "XXXY", "XXXX", "XYY", "OTHER"]}], "doc": ""}, {"name": "yearOfBirth", "type":
["null", "int"], "doc": ""}, {"name": "fatherId", "type": ["null", "int"], "doc": ""}, {"name":
"motherId", "type": ["null", "int"], "doc": ""}, {"name": "superFatherId", "type": ["null", "int"],
"doc": ""}, {"name": "superMotherId", "type": ["null", "int"], "doc": ""}, {"name": "twinGroup",
"type": ["null", "int"], "doc": ""}, {"name": "monozygotic", "type": ["null", {"type": "enum",
"name": "TernaryOption", "doc": "", "symbols": ["yes", "no", "unknown"]}], "doc": ""}, {"name":
"adoptedStatus", "type": ["null", {"type": "enum", "name": "AdoptedStatus", "doc": "", "symbols":
["notadopted", "adoptedin", "adoptedout"]}], "doc": ""}, {"name": "lifeStatus", "type": ["null",
{"type": "enum", "name": "LifeStatus", "doc": "", "symbols": ["ALIVE", "ABORTED", "DECEASED",
"UNBORN", "STILLBORN", "MISCARRIAGE"]}], "doc": ""}, {"name": "consanguineousParents", "type":
["null", "TernaryOption"], "doc": ""}, {"name": "affectionStatus", "type": ["null", {"type": "enum",
"name": "AffectionStatus", "doc": "", "symbols": ["UNAFFECTED", "AFFECTED", "UNCERTAIN"]}], "doc":
""}, {"name": "disorderList", "type": ["null", {"type": "array", "items": {"type": "record", "name":
"Disorder", "doc": "", "fields": [{"name": "diseaseGroup", "type": ["null", "string"], "doc": ""},
{"name": "diseaseSubGroup", "type": ["null", "string"], "doc": ""}, {"name": "specificDisease",
"type": ["null", "string"], "doc": ""}, {"name": "ageOfOnset", "type": ["null", "float"], "doc":
""}]}}], "doc": ""}, {"name": "hpoTermList", "type": ["null", {"type": "array", "items": {"type":
"record", "name": "HpoTerm", "doc": "", "fields": [{"name": "term", "type": "string", "doc": ""},
{"name": "termPresence", "type": ["null", "TernaryOption"], "doc": ""}, {"name": "hpoBuildNumber",
"type": ["null", "string"], "doc": ""}, {"name": "modifiers", "type": ["null", {"type": "record",
"name": "HpoTermModifiers", "fields": [{"name": "laterality", "type": ["null", {"type": "enum",
"name": "Laterality", "symbols": ["RIGHT", "UNILATERAL", "BILATERAL", "LEFT"]}]}, {"name":
"progression", "type": ["null", {"type": "enum", "name": "Progression", "symbols": ["PROGRESSIVE",
"NONPROGRESSIVE"]}]}, {"name": "severity", "type": ["null", {"type": "enum", "name": "Severity",
"symbols": ["BORDERLINE", "MILD", "MODERATE", "SEVERE", "PROFOUND"]}]}, {"name": "spatialPattern",
"type": ["null", {"type": "enum", "name": "SpatialPattern", "symbols": ["DISTAL", "GENERALIZED",
"LOCALIZED", "PROXIMAL"]}]}]}], "doc": ""}, {"name": "ageOfOnset", "type": ["null", {"type": "enum",
"name": "AgeOfOnset", "symbols": ["EMBRYONAL_ONSET", "FETAL_ONSET", "NEONATAL_ONSET",
"INFANTILE_ONSET", "CHILDHOOD_ONSET", "JUVENILE_ONSET", "YOUNG_ADULT_ONSET", "LATE_ONSET",
"MIDDLE_AGE_ONSET"]}], "doc": ""}]}}], "doc": ""}, {"name": "ancestries", "type": ["null", {"type":
"record", "name": "Ancestries", "doc": "", "fields": [{"name": "mothersEthnicOrigin", "type":
["null", {"type": "enum", "name": "EthnicCategory", "doc": "", "symbols": ["D", "E", "F", "G", "A",
"B", "C", "L", "M", "N", "H", "J", "K", "P", "S", "R", "Z"]}], "doc": ""}, {"name":
"mothersOtherRelevantAncestry", "type": ["null", "string"], "doc": ""}, {"name":
"fathersEthnicOrigin", "type": ["null", "EthnicCategory"], "doc": ""}, {"name":
"fathersOtherRelevantAncestry", "type": ["null", "string"], "doc": ""}, {"name":
"chiSquare1KGenomesPhase3Pop", "type": ["null", {"type": "array", "items": {"type": "record",
"name": "ChiSquare1KGenomesPhase3Pop", "doc": "", "fields": [{"name": "kgSuperPopCategory", "type":
{"type": "enum", "name": "KgSuperPopCategory", "doc": "", "symbols": ["AFR", "AMR", "EAS", "EUR",
"SAS"]}, "doc": ""}, {"name": "kgPopCategory", "type": ["null", {"type": "enum", "name":
"KgPopCategory", "doc": "", "symbols": ["ACB", "ASW", "BEB", "CDX", "CEU", "CHB", "CHS", "CLM",
"ESN", "FIN", "GBR", "GIH", "GWD", "IBS", "ITU", "JPT", "KHV", "LWK", "MSL", "MXL", "PEL", "PJL",
"PUR", "STU", "TSI", "YRI"]}], "doc": ""}, {"name": "chiSquare", "type": "double", "doc": ""}]}}],
"doc": ""}]}], "doc": ""}, {"name": "consentStatus", "type": ["null", {"type": "record", "name":
"ConsentStatus", "doc": "", "fields": [{"name": "programmeConsent", "type": "boolean", "doc": "",
"default": false}, {"name": "primaryFindingConsent", "type": "boolean", "doc": "", "default":
false}, {"name": "secondaryFindingConsent", "type": "boolean", "doc": "", "default": false},
{"name": "carrierStatusConsent", "type": "boolean", "doc": "", "default": false}]}], "doc": ""},
{"name": "samples", "type": ["null", {"type": "array", "items": {"type": "record", "name": "Sample",
"fields": [{"name": "sampleId", "type": "string", "doc": ""}, {"name": "labSampleId", "type": "int",
"doc": ""}, {"name": "source", "type": ["null", {"type": "enum", "name": "SampleSource", "symbols":
["BLOOD", "SALIVA", "FIBROBLAST", "TISSUE"]}], "doc": ""}, {"name": "product", "type": ["null",
{"type": "enum", "name": "Product", "symbols": ["DNA", "RNA"]}], "doc": ""}, {"name":
"preparationMethod", "type": ["null", {"type": "enum", "name": "PreparationMethod", "symbols":
["EDTA", "ORAGENE", "FF", "FFPE", "CD128_SORTED_CELLS", "ASPIRATE"]}], "doc": ""}]}}], "doc": ""},
{"name": "inbreedingCoefficient", "type": ["null", {"type": "record", "name":
"InbreedingCoefficient", "doc": "", "fields": [{"name": "sampleId", "type": "string", "doc": ""},
{"name": "program", "type": "string", "doc": ""}, {"name": "version", "type": "string", "doc": ""},
{"name": "estimationMethod", "type": "string", "doc": ""}, {"name": "coefficient", "type": "double",
"doc": ""}, {"name": "standardError", "type": ["null", "double"], "doc": ""}]}], "doc": ""},
{"name": "additionalInformation", "type": ["null", {"type": "map", "values": "string"}], "doc":
""}]}}}, {"name": "analysisPanels", "type": ["null", {"type": "array", "items": {"type": "record",
"name": "AnalysisPanel", "fields": [{"name": "specificDisease", "type": "string"}, {"name":
"panelName", "type": "string"}, {"name": "panelVersion", "type": ["null", "string"]}, {"name":
"reviewOutcome", "type": "string"}, {"name": "multipleGeneticOrigins", "type": "string"}]}}]},
{"name": "diseasePenetrances", "type": ["null", {"type": "array", "items": {"type": "record",
"name": "DiseasePenetrance", "fields": [{"name": "specificDisease", "type": "string"}, {"name":
"penetrance", "type": {"type": "enum", "name": "Penetrance", "doc": "", "symbols": ["complete",
"incomplete"]}}]}}]}, {"name": "readyForAnalysis", "type": "boolean"}, {"name": "familyQCState",
"type": ["null", {"type": "enum", "name": "FamilyQCState", "doc": "", "symbols": ["noState",
"passedMedicalReviewReadyForInterpretation", "passedMedicalReviewNotReadyForInterpretation",
"queryToGel", "queryToGMC", "failed"]}]}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"LDPCode",
"analysisPanels",
"diseasePenetrances",
"familyId",
"familyQCState",
"members",
"readyForAnalysis",
"versionControl",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {
'analysisPanels': AnalysisPanel,
'diseasePenetrances': DiseasePenetrance,
'members': PedigreeMember,
'versionControl': VersionControl,
}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {
'analysisPanels': AnalysisPanel,
'diseasePenetrances': DiseasePenetrance,
'members': PedigreeMember,
'versionControl': VersionControl,
}
return embeddedTypes[fieldName]
__slots__ = [
'LDPCode', 'analysisPanels', 'diseasePenetrances', 'familyId',
'familyQCState', 'members', 'readyForAnalysis',
'versionControl'
]
def __init__(self, **kwargs):
self.LDPCode = kwargs.get(
'LDPCode', None)
self.analysisPanels = kwargs.get(
'analysisPanels', None)
self.diseasePenetrances = kwargs.get(
'diseasePenetrances', None)
self.familyId = kwargs.get(
'familyId', None)
self.familyQCState = kwargs.get(
'familyQCState', None)
self.members = kwargs.get(
'members', None)
self.readyForAnalysis = kwargs.get(
'readyForAnalysis', None)
self.versionControl = kwargs.get(
'versionControl', None)
class PedigreeMember(ProtocolElement):
"""
    This defines an RD participant (demographics and pedigree
    information)
"""
_schemaSource = """
{"type": "record", "name": "PedigreeMember", "namespace": "org.gel.models.participant.avro", "doc":
"", "fields": [{"name": "pedigreeId", "type": ["null", "int"], "doc": ""}, {"name": "isProband",
"type": ["null", "boolean"], "doc": ""}, {"name": "participantId", "type": ["null", "string"],
"doc": ""}, {"name": "participantQCState", "type": ["null", {"type": "enum", "name":
"ParticipantQCState", "doc": "", "symbols": ["noState", "passedMedicalReviewReadyForInterpretation",
"passedMedicalReviewNotReadyForInterpretation", "queryToGel", "queryToGMC", "failed"]}], "doc": ""},
{"name": "gelSuperFamilyId", "type": ["null", "string"], "doc": ""}, {"name": "sex", "type":
{"type": "enum", "name": "Sex", "doc": "", "symbols": ["MALE", "FEMALE", "UNKNOWN"]}, "doc": ""},
{"name": "personKaryotypicSex", "type": ["null", {"type": "enum", "name": "PersonKaryotipicSex",
"doc": "", "symbols": ["UNKNOWN", "XX", "XY", "XO", "XXY", "XXX", "XXYY", "XXXY", "XXXX", "XYY",
"OTHER"]}], "doc": ""}, {"name": "yearOfBirth", "type": ["null", "int"], "doc": ""}, {"name":
"fatherId", "type": ["null", "int"], "doc": ""}, {"name": "motherId", "type": ["null", "int"],
"doc": ""}, {"name": "superFatherId", "type": ["null", "int"], "doc": ""}, {"name": "superMotherId",
"type": ["null", "int"], "doc": ""}, {"name": "twinGroup", "type": ["null", "int"], "doc": ""},
{"name": "monozygotic", "type": ["null", {"type": "enum", "name": "TernaryOption", "doc": "",
"symbols": ["yes", "no", "unknown"]}], "doc": ""}, {"name": "adoptedStatus", "type": ["null",
{"type": "enum", "name": "AdoptedStatus", "doc": "", "symbols": ["notadopted", "adoptedin",
"adoptedout"]}], "doc": ""}, {"name": "lifeStatus", "type": ["null", {"type": "enum", "name":
"LifeStatus", "doc": "", "symbols": ["ALIVE", "ABORTED", "DECEASED", "UNBORN", "STILLBORN",
"MISCARRIAGE"]}], "doc": ""}, {"name": "consanguineousParents", "type": ["null", "TernaryOption"],
"doc": ""}, {"name": "affectionStatus", "type": ["null", {"type": "enum", "name": "AffectionStatus",
"doc": "", "symbols": ["UNAFFECTED", "AFFECTED", "UNCERTAIN"]}], "doc": ""}, {"name":
"disorderList", "type": ["null", {"type": "array", "items": {"type": "record", "name": "Disorder",
"doc": "", "fields": [{"name": "diseaseGroup", "type": ["null", "string"], "doc": ""}, {"name":
"diseaseSubGroup", "type": ["null", "string"], "doc": ""}, {"name": "specificDisease", "type":
["null", "string"], "doc": ""}, {"name": "ageOfOnset", "type": ["null", "float"], "doc": ""}]}}],
"doc": ""}, {"name": "hpoTermList", "type": ["null", {"type": "array", "items": {"type": "record",
"name": "HpoTerm", "doc": "", "fields": [{"name": "term", "type": "string", "doc": ""}, {"name":
"termPresence", "type": ["null", "TernaryOption"], "doc": ""}, {"name": "hpoBuildNumber", "type":
["null", "string"], "doc": ""}, {"name": "modifiers", "type": ["null", {"type": "record", "name":
"HpoTermModifiers", "fields": [{"name": "laterality", "type": ["null", {"type": "enum", "name":
"Laterality", "symbols": ["RIGHT", "UNILATERAL", "BILATERAL", "LEFT"]}]}, {"name": "progression",
"type": ["null", {"type": "enum", "name": "Progression", "symbols": ["PROGRESSIVE",
"NONPROGRESSIVE"]}]}, {"name": "severity", "type": ["null", {"type": "enum", "name": "Severity",
"symbols": ["BORDERLINE", "MILD", "MODERATE", "SEVERE", "PROFOUND"]}]}, {"name": "spatialPattern",
"type": ["null", {"type": "enum", "name": "SpatialPattern", "symbols": ["DISTAL", "GENERALIZED",
"LOCALIZED", "PROXIMAL"]}]}]}], "doc": ""}, {"name": "ageOfOnset", "type": ["null", {"type": "enum",
"name": "AgeOfOnset", "symbols": ["EMBRYONAL_ONSET", "FETAL_ONSET", "NEONATAL_ONSET",
"INFANTILE_ONSET", "CHILDHOOD_ONSET", "JUVENILE_ONSET", "YOUNG_ADULT_ONSET", "LATE_ONSET",
"MIDDLE_AGE_ONSET"]}], "doc": ""}]}}], "doc": ""}, {"name": "ancestries", "type": ["null", {"type":
"record", "name": "Ancestries", "doc": "", "fields": [{"name": "mothersEthnicOrigin", "type":
["null", {"type": "enum", "name": "EthnicCategory", "doc": "", "symbols": ["D", "E", "F", "G", "A",
"B", "C", "L", "M", "N", "H", "J", "K", "P", "S", "R", "Z"]}], "doc": ""}, {"name":
"mothersOtherRelevantAncestry", "type": ["null", "string"], "doc": ""}, {"name":
"fathersEthnicOrigin", "type": ["null", "EthnicCategory"], "doc": ""}, {"name":
"fathersOtherRelevantAncestry", "type": ["null", "string"], "doc": ""}, {"name":
"chiSquare1KGenomesPhase3Pop", "type": ["null", {"type": "array", "items": {"type": "record",
"name": "ChiSquare1KGenomesPhase3Pop", "doc": "", "fields": [{"name": "kgSuperPopCategory", "type":
{"type": "enum", "name": "KgSuperPopCategory", "doc": "", "symbols": ["AFR", "AMR", "EAS", "EUR",
"SAS"]}, "doc": ""}, {"name": "kgPopCategory", "type": ["null", {"type": "enum", "name":
"KgPopCategory", "doc": "", "symbols": ["ACB", "ASW", "BEB", "CDX", "CEU", "CHB", "CHS", "CLM",
"ESN", "FIN", "GBR", "GIH", "GWD", "IBS", "ITU", "JPT", "KHV", "LWK", "MSL", "MXL", "PEL", "PJL",
"PUR", "STU", "TSI", "YRI"]}], "doc": ""}, {"name": "chiSquare", "type": "double", "doc": ""}]}}],
"doc": ""}]}], "doc": ""}, {"name": "consentStatus", "type": ["null", {"type": "record", "name":
"ConsentStatus", "doc": "", "fields": [{"name": "programmeConsent", "type": "boolean", "doc": "",
"default": false}, {"name": "primaryFindingConsent", "type": "boolean", "doc": "", "default":
false}, {"name": "secondaryFindingConsent", "type": "boolean", "doc": "", "default": false},
{"name": "carrierStatusConsent", "type": "boolean", "doc": "", "default": false}]}], "doc": ""},
{"name": "samples", "type": ["null", {"type": "array", "items": {"type": "record", "name": "Sample",
"fields": [{"name": "sampleId", "type": "string", "doc": ""}, {"name": "labSampleId", "type": "int",
"doc": ""}, {"name": "source", "type": ["null", {"type": "enum", "name": "SampleSource", "symbols":
["BLOOD", "SALIVA", "FIBROBLAST", "TISSUE"]}], "doc": ""}, {"name": "product", "type": ["null",
{"type": "enum", "name": "Product", "symbols": ["DNA", "RNA"]}], "doc": ""}, {"name":
"preparationMethod", "type": ["null", {"type": "enum", "name": "PreparationMethod", "symbols":
["EDTA", "ORAGENE", "FF", "FFPE", "CD128_SORTED_CELLS", "ASPIRATE"]}], "doc": ""}]}}], "doc": ""},
{"name": "inbreedingCoefficient", "type": ["null", {"type": "record", "name":
"InbreedingCoefficient", "doc": "", "fields": [{"name": "sampleId", "type": "string", "doc": ""},
{"name": "program", "type": "string", "doc": ""}, {"name": "version", "type": "string", "doc": ""},
{"name": "estimationMethod", "type": "string", "doc": ""}, {"name": "coefficient", "type": "double",
"doc": ""}, {"name": "standardError", "type": ["null", "double"], "doc": ""}]}], "doc": ""},
{"name": "additionalInformation", "type": ["null", {"type": "map", "values": "string"}], "doc":
""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"additionalInformation",
"adoptedStatus",
"affectionStatus",
"ancestries",
"consanguineousParents",
"consentStatus",
"disorderList",
"fatherId",
"gelSuperFamilyId",
"hpoTermList",
"inbreedingCoefficient",
"isProband",
"lifeStatus",
"monozygotic",
"motherId",
"participantId",
"participantQCState",
"pedigreeId",
"personKaryotypicSex",
"samples",
"sex",
"superFatherId",
"superMotherId",
"twinGroup",
"yearOfBirth",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {
'ancestries': Ancestries,
'consentStatus': ConsentStatus,
'disorderList': Disorder,
'hpoTermList': HpoTerm,
'inbreedingCoefficient': InbreedingCoefficient,
'samples': Sample,
}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {
'ancestries': Ancestries,
'consentStatus': ConsentStatus,
'disorderList': Disorder,
'hpoTermList': HpoTerm,
'inbreedingCoefficient': InbreedingCoefficient,
'samples': Sample,
}
return embeddedTypes[fieldName]
__slots__ = [
'additionalInformation', 'adoptedStatus', 'affectionStatus',
'ancestries', 'consanguineousParents', 'consentStatus',
'disorderList', 'fatherId', 'gelSuperFamilyId', 'hpoTermList',
'inbreedingCoefficient', 'isProband', 'lifeStatus',
'monozygotic', 'motherId', 'participantId',
'participantQCState', 'pedigreeId', 'personKaryotypicSex',
'samples', 'sex', 'superFatherId', 'superMotherId',
'twinGroup', 'yearOfBirth'
]
def __init__(self, **kwargs):
self.additionalInformation = kwargs.get(
'additionalInformation', None)
self.adoptedStatus = kwargs.get(
'adoptedStatus', None)
self.affectionStatus = kwargs.get(
'affectionStatus', None)
self.ancestries = kwargs.get(
'ancestries', None)
self.consanguineousParents = kwargs.get(
'consanguineousParents', None)
self.consentStatus = kwargs.get(
'consentStatus', None)
self.disorderList = kwargs.get(
'disorderList', None)
self.fatherId = kwargs.get(
'fatherId', None)
self.gelSuperFamilyId = kwargs.get(
'gelSuperFamilyId', None)
self.hpoTermList = kwargs.get(
'hpoTermList', None)
self.inbreedingCoefficient = kwargs.get(
'inbreedingCoefficient', None)
self.isProband = kwargs.get(
'isProband', None)
self.lifeStatus = kwargs.get(
'lifeStatus', None)
self.monozygotic = kwargs.get(
'monozygotic', None)
self.motherId = kwargs.get(
'motherId', None)
self.participantId = kwargs.get(
'participantId', None)
self.participantQCState = kwargs.get(
'participantQCState', None)
self.pedigreeId = kwargs.get(
'pedigreeId', None)
self.personKaryotypicSex = kwargs.get(
'personKaryotypicSex', None)
self.samples = kwargs.get(
'samples', None)
self.sex = kwargs.get(
'sex', None)
self.superFatherId = kwargs.get(
'superFatherId', None)
self.superMotherId = kwargs.get(
'superMotherId', None)
self.twinGroup = kwargs.get(
'twinGroup', None)
self.yearOfBirth = kwargs.get(
'yearOfBirth', None)
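# Illustrative sketch: a minimal Pedigree with a single proband member. The
# family and participant identifiers are hypothetical; 'MALE' is one of the
# symbols of the Sex enum defined in the schema. Unset fields keep their
# None defaults.
def _example_pedigree():
    proband = PedigreeMember(
        pedigreeId=1,
        isProband=True,
        participantId='PARTICIPANT-1',  # hypothetical identifier
        sex='MALE',                     # Sex enum symbol
    )
    return Pedigree(familyId='FAMILY-1', members=[proband],
                    readyForAnalysis=True)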
class Penetrance(object):
"""
Penetrance assumed in the analysis
"""
complete = "complete"
incomplete = "incomplete"
def __hash__(self):
return str(self).__hash__()
class PersonKaryotipicSex(object):
"""
    Karyotypic sex
"""
UNKNOWN = "UNKNOWN"
XX = "XX"
XY = "XY"
XO = "XO"
XXY = "XXY"
XXX = "XXX"
XXYY = "XXYY"
XXXY = "XXXY"
XXXX = "XXXX"
XYY = "XYY"
OTHER = "OTHER"
def __hash__(self):
return str(self).__hash__()
class PhenotypesSolved(object):
"""
No documentation
"""
yes = "yes"
no = "no"
partially = "partially"
unknown = "unknown"
def __hash__(self):
return str(self).__hash__()
class PreparationMethod(object):
"""
No documentation
"""
EDTA = "EDTA"
ORAGENE = "ORAGENE"
FF = "FF"
FFPE = "FFPE"
CD128_SORTED_CELLS = "CD128_SORTED_CELLS"
ASPIRATE = "ASPIRATE"
def __hash__(self):
return str(self).__hash__()
class Product(object):
"""
No documentation
"""
DNA = "DNA"
RNA = "RNA"
def __hash__(self):
return str(self).__hash__()
class ProgrammePhase(object):
"""
No documentation
"""
CRUK = "CRUK"
OXFORD = "OXFORD"
CLL = "CLL"
IIP = "IIP"
MAIN = "MAIN"
EXPT = "EXPT"
def __hash__(self):
return str(self).__hash__()
class Progression(object):
"""
No documentation
"""
PROGRESSIVE = "PROGRESSIVE"
NONPROGRESSIVE = "NONPROGRESSIVE"
def __hash__(self):
return str(self).__hash__()
class RDFamilyChange(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "RDFamilyChange", "namespace": "org.gel.models.participant.avro",
"fields": [{"name": "FamilyId", "type": "string", "doc": ""}, {"name": "code", "type": {"type":
"enum", "name": "RDFamilyChangeCode", "doc": "", "symbols": ["FamilyAdded", "FamilyDeleted",
"ProbandChanged", "ParticipantAdded", "ParticipantRemoved", "ConsentStatusChanged",
"AffectionStatusChanged", "PanelAssignmentChanged", "SexChanged", "SampleChanged"]}, "doc": ""},
{"name": "Family", "type": {"type": "record", "name": "Pedigree", "doc": "", "fields": [{"name":
"versionControl", "type": ["null", {"type": "record", "name": "VersionControl", "fields": [{"name":
"GitVersionControl", "type": "string", "doc": "", "default": "1.0.3"}]}], "doc": ""}, {"name":
"LDPCode", "type": ["null", "string"]}, {"name": "familyId", "type": "string", "doc": ""}, {"name":
"members", "type": {"type": "array", "items": {"type": "record", "name": "PedigreeMember", "doc":
"", "fields": [{"name": "pedigreeId", "type": ["null", "int"], "doc": ""}, {"name": "isProband",
"type": ["null", "boolean"], "doc": ""}, {"name": "participantId", "type": ["null", "string"],
"doc": ""}, {"name": "participantQCState", "type": ["null", {"type": "enum", "name":
"ParticipantQCState", "doc": "", "symbols": ["noState", "passedMedicalReviewReadyForInterpretation",
"passedMedicalReviewNotReadyForInterpretation", "queryToGel", "queryToGMC", "failed"]}], "doc": ""},
{"name": "gelSuperFamilyId", "type": ["null", "string"], "doc": ""}, {"name": "sex", "type":
{"type": "enum", "name": "Sex", "doc": "", "symbols": ["MALE", "FEMALE", "UNKNOWN"]}, "doc": ""},
{"name": "personKaryotypicSex", "type": ["null", {"type": "enum", "name": "PersonKaryotipicSex",
"doc": "", "symbols": ["UNKNOWN", "XX", "XY", "XO", "XXY", "XXX", "XXYY", "XXXY", "XXXX", "XYY",
"OTHER"]}], "doc": ""}, {"name": "yearOfBirth", "type": ["null", "int"], "doc": ""}, {"name":
"fatherId", "type": ["null", "int"], "doc": ""}, {"name": "motherId", "type": ["null", "int"],
"doc": ""}, {"name": "superFatherId", "type": ["null", "int"], "doc": ""}, {"name": "superMotherId",
"type": ["null", "int"], "doc": ""}, {"name": "twinGroup", "type": ["null", "int"], "doc": ""},
{"name": "monozygotic", "type": ["null", {"type": "enum", "name": "TernaryOption", "doc": "",
"symbols": ["yes", "no", "unknown"]}], "doc": ""}, {"name": "adoptedStatus", "type": ["null",
{"type": "enum", "name": "AdoptedStatus", "doc": "", "symbols": ["notadopted", "adoptedin",
"adoptedout"]}], "doc": ""}, {"name": "lifeStatus", "type": ["null", {"type": "enum", "name":
"LifeStatus", "doc": "", "symbols": ["ALIVE", "ABORTED", "DECEASED", "UNBORN", "STILLBORN",
"MISCARRIAGE"]}], "doc": ""}, {"name": "consanguineousParents", "type": ["null", "TernaryOption"],
"doc": ""}, {"name": "affectionStatus", "type": ["null", {"type": "enum", "name": "AffectionStatus",
"doc": "", "symbols": ["UNAFFECTED", "AFFECTED", "UNCERTAIN"]}], "doc": ""}, {"name":
"disorderList", "type": ["null", {"type": "array", "items": {"type": "record", "name": "Disorder",
"doc": "", "fields": [{"name": "diseaseGroup", "type": ["null", "string"], "doc": ""}, {"name":
"diseaseSubGroup", "type": ["null", "string"], "doc": ""}, {"name": "specificDisease", "type":
["null", "string"], "doc": ""}, {"name": "ageOfOnset", "type": ["null", "float"], "doc": ""}]}}],
"doc": ""}, {"name": "hpoTermList", "type": ["null", {"type": "array", "items": {"type": "record",
"name": "HpoTerm", "doc": "", "fields": [{"name": "term", "type": "string", "doc": ""}, {"name":
"termPresence", "type": ["null", "TernaryOption"], "doc": ""}, {"name": "hpoBuildNumber", "type":
["null", "string"], "doc": ""}, {"name": "modifiers", "type": ["null", {"type": "record", "name":
"HpoTermModifiers", "fields": [{"name": "laterality", "type": ["null", {"type": "enum", "name":
"Laterality", "symbols": ["RIGHT", "UNILATERAL", "BILATERAL", "LEFT"]}]}, {"name": "progression",
"type": ["null", {"type": "enum", "name": "Progression", "symbols": ["PROGRESSIVE",
"NONPROGRESSIVE"]}]}, {"name": "severity", "type": ["null", {"type": "enum", "name": "Severity",
"symbols": ["BORDERLINE", "MILD", "MODERATE", "SEVERE", "PROFOUND"]}]}, {"name": "spatialPattern",
"type": ["null", {"type": "enum", "name": "SpatialPattern", "symbols": ["DISTAL", "GENERALIZED",
"LOCALIZED", "PROXIMAL"]}]}]}], "doc": ""}, {"name": "ageOfOnset", "type": ["null", {"type": "enum",
"name": "AgeOfOnset", "symbols": ["EMBRYONAL_ONSET", "FETAL_ONSET", "NEONATAL_ONSET",
"INFANTILE_ONSET", "CHILDHOOD_ONSET", "JUVENILE_ONSET", "YOUNG_ADULT_ONSET", "LATE_ONSET",
"MIDDLE_AGE_ONSET"]}], "doc": ""}]}}], "doc": ""}, {"name": "ancestries", "type": ["null", {"type":
"record", "name": "Ancestries", "doc": "", "fields": [{"name": "mothersEthnicOrigin", "type":
["null", {"type": "enum", "name": "EthnicCategory", "doc": "", "symbols": ["D", "E", "F", "G", "A",
"B", "C", "L", "M", "N", "H", "J", "K", "P", "S", "R", "Z"]}], "doc": ""}, {"name":
"mothersOtherRelevantAncestry", "type": ["null", "string"], "doc": ""}, {"name":
"fathersEthnicOrigin", "type": ["null", "EthnicCategory"], "doc": ""}, {"name":
"fathersOtherRelevantAncestry", "type": ["null", "string"], "doc": ""}, {"name":
"chiSquare1KGenomesPhase3Pop", "type": ["null", {"type": "array", "items": {"type": "record",
"name": "ChiSquare1KGenomesPhase3Pop", "doc": "", "fields": [{"name": "kgSuperPopCategory", "type":
{"type": "enum", "name": "KgSuperPopCategory", "doc": "", "symbols": ["AFR", "AMR", "EAS", "EUR",
"SAS"]}, "doc": ""}, {"name": "kgPopCategory", "type": ["null", {"type": "enum", "name":
"KgPopCategory", "doc": "", "symbols": ["ACB", "ASW", "BEB", "CDX", "CEU", "CHB", "CHS", "CLM",
"ESN", "FIN", "GBR", "GIH", "GWD", "IBS", "ITU", "JPT", "KHV", "LWK", "MSL", "MXL", "PEL", "PJL",
"PUR", "STU", "TSI", "YRI"]}], "doc": ""}, {"name": "chiSquare", "type": "double", "doc": ""}]}}],
"doc": ""}]}], "doc": ""}, {"name": "consentStatus", "type": ["null", {"type": "record", "name":
"ConsentStatus", "doc": "", "fields": [{"name": "programmeConsent", "type": "boolean", "doc": "",
"default": false}, {"name": "primaryFindingConsent", "type": "boolean", "doc": "", "default":
false}, {"name": "secondaryFindingConsent", "type": "boolean", "doc": "", "default": false},
{"name": "carrierStatusConsent", "type": "boolean", "doc": "", "default": false}]}], "doc": ""},
{"name": "samples", "type": ["null", {"type": "array", "items": {"type": "record", "name": "Sample",
"fields": [{"name": "sampleId", "type": "string", "doc": ""}, {"name": "labSampleId", "type": "int",
"doc": ""}, {"name": "source", "type": ["null", {"type": "enum", "name": "SampleSource", "symbols":
["BLOOD", "SALIVA", "FIBROBLAST", "TISSUE"]}], "doc": ""}, {"name": "product", "type": ["null",
{"type": "enum", "name": "Product", "symbols": ["DNA", "RNA"]}], "doc": ""}, {"name":
"preparationMethod", "type": ["null", {"type": "enum", "name": "PreparationMethod", "symbols":
["EDTA", "ORAGENE", "FF", "FFPE", "CD128_SORTED_CELLS", "ASPIRATE"]}], "doc": ""}]}}], "doc": ""},
{"name": "inbreedingCoefficient", "type": ["null", {"type": "record", "name":
"InbreedingCoefficient", "doc": "", "fields": [{"name": "sampleId", "type": "string", "doc": ""},
{"name": "program", "type": "string", "doc": ""}, {"name": "version", "type": "string", "doc": ""},
{"name": "estimationMethod", "type": "string", "doc": ""}, {"name": "coefficient", "type": "double",
"doc": ""}, {"name": "standardError", "type": ["null", "double"], "doc": ""}]}], "doc": ""},
{"name": "additionalInformation", "type": ["null", {"type": "map", "values": "string"}], "doc":
""}]}}}, {"name": "analysisPanels", "type": ["null", {"type": "array", "items": {"type": "record",
"name": "AnalysisPanel", "fields": [{"name": "specificDisease", "type": "string"}, {"name":
"panelName", "type": "string"}, {"name": "panelVersion", "type": ["null", "string"]}, {"name":
"reviewOutcome", "type": "string"}, {"name": "multipleGeneticOrigins", "type": "string"}]}}]},
{"name": "diseasePenetrances", "type": ["null", {"type": "array", "items": {"type": "record",
"name": "DiseasePenetrance", "fields": [{"name": "specificDisease", "type": "string"}, {"name":
"penetrance", "type": {"type": "enum", "name": "Penetrance", "doc": "", "symbols": ["complete",
"incomplete"]}}]}}]}, {"name": "readyForAnalysis", "type": "boolean"}, {"name": "familyQCState",
"type": ["null", {"type": "enum", "name": "FamilyQCState", "doc": "", "symbols": ["noState",
"passedMedicalReviewReadyForInterpretation", "passedMedicalReviewNotReadyForInterpretation",
"queryToGel", "queryToGMC", "failed"]}]}]}, "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"Family",
"FamilyId",
"code",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {
'Family': Pedigree,
}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {
'Family': Pedigree,
}
return embeddedTypes[fieldName]
__slots__ = [
'Family', 'FamilyId', 'code'
]
def __init__(self, **kwargs):
self.Family = kwargs.get(
'Family', Pedigree())
self.FamilyId = kwargs.get(
'FamilyId', None)
self.code = kwargs.get(
'code', None)
class RDFamilyChangeCode(object):
"""
    This code defines the change type: * `FamilyAdded`: This is a
    new family. * `FamilyDeleted`: This family should be removed.
    * `ProbandChanged`: The proband participant is now a different
    member of the family. * `ParticipantAdded`: A new participant
    has been sequenced and added to the family. *
    `ParticipantRemoved`: A participant has been removed. *
    `ConsentStatusChanged`: One or more participants in this family
    have a different consent status. * `AffectionStatusChanged`:
    HPO terms or disorders have changed for one or more
    participants in this family. * `PanelAssignmentChanged`: Gene
    panels have changed for this family. * `SexChanged`: Sex has
    changed for one or more participants in this family. *
    `SampleChanged`: The sample(s) associated with one or more
    participants in this family have changed.
"""
FamilyAdded = "FamilyAdded"
FamilyDeleted = "FamilyDeleted"
ProbandChanged = "ProbandChanged"
ParticipantAdded = "ParticipantAdded"
ParticipantRemoved = "ParticipantRemoved"
ConsentStatusChanged = "ConsentStatusChanged"
AffectionStatusChanged = "AffectionStatusChanged"
PanelAssignmentChanged = "PanelAssignmentChanged"
SexChanged = "SexChanged"
SampleChanged = "SampleChanged"
def __hash__(self):
return str(self).__hash__()
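# Illustrative sketch: an RDFamilyChange message flagging that a participant
# was added; the family identifier is a hypothetical placeholder.
def _example_family_change():
    return RDFamilyChange(
        FamilyId='FAMILY-1',
        code=RDFamilyChangeCode.ParticipantAdded,
        Family=Pedigree(familyId='FAMILY-1'),
    )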
class RareDiseaseExitQuestionnaire(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "RareDiseaseExitQuestionnaire", "namespace":
"org.gel.models.report.avro", "fields": [{"name": "eventDate", "type": "string"}, {"name":
"reporter", "type": "string"}, {"name": "familyLevelQuestions", "type": {"type": "record", "name":
"FamilyLevelQuestions", "fields": [{"name": "caseSolvedFamily", "type": {"type": "enum", "name":
"CaseSolvedFamily", "symbols": ["yes", "no", "partially", "unknown"]}, "doc": ""}, {"name":
"segregationQuestion", "type": {"type": "enum", "name": "SegregationQuestion", "symbols": ["yes",
"no"]}, "doc": ""}, {"name": "additionalComments", "type": "string", "doc": ""}]}}, {"name":
"variantGroupLevelQuestions", "type": {"type": "array", "items": {"type": "record", "name":
"VariantGroupLevelQuestions", "fields": [{"name": "variantGroup", "type": "int"}, {"name":
"variantLevelQuestions", "type": {"type": "array", "items": {"type": "record", "name":
"VariantLevelQuestions", "fields": [{"name": "variantDetails", "type": "string", "doc": ""},
{"name": "confirmationDecision", "type": {"type": "enum", "name": "ConfirmationDecision", "symbols":
["yes", "no", "na"]}, "doc": ""}, {"name": "confirmationOutcome", "type": {"type": "enum", "name":
"ConfirmationOutcome", "symbols": ["yes", "no", "na"]}, "doc": ""}, {"name": "reportingQuestion",
"type": {"type": "enum", "name": "ReportingQuestion", "symbols": ["yes", "no", "na"]}, "doc": ""},
{"name": "acmgClassification", "type": {"type": "enum", "name": "ACMGClassification", "symbols":
["pathogenic_variant", "likely_pathogenic_variant", "variant_of_unknown_clinical_significance",
"likely_benign_variant", "benign_variant", "not_assessed"]}, "doc": ""}, {"name": "publications",
"type": "string", "doc": ""}]}}}, {"name": "actionability", "type": {"type": "enum", "name":
"Actionability", "symbols": ["yes", "no", "not_yet", "na"]}, "doc": ""}, {"name": "clinicalUtility",
"type": {"type": "array", "items": {"type": "enum", "name": "ClinicalUtility", "symbols": ["none",
"change_in_medication", "surgical_option", "additional_surveillance_for_proband_or_relatives",
"clinical_trial_eligibility", "informs_reproductive_choice", "unknown", "other"]}}, "doc": ""},
{"name": "phenotypesSolved", "type": {"type": "enum", "name": "PhenotypesSolved", "symbols": ["yes",
"no", "partially", "unknown"]}, "doc": ""}, {"name": "phenotypesExplained", "type": ["null",
{"type": "array", "items": "string"}], "doc": ""}]}}}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"eventDate",
"familyLevelQuestions",
"reporter",
"variantGroupLevelQuestions",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {
'familyLevelQuestions': FamilyLevelQuestions,
'variantGroupLevelQuestions': VariantGroupLevelQuestions,
}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {
'familyLevelQuestions': FamilyLevelQuestions,
'variantGroupLevelQuestions': VariantGroupLevelQuestions,
}
return embeddedTypes[fieldName]
__slots__ = [
'eventDate', 'familyLevelQuestions', 'reporter',
'variantGroupLevelQuestions'
]
def __init__(self, **kwargs):
self.eventDate = kwargs.get(
'eventDate', None)
self.familyLevelQuestions = kwargs.get(
'familyLevelQuestions', FamilyLevelQuestions())
self.reporter = kwargs.get(
'reporter', None)
self.variantGroupLevelQuestions = kwargs.get(
'variantGroupLevelQuestions', None)
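# Illustrative sketch: a questionnaire skeleton with no variant groups.
# FamilyLevelQuestions is generated elsewhere in this module; the answers
# are plain strings matching the schema's enum symbols, and the date and
# reporter values are hypothetical.
def _example_exit_questionnaire():
    return RareDiseaseExitQuestionnaire(
        eventDate='2017-01-01',
        reporter='REPORTER-1',
        familyLevelQuestions=FamilyLevelQuestions(
            caseSolvedFamily='yes',    # CaseSolvedFamily symbol
            segregationQuestion='no',  # SegregationQuestion symbol
            additionalComments='',
        ),
        variantGroupLevelQuestions=[],
    )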
class ReportEvent(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "ReportEvent", "namespace": "org.gel.models.report.avro", "fields":
[{"name": "reportEventId", "type": "string", "doc": ""}, {"name": "phenotype", "type": "string",
"doc": ""}, {"name": "panelName", "type": ["null", "string"], "doc": ""}, {"name": "panelVersion",
"type": ["null", "string"], "doc": ""}, {"name": "modeOfInheritance", "type": {"type": "enum",
"name": "ReportedModeOfInheritance", "doc": "", "symbols": ["monoallelic",
"monoallelic_not_imprinted", "monoallelic_maternally_imprinted", "monoallelic_paternally_imprinted",
"biallelic", "monoallelic_and_biallelic", "monoallelic_and_more_severe_biallelic",
"xlinked_biallelic", "xlinked_monoallelic", "mitochondrial", "unknown"]}, "doc": ""}, {"name":
"genomicFeature", "type": {"type": "record", "name": "GenomicFeature", "fields": [{"name":
"featureType", "type": {"type": "enum", "name": "FeatureTypes", "symbols": ["RegulatoryRegion",
"Gene", "Transcript"]}, "doc": ""}, {"name": "ensemblId", "type": "string", "doc": ""}, {"name":
"hgnc", "type": ["null", "string"], "doc": ""}, {"name": "otherIds", "type": ["null", {"type":
"map", "values": "string"}], "doc": ""}]}, "doc": ""}, {"name": "penetrance", "type": {"type":
"enum", "name": "Penetrance", "namespace": "org.gel.models.participant.avro", "doc": "", "symbols":
["complete", "incomplete"]}, "doc": ""}, {"name": "score", "type": "float", "doc": ""}, {"name":
"vendorSpecificScores", "type": ["null", {"type": "map", "values": "float"}], "doc": ""}, {"name":
"variantClassification", "type": ["null", {"type": "enum", "name": "VariantClassification", "doc":
"", "symbols": ["pathogenic_variant", "likely_pathogenic_variant",
"variant_of_unknown_clinical_significance", "likely_benign_variant", "benign_variant",
"not_assessed"]}], "doc": ""}, {"name": "fullyExplainsPhenotype", "type": ["null", "boolean"],
"doc": ""}, {"name": "groupOfVariants", "type": ["null", "int"], "doc": ""}, {"name":
"eventJustification", "type": ["null", "string"], "doc": ""}, {"name": "tier", "type": ["null",
{"type": "enum", "name": "Tier", "doc": "", "symbols": ["NONE", "TIER1", "TIER2", "TIER3"]}], "doc":
""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"eventJustification",
"fullyExplainsPhenotype",
"genomicFeature",
"groupOfVariants",
"modeOfInheritance",
"panelName",
"panelVersion",
"penetrance",
"phenotype",
"reportEventId",
"score",
"tier",
"variantClassification",
"vendorSpecificScores",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {
'genomicFeature': GenomicFeature,
}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {
'genomicFeature': GenomicFeature,
}
return embeddedTypes[fieldName]
__slots__ = [
'eventJustification', 'fullyExplainsPhenotype',
'genomicFeature', 'groupOfVariants', 'modeOfInheritance',
'panelName', 'panelVersion', 'penetrance', 'phenotype',
'reportEventId', 'score', 'tier', 'variantClassification',
'vendorSpecificScores'
]
def __init__(self, **kwargs):
self.eventJustification = kwargs.get(
'eventJustification', None)
self.fullyExplainsPhenotype = kwargs.get(
'fullyExplainsPhenotype', None)
self.genomicFeature = kwargs.get(
'genomicFeature', GenomicFeature())
self.groupOfVariants = kwargs.get(
'groupOfVariants', None)
self.modeOfInheritance = kwargs.get(
'modeOfInheritance', None)
self.panelName = kwargs.get(
'panelName', None)
self.panelVersion = kwargs.get(
'panelVersion', None)
self.penetrance = kwargs.get(
'penetrance', None)
self.phenotype = kwargs.get(
'phenotype', None)
self.reportEventId = kwargs.get(
'reportEventId', None)
self.score = kwargs.get(
'score', None)
self.tier = kwargs.get(
'tier', None)
self.variantClassification = kwargs.get(
'variantClassification', None)
self.vendorSpecificScores = kwargs.get(
'vendorSpecificScores', None)
class ReportEventCancer(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "ReportEventCancer", "namespace": "org.gel.models.report.avro", "fields":
[{"name": "reportEventId", "type": "string", "doc": ""}, {"name": "genomicFeatureCancer", "type":
{"type": "record", "name": "GenomicFeatureCancer", "fields": [{"name": "featureType", "type":
{"type": "enum", "name": "FeatureTypeCancer", "doc": "", "symbols": ["regulatory_region", "gene",
"transcript"]}, "doc": ""}, {"name": "ensemblId", "type": "string", "doc": ""}, {"name":
"refSeqTranscriptId", "type": "string", "doc": ""}, {"name": "refSeqProteinId", "type": "string",
"doc": ""}, {"name": "geneName", "type": "string", "doc": ""}, {"name": "roleInCancer", "type":
["null", {"type": "enum", "name": "RoleInCancer", "doc": "", "symbols": ["oncogene",
"tumor_suppressor_gene", "both"]}], "doc": ""}]}, "doc": ""}, {"name": "soTerms", "type": {"type":
"array", "items": {"type": "record", "name": "SoTerm", "doc": "", "fields": [{"name": "id", "type":
"string", "doc": ""}, {"name": "name", "type": "string", "doc": ""}]}}, "doc": ""}, {"name":
"actions", "type": ["null", {"type": "array", "items": {"type": "record", "name": "Action",
"fields": [{"name": "actionType", "type": ["null", {"type": "enum", "name": "ActionType", "doc": "",
"symbols": ["therapy", "therapeutic", "prognosis", "diagnosis"]}]}, {"name": "evidences", "type":
["null", {"type": "array", "items": "string"}], "doc": ""}, {"name": "drug", "type": ["null",
"string"], "doc": ""}, {"name": "status", "type": ["null", {"type": "enum", "name": "ActionStatus",
"doc": "", "symbols": ["clinical", "pre_clinical"]}], "doc": ""}, {"name": "variantActionable",
"type": "boolean", "doc": ""}, {"name": "comments", "type": ["null", {"type": "array", "items":
"string"}], "doc": ""}, {"name": "url", "type": ["null", "string"], "doc": ""}, {"name":
"evidenceType", "type": ["null", "string"], "doc": ""}, {"name": "source", "type": "string", "doc":
""}]}}], "doc": ""}, {"name": "groupOfVariants", "type": ["null", "int"], "doc": ""}, {"name":
"eventJustification", "type": ["null", "string"], "doc": ""}, {"name": "tier", "type": ["null",
{"type": "enum", "name": "Tier", "doc": "", "symbols": ["NONE", "TIER1", "TIER2", "TIER3"]}], "doc":
""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"actions",
"eventJustification",
"genomicFeatureCancer",
"groupOfVariants",
"reportEventId",
"soTerms",
"tier",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {
'actions': Action,
'genomicFeatureCancer': GenomicFeatureCancer,
'soTerms': SoTerm,
}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {
'actions': Action,
'genomicFeatureCancer': GenomicFeatureCancer,
'soTerms': SoTerm,
}
return embeddedTypes[fieldName]
__slots__ = [
'actions', 'eventJustification', 'genomicFeatureCancer',
'groupOfVariants', 'reportEventId', 'soTerms', 'tier'
]
def __init__(self, **kwargs):
self.actions = kwargs.get(
'actions', None)
self.eventJustification = kwargs.get(
'eventJustification', None)
self.genomicFeatureCancer = kwargs.get(
'genomicFeatureCancer', GenomicFeatureCancer())
self.groupOfVariants = kwargs.get(
'groupOfVariants', None)
self.reportEventId = kwargs.get(
'reportEventId', None)
self.soTerms = kwargs.get(
'soTerms', None)
self.tier = kwargs.get(
'tier', None)
class ReportVersionControl(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "ReportVersionControl", "namespace": "org.gel.models.report.avro",
"fields": [{"name": "gitVersionControl", "type": "string", "doc": "", "default": "4.2.0"}]}
"""
schema = avro_parse(_schemaSource)
    requiredFields = set()  # no required fields; a set, for consistency with the other classes
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'gitVersionControl'
]
def __init__(self, **kwargs):
self.gitVersionControl = kwargs.get(
'gitVersionControl', '4.2.0')
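# Illustrative sketch: constructed without arguments, ReportVersionControl
# falls back to the schema default for gitVersionControl.
def _example_report_version_control():
    assert ReportVersionControl().gitVersionControl == '4.2.0'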
class ReportedModeOfInheritance(object):
"""
    An enumeration for the different modes of inheritance: *
    `monoallelic_not_imprinted`: MONOALLELIC, autosomal or
    pseudoautosomal, not imprinted *
    `monoallelic_maternally_imprinted`: MONOALLELIC, autosomal or
    pseudoautosomal, maternally imprinted (paternal allele expressed)
    * `monoallelic_paternally_imprinted`: MONOALLELIC, autosomal or
    pseudoautosomal, paternally imprinted (maternal allele expressed)
    * `monoallelic`: MONOALLELIC, autosomal or pseudoautosomal,
    imprinted status unknown * `biallelic`: BIALLELIC, autosomal or
    pseudoautosomal * `monoallelic_and_biallelic`: BOTH monoallelic
    and biallelic, autosomal or pseudoautosomal *
    `monoallelic_and_more_severe_biallelic`: BOTH monoallelic and
    biallelic, autosomal or pseudoautosomal (but BIALLELIC mutations
    cause a more SEVERE disease form) * `xlinked_biallelic`:
    X-LINKED: hemizygous mutation in males, biallelic mutations in
    females * `xlinked_monoallelic`: X-LINKED: hemizygous mutation
    in males, monoallelic mutations in females may cause disease
    (may be less severe, later onset than in males) *
    `mitochondrial`: MITOCHONDRIAL * `unknown`: Unknown
"""
monoallelic = "monoallelic"
monoallelic_not_imprinted = "monoallelic_not_imprinted"
monoallelic_maternally_imprinted = "monoallelic_maternally_imprinted"
monoallelic_paternally_imprinted = "monoallelic_paternally_imprinted"
biallelic = "biallelic"
monoallelic_and_biallelic = "monoallelic_and_biallelic"
monoallelic_and_more_severe_biallelic = "monoallelic_and_more_severe_biallelic"
xlinked_biallelic = "xlinked_biallelic"
xlinked_monoallelic = "xlinked_monoallelic"
mitochondrial = "mitochondrial"
unknown = "unknown"
def __hash__(self):
return str(self).__hash__()
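# A minimal usage sketch (hypothetical helper, not part of the generated
# API): the enumeration classes in this module are plain namespaces of
# string constants, so a candidate value can be validated by scanning the
# public class attributes.
def _is_known_mode_of_inheritance(value):
    """Return True if `value` matches a ReportedModeOfInheritance symbol."""
    symbols = {
        v for k, v in vars(ReportedModeOfInheritance).items()
        if not k.startswith('_') and isinstance(v, str)
    }
    return value in symbols
# e.g. _is_known_mode_of_inheritance("xlinked_biallelic") is True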
class ReportedStructuralVariant(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "ReportedStructuralVariant", "namespace": "org.gel.models.report.avro",
"fields": [{"name": "chromosome", "type": "string", "doc": ""}, {"name": "start", "type": "int",
"doc": ""}, {"name": "end", "type": "int", "doc": ""}, {"name": "type", "type": "string", "doc":
""}, {"name": "reference", "type": "string", "doc": ""}, {"name": "alternate", "type": "string",
"doc": ""}, {"name": "calledGenotypes", "type": {"type": "array", "items": {"type": "record",
"name": "CalledGenotype", "doc": "", "fields": [{"name": "gelId", "type": "string", "doc": ""},
{"name": "sampleId", "type": "string", "doc": ""}, {"name": "genotype", "type": {"type": "enum",
"name": "Zygosity", "doc": "", "symbols": ["reference_homozygous", "heterozygous",
"alternate_homozygous", "missing", "half_missing_reference", "half_missing_alternate",
"alternate_hemizigous", "reference_hemizigous", "unk"]}, "doc": ""}, {"name": "phaseSet", "type":
["null", "int"], "doc": ""}, {"name": "depthReference", "type": ["null", "int"], "doc": ""},
{"name": "depthAlternate", "type": ["null", "int"], "doc": ""}, {"name": "copyNumber", "type":
["null", "int"], "doc": ""}]}}}, {"name": "reportEvents", "type": {"type": "array", "items":
{"type": "record", "name": "ReportEvent", "fields": [{"name": "reportEventId", "type": "string",
"doc": ""}, {"name": "phenotype", "type": "string", "doc": ""}, {"name": "panelName", "type":
["null", "string"], "doc": ""}, {"name": "panelVersion", "type": ["null", "string"], "doc": ""},
{"name": "modeOfInheritance", "type": {"type": "enum", "name": "ReportedModeOfInheritance", "doc":
"", "symbols": ["monoallelic", "monoallelic_not_imprinted", "monoallelic_maternally_imprinted",
"monoallelic_paternally_imprinted", "biallelic", "monoallelic_and_biallelic",
"monoallelic_and_more_severe_biallelic", "xlinked_biallelic", "xlinked_monoallelic",
"mitochondrial", "unknown"]}, "doc": ""}, {"name": "genomicFeature", "type": {"type": "record",
"name": "GenomicFeature", "fields": [{"name": "featureType", "type": {"type": "enum", "name":
"FeatureTypes", "symbols": ["RegulatoryRegion", "Gene", "Transcript"]}, "doc": ""}, {"name":
"ensemblId", "type": "string", "doc": ""}, {"name": "hgnc", "type": ["null", "string"], "doc": ""},
{"name": "otherIds", "type": ["null", {"type": "map", "values": "string"}], "doc": ""}]}, "doc":
""}, {"name": "penetrance", "type": {"type": "enum", "name": "Penetrance", "namespace":
"org.gel.models.participant.avro", "doc": "", "symbols": ["complete", "incomplete"]}, "doc": ""},
{"name": "score", "type": "float", "doc": ""}, {"name": "vendorSpecificScores", "type": ["null",
{"type": "map", "values": "float"}], "doc": ""}, {"name": "variantClassification", "type": ["null",
{"type": "enum", "name": "VariantClassification", "doc": "", "symbols": ["pathogenic_variant",
"likely_pathogenic_variant", "variant_of_unknown_clinical_significance", "likely_benign_variant",
"benign_variant", "not_assessed"]}], "doc": ""}, {"name": "fullyExplainsPhenotype", "type": ["null",
"boolean"], "doc": ""}, {"name": "groupOfVariants", "type": ["null", "int"], "doc": ""}, {"name":
"eventJustification", "type": ["null", "string"], "doc": ""}, {"name": "tier", "type": ["null",
{"type": "enum", "name": "Tier", "doc": "", "symbols": ["NONE", "TIER1", "TIER2", "TIER3"]}], "doc":
""}]}}, "doc": ""}, {"name": "additionalTextualVariantAnnotations", "type": ["null", {"type": "map",
"values": "string"}], "doc": ""}, {"name": "evidenceIds", "type": ["null", {"type": "map", "values":
"string"}], "doc": ""}, {"name": "additionalNumericVariantAnnotations", "type": ["null", {"type":
"map", "values": "float"}], "doc": ""}, {"name": "comments", "type": ["null", {"type": "array",
"items": "string"}], "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"additionalNumericVariantAnnotations",
"additionalTextualVariantAnnotations",
"alternate",
"calledGenotypes",
"chromosome",
"comments",
"end",
"evidenceIds",
"reference",
"reportEvents",
"start",
"type",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {
'calledGenotypes': CalledGenotype,
'reportEvents': ReportEvent,
}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {
'calledGenotypes': CalledGenotype,
'reportEvents': ReportEvent,
}
return embeddedTypes[fieldName]
__slots__ = [
'additionalNumericVariantAnnotations',
'additionalTextualVariantAnnotations', 'alternate',
'calledGenotypes', 'chromosome', 'comments', 'end',
'evidenceIds', 'reference', 'reportEvents', 'start', 'type'
]
def __init__(self, **kwargs):
self.additionalNumericVariantAnnotations = kwargs.get(
'additionalNumericVariantAnnotations', None)
self.additionalTextualVariantAnnotations = kwargs.get(
'additionalTextualVariantAnnotations', None)
self.alternate = kwargs.get(
'alternate', None)
self.calledGenotypes = kwargs.get(
'calledGenotypes', None)
self.chromosome = kwargs.get(
'chromosome', None)
self.comments = kwargs.get(
'comments', None)
self.end = kwargs.get(
'end', None)
self.evidenceIds = kwargs.get(
'evidenceIds', None)
self.reference = kwargs.get(
'reference', None)
self.reportEvents = kwargs.get(
'reportEvents', None)
self.start = kwargs.get(
'start', None)
self.type = kwargs.get(
'type', None)
class ReportedStructuralVariantCancer(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "ReportedStructuralVariantCancer", "namespace":
"org.gel.models.report.avro", "fields": [{"name": "chromosome", "type": "string", "doc": ""},
{"name": "start", "type": "int", "doc": ""}, {"name": "end", "type": "int", "doc": ""}, {"name":
"type", "type": {"type": "record", "name": "StructuralVariantType", "doc": "", "fields": [{"name":
"firstLevelType", "type": {"type": "enum", "name": "StructuralVariantFirstLevelType", "doc": "",
"symbols": ["DEL", "INS", "DUP", "INV", "CNV", "DUP_TANDEM", "DEL_ME", "INS_ME"]}}, {"name":
"subtype", "type": ["null", "string"]}]}, "doc": ""}, {"name": "reference", "type": "string", "doc":
""}, {"name": "alternate", "type": "string", "doc": ""}, {"name":
"additionalTextualVariantAnnotations", "type": ["null", {"type": "map", "values": "string"}], "doc":
""}, {"name": "additionalNumericVariantAnnotations", "type": ["null", {"type": "map", "values":
"float"}], "doc": ""}, {"name": "comments", "type": ["null", {"type": "array", "items": "string"}],
"doc": ""}, {"name": "alleleOrigins", "type": {"type": "array", "items": {"type": "enum", "name":
"AlleleOrigin", "doc": "", "symbols": ["de_novo_variant", "germline_variant", "maternal_variant",
"paternal_variant", "pedigree_specific_variant", "population_specific_variant",
"somatic_variant"]}}, "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"additionalNumericVariantAnnotations",
"additionalTextualVariantAnnotations",
"alleleOrigins",
"alternate",
"chromosome",
"comments",
"end",
"reference",
"start",
"type",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {
'type': StructuralVariantType,
}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {
'type': StructuralVariantType,
}
return embeddedTypes[fieldName]
__slots__ = [
'additionalNumericVariantAnnotations',
'additionalTextualVariantAnnotations', 'alleleOrigins',
'alternate', 'chromosome', 'comments', 'end', 'reference',
'start', 'type'
]
def __init__(self, **kwargs):
self.additionalNumericVariantAnnotations = kwargs.get(
'additionalNumericVariantAnnotations', None)
self.additionalTextualVariantAnnotations = kwargs.get(
'additionalTextualVariantAnnotations', None)
self.alleleOrigins = kwargs.get(
'alleleOrigins', None)
self.alternate = kwargs.get(
'alternate', None)
self.chromosome = kwargs.get(
'chromosome', None)
self.comments = kwargs.get(
'comments', None)
self.end = kwargs.get(
'end', None)
self.reference = kwargs.get(
'reference', None)
self.start = kwargs.get(
'start', None)
self.type = kwargs.get(
'type', StructuralVariantType())
class ReportedVariant(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "ReportedVariant", "namespace": "org.gel.models.report.avro", "fields":
[{"name": "chromosome", "type": "string", "doc": ""}, {"name": "dbSnpId", "type": ["null",
"string"], "doc": ""}, {"name": "position", "type": "int", "doc": ""}, {"name": "reference", "type":
"string", "doc": ""}, {"name": "alternate", "type": "string", "doc": ""}, {"name":
"calledGenotypes", "type": {"type": "array", "items": {"type": "record", "name": "CalledGenotype",
"doc": "", "fields": [{"name": "gelId", "type": "string", "doc": ""}, {"name": "sampleId", "type":
"string", "doc": ""}, {"name": "genotype", "type": {"type": "enum", "name": "Zygosity", "doc": "",
"symbols": ["reference_homozygous", "heterozygous", "alternate_homozygous", "missing",
"half_missing_reference", "half_missing_alternate", "alternate_hemizigous", "reference_hemizigous",
"unk"]}, "doc": ""}, {"name": "phaseSet", "type": ["null", "int"], "doc": ""}, {"name":
"depthReference", "type": ["null", "int"], "doc": ""}, {"name": "depthAlternate", "type": ["null",
"int"], "doc": ""}, {"name": "copyNumber", "type": ["null", "int"], "doc": ""}]}}, "doc": ""},
{"name": "reportEvents", "type": {"type": "array", "items": {"type": "record", "name":
"ReportEvent", "fields": [{"name": "reportEventId", "type": "string", "doc": ""}, {"name":
"phenotype", "type": "string", "doc": ""}, {"name": "panelName", "type": ["null", "string"], "doc":
""}, {"name": "panelVersion", "type": ["null", "string"], "doc": ""}, {"name": "modeOfInheritance",
"type": {"type": "enum", "name": "ReportedModeOfInheritance", "doc": "", "symbols": ["monoallelic",
"monoallelic_not_imprinted", "monoallelic_maternally_imprinted", "monoallelic_paternally_imprinted",
"biallelic", "monoallelic_and_biallelic", "monoallelic_and_more_severe_biallelic",
"xlinked_biallelic", "xlinked_monoallelic", "mitochondrial", "unknown"]}, "doc": ""}, {"name":
"genomicFeature", "type": {"type": "record", "name": "GenomicFeature", "fields": [{"name":
"featureType", "type": {"type": "enum", "name": "FeatureTypes", "symbols": ["RegulatoryRegion",
"Gene", "Transcript"]}, "doc": ""}, {"name": "ensemblId", "type": "string", "doc": ""}, {"name":
"hgnc", "type": ["null", "string"], "doc": ""}, {"name": "otherIds", "type": ["null", {"type":
"map", "values": "string"}], "doc": ""}]}, "doc": ""}, {"name": "penetrance", "type": {"type":
"enum", "name": "Penetrance", "namespace": "org.gel.models.participant.avro", "doc": "", "symbols":
["complete", "incomplete"]}, "doc": ""}, {"name": "score", "type": "float", "doc": ""}, {"name":
"vendorSpecificScores", "type": ["null", {"type": "map", "values": "float"}], "doc": ""}, {"name":
"variantClassification", "type": ["null", {"type": "enum", "name": "VariantClassification", "doc":
"", "symbols": ["pathogenic_variant", "likely_pathogenic_variant",
"variant_of_unknown_clinical_significance", "likely_benign_variant", "benign_variant",
"not_assessed"]}], "doc": ""}, {"name": "fullyExplainsPhenotype", "type": ["null", "boolean"],
"doc": ""}, {"name": "groupOfVariants", "type": ["null", "int"], "doc": ""}, {"name":
"eventJustification", "type": ["null", "string"], "doc": ""}, {"name": "tier", "type": ["null",
{"type": "enum", "name": "Tier", "doc": "", "symbols": ["NONE", "TIER1", "TIER2", "TIER3"]}], "doc":
""}]}}, "doc": ""}, {"name": "additionalTextualVariantAnnotations", "type": ["null", {"type": "map",
"values": "string"}], "doc": ""}, {"name": "evidenceIds", "type": ["null", {"type": "map", "values":
"string"}], "doc": ""}, {"name": "additionalNumericVariantAnnotations", "type": ["null", {"type":
"map", "values": "float"}], "doc": ""}, {"name": "comments", "type": ["null", {"type": "array",
"items": "string"}], "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"additionalNumericVariantAnnotations",
"additionalTextualVariantAnnotations",
"alternate",
"calledGenotypes",
"chromosome",
"comments",
"dbSnpId",
"evidenceIds",
"position",
"reference",
"reportEvents",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {
'calledGenotypes': CalledGenotype,
'reportEvents': ReportEvent,
}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {
'calledGenotypes': CalledGenotype,
'reportEvents': ReportEvent,
}
return embeddedTypes[fieldName]
__slots__ = [
'additionalNumericVariantAnnotations',
'additionalTextualVariantAnnotations', 'alternate',
'calledGenotypes', 'chromosome', 'comments', 'dbSnpId',
'evidenceIds', 'position', 'reference', 'reportEvents'
]
def __init__(self, **kwargs):
self.additionalNumericVariantAnnotations = kwargs.get(
'additionalNumericVariantAnnotations', None)
self.additionalTextualVariantAnnotations = kwargs.get(
'additionalTextualVariantAnnotations', None)
self.alternate = kwargs.get(
'alternate', None)
self.calledGenotypes = kwargs.get(
'calledGenotypes', None)
self.chromosome = kwargs.get(
'chromosome', None)
self.comments = kwargs.get(
'comments', None)
self.dbSnpId = kwargs.get(
'dbSnpId', None)
self.evidenceIds = kwargs.get(
'evidenceIds', None)
self.position = kwargs.get(
'position', None)
self.reference = kwargs.get(
'reference', None)
self.reportEvents = kwargs.get(
'reportEvents', None)
class ReportedVariantCancer(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "ReportedVariantCancer", "namespace": "org.gel.models.report.avro",
"fields": [{"name": "chromosome", "type": "string", "doc": ""}, {"name": "position", "type": "int",
"doc": ""}, {"name": "reference", "type": "string", "doc": ""}, {"name": "alternate", "type":
"string", "doc": ""}, {"name": "cosmicIds", "type": ["null", {"type": "array", "items": "string"}],
"doc": ""}, {"name": "clinVarIds", "type": ["null", {"type": "array", "items": "string"}], "doc":
""}, {"name": "dbSnpId", "type": ["null", "string"], "doc": ""}, {"name": "cdnaChange", "type":
["null", "string"], "doc": ""}, {"name": "proteinChange", "type": ["null", "string"], "doc": ""},
{"name": "commonAf", "type": ["null", "int"], "doc": ""}, {"name": "ihp", "type": ["null", "int"],
"doc": ""}, {"name": "additionalTextualVariantAnnotations", "type": ["null", {"type": "map",
"values": "string"}], "doc": ""}, {"name": "additionalNumericVariantAnnotations", "type": ["null",
{"type": "map", "values": "float"}], "doc": ""}, {"name": "comments", "type": ["null", {"type":
"array", "items": "string"}], "doc": ""}, {"name": "reportEvents", "type": {"type": "array",
"items": {"type": "record", "name": "ReportEventCancer", "fields": [{"name": "reportEventId",
"type": "string", "doc": ""}, {"name": "genomicFeatureCancer", "type": {"type": "record", "name":
"GenomicFeatureCancer", "fields": [{"name": "featureType", "type": {"type": "enum", "name":
"FeatureTypeCancer", "doc": "", "symbols": ["regulatory_region", "gene", "transcript"]}, "doc": ""},
{"name": "ensemblId", "type": "string", "doc": ""}, {"name": "refSeqTranscriptId", "type": "string",
"doc": ""}, {"name": "refSeqProteinId", "type": "string", "doc": ""}, {"name": "geneName", "type":
"string", "doc": ""}, {"name": "roleInCancer", "type": ["null", {"type": "enum", "name":
"RoleInCancer", "doc": "", "symbols": ["oncogene", "tumor_suppressor_gene", "both"]}], "doc": ""}]},
"doc": ""}, {"name": "soTerms", "type": {"type": "array", "items": {"type": "record", "name":
"SoTerm", "doc": "", "fields": [{"name": "id", "type": "string", "doc": ""}, {"name": "name",
"type": "string", "doc": ""}]}}, "doc": ""}, {"name": "actions", "type": ["null", {"type": "array",
"items": {"type": "record", "name": "Action", "fields": [{"name": "actionType", "type": ["null",
{"type": "enum", "name": "ActionType", "doc": "", "symbols": ["therapy", "therapeutic", "prognosis",
"diagnosis"]}]}, {"name": "evidences", "type": ["null", {"type": "array", "items": "string"}],
"doc": ""}, {"name": "drug", "type": ["null", "string"], "doc": ""}, {"name": "status", "type":
["null", {"type": "enum", "name": "ActionStatus", "doc": "", "symbols": ["clinical",
"pre_clinical"]}], "doc": ""}, {"name": "variantActionable", "type": "boolean", "doc": ""}, {"name":
"comments", "type": ["null", {"type": "array", "items": "string"}], "doc": ""}, {"name": "url",
"type": ["null", "string"], "doc": ""}, {"name": "evidenceType", "type": ["null", "string"], "doc":
""}, {"name": "source", "type": "string", "doc": ""}]}}], "doc": ""}, {"name": "groupOfVariants",
"type": ["null", "int"], "doc": ""}, {"name": "eventJustification", "type": ["null", "string"],
"doc": ""}, {"name": "tier", "type": ["null", {"type": "enum", "name": "Tier", "doc": "", "symbols":
["NONE", "TIER1", "TIER2", "TIER3"]}], "doc": ""}]}}, "doc": ""}, {"name": "variantCalls", "type":
["null", {"type": "array", "items": {"type": "record", "name": "VariantCall", "fields": [{"name":
"sampleId", "type": "string", "doc": ""}, {"name": "depthReference", "type": ["null", "int"], "doc":
""}, {"name": "depthAlternate", "type": ["null", "int"], "doc": ""}, {"name": "vaf", "type":
["null", "double"], "doc": ""}]}}], "doc": ""}, {"name": "alleleOrigins", "type": {"type": "array",
"items": {"type": "enum", "name": "AlleleOrigin", "doc": "", "symbols": ["de_novo_variant",
"germline_variant", "maternal_variant", "paternal_variant", "pedigree_specific_variant",
"population_specific_variant", "somatic_variant"]}}, "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"additionalNumericVariantAnnotations",
"additionalTextualVariantAnnotations",
"alleleOrigins",
"alternate",
"cdnaChange",
"chromosome",
"clinVarIds",
"comments",
"commonAf",
"cosmicIds",
"dbSnpId",
"ihp",
"position",
"proteinChange",
"reference",
"reportEvents",
"variantCalls",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {
'reportEvents': ReportEventCancer,
'variantCalls': VariantCall,
}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {
'reportEvents': ReportEventCancer,
'variantCalls': VariantCall,
}
return embeddedTypes[fieldName]
__slots__ = [
'additionalNumericVariantAnnotations',
'additionalTextualVariantAnnotations', 'alleleOrigins',
'alternate', 'cdnaChange', 'chromosome', 'clinVarIds',
'comments', 'commonAf', 'cosmicIds', 'dbSnpId', 'ihp',
'position', 'proteinChange', 'reference', 'reportEvents',
'variantCalls'
]
def __init__(self, **kwargs):
self.additionalNumericVariantAnnotations = kwargs.get(
'additionalNumericVariantAnnotations', None)
self.additionalTextualVariantAnnotations = kwargs.get(
'additionalTextualVariantAnnotations', None)
self.alleleOrigins = kwargs.get(
'alleleOrigins', None)
self.alternate = kwargs.get(
'alternate', None)
self.cdnaChange = kwargs.get(
'cdnaChange', None)
self.chromosome = kwargs.get(
'chromosome', None)
self.clinVarIds = kwargs.get(
'clinVarIds', None)
self.comments = kwargs.get(
'comments', None)
self.commonAf = kwargs.get(
'commonAf', None)
self.cosmicIds = kwargs.get(
'cosmicIds', None)
self.dbSnpId = kwargs.get(
'dbSnpId', None)
self.ihp = kwargs.get(
'ihp', None)
self.position = kwargs.get(
'position', None)
self.proteinChange = kwargs.get(
'proteinChange', None)
self.reference = kwargs.get(
'reference', None)
self.reportEvents = kwargs.get(
'reportEvents', None)
self.variantCalls = kwargs.get(
'variantCalls', None)
class ReportingQuestion(object):
"""
No documentation
"""
yes = "yes"
no = "no"
na = "na"
def __hash__(self):
return str(self).__hash__()
class ReviewedParts(object):
"""
    An enumeration for which parts of the WGA were reviewed:

    * `domain_1`: Domain 1 only
    * `domain_1_and_2`: Domains 1 and 2
    * `domain_1_2_and_suplementary`: Domains 1, 2 and supplementary
      analysis
"""
domain_1 = "domain_1"
domain_1_and_2 = "domain_1_and_2"
domain_1_2_and_suplementary = "domain_1_2_and_suplementary"
def __hash__(self):
return str(self).__hash__()
class RoleInCancer(object):
"""
    The role of a given genomic feature in cancer:

    * `oncogene`: A gene that is a mutated (changed) form of a gene
      involved in normal cell growth. Oncogenes may cause the growth of
      cancer cells. Mutations in genes that become oncogenes can be
      inherited or caused by being exposed to substances in the
      environment that cause cancer.
      http://purl.obolibrary.org/obo/NCIT_C16936
    * `tumor_suppressor_gene`: A type of gene that makes a protein
      called a tumor suppressor protein that helps control cell growth.
      Mutations (changes in DNA) in antioncogenes may lead to cancer.
      http://purl.obolibrary.org/obo/NCIT_C17362
    * `both`: the feature can act as both an oncogene and a tumor
      suppressor gene
"""
oncogene = "oncogene"
tumor_suppressor_gene = "tumor_suppressor_gene"
both = "both"
def __hash__(self):
return str(self).__hash__()
class Sample(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "Sample", "namespace": "org.gel.models.participant.avro", "fields":
[{"name": "sampleId", "type": "string", "doc": ""}, {"name": "labSampleId", "type": "int", "doc":
""}, {"name": "source", "type": ["null", {"type": "enum", "name": "SampleSource", "symbols":
["BLOOD", "SALIVA", "FIBROBLAST", "TISSUE"]}], "doc": ""}, {"name": "product", "type": ["null",
{"type": "enum", "name": "Product", "symbols": ["DNA", "RNA"]}], "doc": ""}, {"name":
"preparationMethod", "type": ["null", {"type": "enum", "name": "PreparationMethod", "symbols":
["EDTA", "ORAGENE", "FF", "FFPE", "CD128_SORTED_CELLS", "ASPIRATE"]}], "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"labSampleId",
"preparationMethod",
"product",
"sampleId",
"source",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'labSampleId', 'preparationMethod', 'product', 'sampleId',
'source'
]
def __init__(self, **kwargs):
self.labSampleId = kwargs.get(
'labSampleId', None)
self.preparationMethod = kwargs.get(
'preparationMethod', None)
self.product = kwargs.get(
'product', None)
self.sampleId = kwargs.get(
'sampleId', None)
self.source = kwargs.get(
'source', None)
class SampleSource(object):
"""
No documentation
"""
TUMOUR = "TUMOUR"
BONE_MARROW_ASPIRATE_TUMOUR_SORTED_CELLS = "BONE_MARROW_ASPIRATE_TUMOUR_SORTED_CELLS"
BONE_MARROW_ASPIRATE_TUMOUR_CELLS = "BONE_MARROW_ASPIRATE_TUMOUR_CELLS"
BLOOD = "BLOOD"
SALIVA = "SALIVA"
FIBROBLAST = "FIBROBLAST"
TISSUE = "TISSUE"
def __hash__(self):
return str(self).__hash__()
class SegregationQuestion(object):
"""
No documentation
"""
yes = "yes"
no = "no"
def __hash__(self):
return str(self).__hash__()
class SensitiveInformation(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "SensitiveInformation", "namespace": "org.gel.models.participant.avro",
"fields": [{"name": "versionControl", "type": {"type": "record", "name": "VersionControl", "fields":
[{"name": "GitVersionControl", "type": "string", "doc": "", "default": "1.0.3"}]}, "doc": ""},
{"name": "gelID", "type": "string"}, {"name": "externalIds", "type": ["null", {"type": "array",
"items": "string"}]}, {"name": "genomicMedicineCenter", "type": ["null", "string"]}, {"name":
"fullNameOfResponsibleConsultant", "type": ["null", "string"]}, {"name": "contactNumber", "type":
["null", "string"]}, {"name": "hospitalOfResponsibleConsultant", "type": ["null", "string"]},
{"name": "centerSampleId", "type": ["null", "string"]}, {"name": "originatingCenter", "type":
["null", "string"]}, {"name": "centerPatientId", "type": ["null", "string"]}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"centerPatientId",
"centerSampleId",
"contactNumber",
"externalIds",
"fullNameOfResponsibleConsultant",
"gelID",
"genomicMedicineCenter",
"hospitalOfResponsibleConsultant",
"originatingCenter",
"versionControl",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {
'versionControl': VersionControl,
}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {
'versionControl': VersionControl,
}
return embeddedTypes[fieldName]
__slots__ = [
'centerPatientId', 'centerSampleId', 'contactNumber',
'externalIds', 'fullNameOfResponsibleConsultant', 'gelID',
'genomicMedicineCenter', 'hospitalOfResponsibleConsultant',
'originatingCenter', 'versionControl'
]
def __init__(self, **kwargs):
self.centerPatientId = kwargs.get(
'centerPatientId', None)
self.centerSampleId = kwargs.get(
'centerSampleId', None)
self.contactNumber = kwargs.get(
'contactNumber', None)
self.externalIds = kwargs.get(
'externalIds', None)
self.fullNameOfResponsibleConsultant = kwargs.get(
'fullNameOfResponsibleConsultant', None)
self.gelID = kwargs.get(
'gelID', None)
self.genomicMedicineCenter = kwargs.get(
'genomicMedicineCenter', None)
self.hospitalOfResponsibleConsultant = kwargs.get(
'hospitalOfResponsibleConsultant', None)
self.originatingCenter = kwargs.get(
'originatingCenter', None)
self.versionControl = kwargs.get(
'versionControl', VersionControl())
class Severity(object):
"""
No documentation
"""
BORDERLINE = "BORDERLINE"
MILD = "MILD"
MODERATE = "MODERATE"
SEVERE = "SEVERE"
PROFOUND = "PROFOUND"
def __hash__(self):
return str(self).__hash__()
class Sex(object):
"""
Sex
"""
MALE = "MALE"
FEMALE = "FEMALE"
UNKNOWN = "UNKNOWN"
def __hash__(self):
return str(self).__hash__()
class SoTerm(ProtocolElement):
"""
    A Sequence Ontology term, identified by its id and name (e.g. id =
    SO:0001816; name = non synonymous)
"""
_schemaSource = """
{"type": "record", "name": "SoTerm", "namespace": "org.gel.models.report.avro", "doc": "", "fields":
[{"name": "id", "type": "string", "doc": ""}, {"name": "name", "type": "string", "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"id",
"name",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'id', 'name'
]
def __init__(self, **kwargs):
self.id = kwargs.get(
'id', None)
self.name = kwargs.get(
'name', None)
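# A minimal sketch, reusing the example values from the SoTerm docstring
# (id = SO:0001816, name = non synonymous). Fields are passed as keyword
# arguments; anything omitted defaults to None, so a hand-rolled
# completeness check against `requiredFields` is shown here (hypothetical
# helper, not part of the generated API).
def _example_so_term():
    term = SoTerm(id="SO:0001816", name="non synonymous")
    missing = {f for f in SoTerm.requiredFields if getattr(term, f) is None}
    assert not missing, "unset required SoTerm fields: %s" % sorted(missing)
    return term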
class SpatialPattern(object):
"""
No documentation
"""
DISTAL = "DISTAL"
GENERALIZED = "GENERALIZED"
LOCALIZED = "LOCALIZED"
PROXIMAL = "PROXIMAL"
def __hash__(self):
return str(self).__hash__()
class StructuralVariantFirstLevelType(object):
"""
    The first level type must be one of the following:

    * `DEL` Deletion relative to the reference
    * `INS` Insertion of novel sequence relative to the reference
    * `DUP` Region of elevated copy number relative to the reference
    * `INV` Inversion of reference sequence
    * `CNV` Copy number variable region (may be both deletion and
      duplication); this category should not be used when a more
      specific one can be applied

    Reserved subtypes include:

    * `DUP:TANDEM` Tandem duplication
    * `DEL:ME` Deletion of mobile element relative to the reference
    * `INS:ME` Insertion of a mobile element relative to the reference
"""
DEL = "DEL"
INS = "INS"
DUP = "DUP"
INV = "INV"
CNV = "CNV"
DUP_TANDEM = "DUP_TANDEM"
DEL_ME = "DEL_ME"
INS_ME = "INS_ME"
def __hash__(self):
return str(self).__hash__()
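# A minimal translation sketch (an assumption for illustration, not part of
# the generated API): the VCF reserved subtypes above are written with a
# colon ("DUP:TANDEM", "DEL:ME", "INS:ME"), while the corresponding Avro
# enum symbols use an underscore (DUP_TANDEM, DEL_ME, INS_ME).
def _vcf_sv_type_to_symbol(vcf_type):
    """Map a VCF SV type such as "DEL:ME" to its enum symbol "DEL_ME"."""
    symbol = vcf_type.replace(":", "_")
    if symbol.startswith("_") or not hasattr(
            StructuralVariantFirstLevelType, symbol):
        raise ValueError("unknown structural variant type: %r" % vcf_type)
    return getattr(StructuralVariantFirstLevelType, symbol)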
class StructuralVariantType(ProtocolElement):
"""
    Structural variant type as defined by the VCF 4.2 specification for
    the ID field.
"""
_schemaSource = """
{"type": "record", "name": "StructuralVariantType", "namespace": "org.gel.models.report.avro",
"doc": "", "fields": [{"name": "firstLevelType", "type": {"type": "enum", "name":
"StructuralVariantFirstLevelType", "doc": "", "symbols": ["DEL", "INS", "DUP", "INV", "CNV",
"DUP_TANDEM", "DEL_ME", "INS_ME"]}}, {"name": "subtype", "type": ["null", "string"]}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"firstLevelType",
"subtype",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'firstLevelType', 'subtype'
]
def __init__(self, **kwargs):
self.firstLevelType = kwargs.get(
'firstLevelType', None)
self.subtype = kwargs.get(
'subtype', None)
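# A minimal construction sketch: `firstLevelType` takes one of the
# StructuralVariantFirstLevelType symbols and `subtype` is nullable in the
# schema above. Note that `subtype` still appears in `requiredFields`; in
# this generated code, requiredFields appears to track fields without a
# schema default rather than non-null fields (an observation, not a
# documented guarantee).
def _example_structural_variant_type():
    return StructuralVariantType(
        firstLevelType=StructuralVariantFirstLevelType.DUP_TANDEM,
        subtype=None,  # ["null", "string"] in the Avro schema
    )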
class SupportingEvidences(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "SupportingEvidences", "namespace": "org.gel.models.report.avro",
"fields": [{"name": "previousSupportingEvidences", "type": {"type": "array", "items": "string"}},
{"name": "modifiedSupportingEvidences", "type": {"type": "array", "items": "string"}}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"modifiedSupportingEvidences",
"previousSupportingEvidences",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'modifiedSupportingEvidences', 'previousSupportingEvidences'
]
def __init__(self, **kwargs):
self.modifiedSupportingEvidences = kwargs.get(
'modifiedSupportingEvidences', None)
self.previousSupportingEvidences = kwargs.get(
'previousSupportingEvidences', None)
class TernaryOption(object):
"""
This defines a yes/no/unknown case
"""
yes = "yes"
no = "no"
unknown = "unknown"
def __hash__(self):
return str(self).__hash__()
class Tier(object):
"""
Possible tiers as defined by Genomics England
"""
NONE = "NONE"
TIER1 = "TIER1"
TIER2 = "TIER2"
TIER3 = "TIER3"
def __hash__(self):
return str(self).__hash__()
class TieringResult(ProtocolElement):
"""
    Represents the results of a tiering process together with its input
    patient data. TieringResult is simply a more specific, less
    ambiguous term for "cohort". We cannot reuse the
    InterpretationRequestRD structure because there may be many
    interpretation requests for the same tiering results.
"""
_schemaSource = """
{"type": "record", "name": "TieringResult", "namespace": "org.gel.models.report.avro", "doc": "",
"fields": [{"name": "versionControl", "type": {"type": "record", "name": "ReportVersionControl",
"fields": [{"name": "gitVersionControl", "type": "string", "doc": "", "default": "4.2.0"}]}, "doc":
""}, {"name": "genomeAssemblyVersion", "type": "string", "doc": "", "default": "GRCh37.p13"},
{"name": "cellbaseVersion", "type": "string", "doc": "", "default": "4.0"}, {"name": "workspace",
"type": {"type": "array", "items": "string"}, "doc": ""}, {"name": "bams", "type": {"type": "array",
"items": {"type": "record", "name": "File", "doc": "", "fields": [{"name": "sampleId", "type":
["null", {"type": "array", "items": "string"}], "doc": ""}, {"name": "uriFile", "type": "string",
"doc": ""}, {"name": "fileType", "type": {"type": "enum", "name": "FileType", "symbols": ["BAM",
"gVCF", "VCF_small", "VCF_somatic_small", "VCF_CNV", "VCF_somatic_CNV", "VCF_SV", "VCF_somatic_SV",
"VCF_SV_CNV", "SVG", "ANN", "BigWig", "MD5Sum", "ROH", "OTHER", "PARTITION", "VARIANT_FREQUENCIES",
"COVERAGE"]}}, {"name": "md5Sum", "type": ["null", "string"]}]}}, "doc": ""}, {"name": "vcfs",
"type": {"type": "array", "items": "File"}, "doc": ""}, {"name": "bigWigs", "type": ["null",
{"type": "array", "items": "File"}], "doc": ""}, {"name": "pedigreeDiagram", "type": ["null",
"File"], "doc": ""}, {"name": "annotationFile", "type": ["null", "File"], "doc": ""}, {"name":
"otherFiles", "type": ["null", {"type": "map", "values": "File"}], "doc": ""}, {"name": "pedigree",
"type": {"type": "record", "name": "Pedigree", "namespace": "org.gel.models.participant.avro",
"doc": "", "fields": [{"name": "versionControl", "type": ["null", {"type": "record", "name":
"VersionControl", "fields": [{"name": "GitVersionControl", "type": "string", "doc": "", "default":
"1.0.3"}]}], "doc": ""}, {"name": "LDPCode", "type": ["null", "string"]}, {"name": "familyId",
"type": "string", "doc": ""}, {"name": "members", "type": {"type": "array", "items": {"type":
"record", "name": "PedigreeMember", "doc": "", "fields": [{"name": "pedigreeId", "type": ["null",
"int"], "doc": ""}, {"name": "isProband", "type": ["null", "boolean"], "doc": ""}, {"name":
"participantId", "type": ["null", "string"], "doc": ""}, {"name": "participantQCState", "type":
["null", {"type": "enum", "name": "ParticipantQCState", "doc": "", "symbols": ["noState",
"passedMedicalReviewReadyForInterpretation", "passedMedicalReviewNotReadyForInterpretation",
"queryToGel", "queryToGMC", "failed"]}], "doc": ""}, {"name": "gelSuperFamilyId", "type": ["null",
"string"], "doc": ""}, {"name": "sex", "type": {"type": "enum", "name": "Sex", "doc": "", "symbols":
["MALE", "FEMALE", "UNKNOWN"]}, "doc": ""}, {"name": "personKaryotypicSex", "type": ["null",
{"type": "enum", "name": "PersonKaryotipicSex", "doc": "", "symbols": ["UNKNOWN", "XX", "XY", "XO",
"XXY", "XXX", "XXYY", "XXXY", "XXXX", "XYY", "OTHER"]}], "doc": ""}, {"name": "yearOfBirth", "type":
["null", "int"], "doc": ""}, {"name": "fatherId", "type": ["null", "int"], "doc": ""}, {"name":
"motherId", "type": ["null", "int"], "doc": ""}, {"name": "superFatherId", "type": ["null", "int"],
"doc": ""}, {"name": "superMotherId", "type": ["null", "int"], "doc": ""}, {"name": "twinGroup",
"type": ["null", "int"], "doc": ""}, {"name": "monozygotic", "type": ["null", {"type": "enum",
"name": "TernaryOption", "doc": "", "symbols": ["yes", "no", "unknown"]}], "doc": ""}, {"name":
"adoptedStatus", "type": ["null", {"type": "enum", "name": "AdoptedStatus", "doc": "", "symbols":
["notadopted", "adoptedin", "adoptedout"]}], "doc": ""}, {"name": "lifeStatus", "type": ["null",
{"type": "enum", "name": "LifeStatus", "doc": "", "symbols": ["ALIVE", "ABORTED", "DECEASED",
"UNBORN", "STILLBORN", "MISCARRIAGE"]}], "doc": ""}, {"name": "consanguineousParents", "type":
["null", "TernaryOption"], "doc": ""}, {"name": "affectionStatus", "type": ["null", {"type": "enum",
"name": "AffectionStatus", "doc": "", "symbols": ["UNAFFECTED", "AFFECTED", "UNCERTAIN"]}], "doc":
""}, {"name": "disorderList", "type": ["null", {"type": "array", "items": {"type": "record", "name":
"Disorder", "doc": "", "fields": [{"name": "diseaseGroup", "type": ["null", "string"], "doc": ""},
{"name": "diseaseSubGroup", "type": ["null", "string"], "doc": ""}, {"name": "specificDisease",
"type": ["null", "string"], "doc": ""}, {"name": "ageOfOnset", "type": ["null", "float"], "doc":
""}]}}], "doc": ""}, {"name": "hpoTermList", "type": ["null", {"type": "array", "items": {"type":
"record", "name": "HpoTerm", "doc": "", "fields": [{"name": "term", "type": "string", "doc": ""},
{"name": "termPresence", "type": ["null", "TernaryOption"], "doc": ""}, {"name": "hpoBuildNumber",
"type": ["null", "string"], "doc": ""}, {"name": "modifiers", "type": ["null", {"type": "record",
"name": "HpoTermModifiers", "fields": [{"name": "laterality", "type": ["null", {"type": "enum",
"name": "Laterality", "symbols": ["RIGHT", "UNILATERAL", "BILATERAL", "LEFT"]}]}, {"name":
"progression", "type": ["null", {"type": "enum", "name": "Progression", "symbols": ["PROGRESSIVE",
"NONPROGRESSIVE"]}]}, {"name": "severity", "type": ["null", {"type": "enum", "name": "Severity",
"symbols": ["BORDERLINE", "MILD", "MODERATE", "SEVERE", "PROFOUND"]}]}, {"name": "spatialPattern",
"type": ["null", {"type": "enum", "name": "SpatialPattern", "symbols": ["DISTAL", "GENERALIZED",
"LOCALIZED", "PROXIMAL"]}]}]}], "doc": ""}, {"name": "ageOfOnset", "type": ["null", {"type": "enum",
"name": "AgeOfOnset", "symbols": ["EMBRYONAL_ONSET", "FETAL_ONSET", "NEONATAL_ONSET",
"INFANTILE_ONSET", "CHILDHOOD_ONSET", "JUVENILE_ONSET", "YOUNG_ADULT_ONSET", "LATE_ONSET",
"MIDDLE_AGE_ONSET"]}], "doc": ""}]}}], "doc": ""}, {"name": "ancestries", "type": ["null", {"type":
"record", "name": "Ancestries", "doc": "", "fields": [{"name": "mothersEthnicOrigin", "type":
["null", {"type": "enum", "name": "EthnicCategory", "doc": "", "symbols": ["D", "E", "F", "G", "A",
"B", "C", "L", "M", "N", "H", "J", "K", "P", "S", "R", "Z"]}], "doc": ""}, {"name":
"mothersOtherRelevantAncestry", "type": ["null", "string"], "doc": ""}, {"name":
"fathersEthnicOrigin", "type": ["null", "EthnicCategory"], "doc": ""}, {"name":
"fathersOtherRelevantAncestry", "type": ["null", "string"], "doc": ""}, {"name":
"chiSquare1KGenomesPhase3Pop", "type": ["null", {"type": "array", "items": {"type": "record",
"name": "ChiSquare1KGenomesPhase3Pop", "doc": "", "fields": [{"name": "kgSuperPopCategory", "type":
{"type": "enum", "name": "KgSuperPopCategory", "doc": "", "symbols": ["AFR", "AMR", "EAS", "EUR",
"SAS"]}, "doc": ""}, {"name": "kgPopCategory", "type": ["null", {"type": "enum", "name":
"KgPopCategory", "doc": "", "symbols": ["ACB", "ASW", "BEB", "CDX", "CEU", "CHB", "CHS", "CLM",
"ESN", "FIN", "GBR", "GIH", "GWD", "IBS", "ITU", "JPT", "KHV", "LWK", "MSL", "MXL", "PEL", "PJL",
"PUR", "STU", "TSI", "YRI"]}], "doc": ""}, {"name": "chiSquare", "type": "double", "doc": ""}]}}],
"doc": ""}]}], "doc": ""}, {"name": "consentStatus", "type": ["null", {"type": "record", "name":
"ConsentStatus", "doc": "", "fields": [{"name": "programmeConsent", "type": "boolean", "doc": "",
"default": false}, {"name": "primaryFindingConsent", "type": "boolean", "doc": "", "default":
false}, {"name": "secondaryFindingConsent", "type": "boolean", "doc": "", "default": false},
{"name": "carrierStatusConsent", "type": "boolean", "doc": "", "default": false}]}], "doc": ""},
{"name": "samples", "type": ["null", {"type": "array", "items": {"type": "record", "name": "Sample",
"fields": [{"name": "sampleId", "type": "string", "doc": ""}, {"name": "labSampleId", "type": "int",
"doc": ""}, {"name": "source", "type": ["null", {"type": "enum", "name": "SampleSource", "symbols":
["BLOOD", "SALIVA", "FIBROBLAST", "TISSUE"]}], "doc": ""}, {"name": "product", "type": ["null",
{"type": "enum", "name": "Product", "symbols": ["DNA", "RNA"]}], "doc": ""}, {"name":
"preparationMethod", "type": ["null", {"type": "enum", "name": "PreparationMethod", "symbols":
["EDTA", "ORAGENE", "FF", "FFPE", "CD128_SORTED_CELLS", "ASPIRATE"]}], "doc": ""}]}}], "doc": ""},
{"name": "inbreedingCoefficient", "type": ["null", {"type": "record", "name":
"InbreedingCoefficient", "doc": "", "fields": [{"name": "sampleId", "type": "string", "doc": ""},
{"name": "program", "type": "string", "doc": ""}, {"name": "version", "type": "string", "doc": ""},
{"name": "estimationMethod", "type": "string", "doc": ""}, {"name": "coefficient", "type": "double",
"doc": ""}, {"name": "standardError", "type": ["null", "double"], "doc": ""}]}], "doc": ""},
{"name": "additionalInformation", "type": ["null", {"type": "map", "values": "string"}], "doc":
""}]}}}, {"name": "analysisPanels", "type": ["null", {"type": "array", "items": {"type": "record",
"name": "AnalysisPanel", "fields": [{"name": "specificDisease", "type": "string"}, {"name":
"panelName", "type": "string"}, {"name": "panelVersion", "type": ["null", "string"]}, {"name":
"reviewOutcome", "type": "string"}, {"name": "multipleGeneticOrigins", "type": "string"}]}}]},
{"name": "diseasePenetrances", "type": ["null", {"type": "array", "items": {"type": "record",
"name": "DiseasePenetrance", "fields": [{"name": "specificDisease", "type": "string"}, {"name":
"penetrance", "type": {"type": "enum", "name": "Penetrance", "doc": "", "symbols": ["complete",
"incomplete"]}}]}}]}, {"name": "readyForAnalysis", "type": "boolean"}, {"name": "familyQCState",
"type": ["null", {"type": "enum", "name": "FamilyQCState", "doc": "", "symbols": ["noState",
"passedMedicalReviewReadyForInterpretation", "passedMedicalReviewNotReadyForInterpretation",
"queryToGel", "queryToGMC", "failed"]}]}]}, "doc": ""}, {"name": "tieredVariants", "type": {"type":
"array", "items": {"type": "record", "name": "ReportedVariant", "fields": [{"name": "chromosome",
"type": "string", "doc": ""}, {"name": "dbSnpId", "type": ["null", "string"], "doc": ""}, {"name":
"position", "type": "int", "doc": ""}, {"name": "reference", "type": "string", "doc": ""}, {"name":
"alternate", "type": "string", "doc": ""}, {"name": "calledGenotypes", "type": {"type": "array",
"items": {"type": "record", "name": "CalledGenotype", "doc": "", "fields": [{"name": "gelId",
"type": "string", "doc": ""}, {"name": "sampleId", "type": "string", "doc": ""}, {"name":
"genotype", "type": {"type": "enum", "name": "Zygosity", "doc": "", "symbols":
["reference_homozygous", "heterozygous", "alternate_homozygous", "missing",
"half_missing_reference", "half_missing_alternate", "alternate_hemizigous", "reference_hemizigous",
"unk"]}, "doc": ""}, {"name": "phaseSet", "type": ["null", "int"], "doc": ""}, {"name":
"depthReference", "type": ["null", "int"], "doc": ""}, {"name": "depthAlternate", "type": ["null",
"int"], "doc": ""}, {"name": "copyNumber", "type": ["null", "int"], "doc": ""}]}}, "doc": ""},
{"name": "reportEvents", "type": {"type": "array", "items": {"type": "record", "name":
"ReportEvent", "fields": [{"name": "reportEventId", "type": "string", "doc": ""}, {"name":
"phenotype", "type": "string", "doc": ""}, {"name": "panelName", "type": ["null", "string"], "doc":
""}, {"name": "panelVersion", "type": ["null", "string"], "doc": ""}, {"name": "modeOfInheritance",
"type": {"type": "enum", "name": "ReportedModeOfInheritance", "doc": "", "symbols": ["monoallelic",
"monoallelic_not_imprinted", "monoallelic_maternally_imprinted", "monoallelic_paternally_imprinted",
"biallelic", "monoallelic_and_biallelic", "monoallelic_and_more_severe_biallelic",
"xlinked_biallelic", "xlinked_monoallelic", "mitochondrial", "unknown"]}, "doc": ""}, {"name":
"genomicFeature", "type": {"type": "record", "name": "GenomicFeature", "fields": [{"name":
"featureType", "type": {"type": "enum", "name": "FeatureTypes", "symbols": ["RegulatoryRegion",
"Gene", "Transcript"]}, "doc": ""}, {"name": "ensemblId", "type": "string", "doc": ""}, {"name":
"hgnc", "type": ["null", "string"], "doc": ""}, {"name": "otherIds", "type": ["null", {"type":
"map", "values": "string"}], "doc": ""}]}, "doc": ""}, {"name": "penetrance", "type":
"org.gel.models.participant.avro.Penetrance", "doc": ""}, {"name": "score", "type": "float", "doc":
""}, {"name": "vendorSpecificScores", "type": ["null", {"type": "map", "values": "float"}], "doc":
""}, {"name": "variantClassification", "type": ["null", {"type": "enum", "name":
"VariantClassification", "doc": "", "symbols": ["pathogenic_variant", "likely_pathogenic_variant",
"variant_of_unknown_clinical_significance", "likely_benign_variant", "benign_variant",
"not_assessed"]}], "doc": ""}, {"name": "fullyExplainsPhenotype", "type": ["null", "boolean"],
"doc": ""}, {"name": "groupOfVariants", "type": ["null", "int"], "doc": ""}, {"name":
"eventJustification", "type": ["null", "string"], "doc": ""}, {"name": "tier", "type": ["null",
{"type": "enum", "name": "Tier", "doc": "", "symbols": ["NONE", "TIER1", "TIER2", "TIER3"]}], "doc":
""}]}}, "doc": ""}, {"name": "additionalTextualVariantAnnotations", "type": ["null", {"type": "map",
"values": "string"}], "doc": ""}, {"name": "evidenceIds", "type": ["null", {"type": "map", "values":
"string"}], "doc": ""}, {"name": "additionalNumericVariantAnnotations", "type": ["null", {"type":
"map", "values": "float"}], "doc": ""}, {"name": "comments", "type": ["null", {"type": "array",
"items": "string"}], "doc": ""}]}}, "doc": ""}, {"name": "tieringVersion", "type": "string", "doc":
""}, {"name": "internalStudyId", "type": "string", "doc": ""}, {"name": "complexGeneticPhenomena",
"type": ["null", {"type": "enum", "name": "ComplexGeneticPhenomena", "symbols": ["mosaicism",
"monosomy", "disomy", "uniparental_disomy", "trisomy", "other_aneuploidy"]}], "doc": ""}, {"name":
"otherFamilyHistory", "type": ["null", {"type": "record", "name": "OtherFamilyHistory", "doc": "",
"fields": [{"name": "maternalFamilyHistory", "type": ["null", {"type": "array", "items": "string"}],
"doc": ""}, {"name": "paternalFamilyHistory", "type": ["null", {"type": "array", "items":
"string"}], "doc": ""}]}], "doc": ""}, {"name": "genePanelsCoverage", "type": ["null", {"type":
"map", "values": {"type": "map", "values": {"type": "map", "values": "float"}}}], "doc": ""},
{"name": "additionalInfo", "type": ["null", {"type": "map", "values": "string"}], "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"additionalInfo",
"annotationFile",
"bams",
"bigWigs",
"complexGeneticPhenomena",
"genePanelsCoverage",
"internalStudyId",
"otherFamilyHistory",
"otherFiles",
"pedigree",
"pedigreeDiagram",
"tieredVariants",
"tieringVersion",
"vcfs",
"versionControl",
"workspace",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {
'annotationFile': File,
'bams': File,
'bigWigs': File,
'otherFamilyHistory': OtherFamilyHistory,
'otherFiles': File,
'pedigree': Pedigree,
'pedigreeDiagram': File,
'tieredVariants': ReportedVariant,
'vcfs': File,
'versionControl': ReportVersionControl,
}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {
'annotationFile': File,
'bams': File,
'bigWigs': File,
'otherFamilyHistory': OtherFamilyHistory,
'otherFiles': File,
'pedigree': Pedigree,
'pedigreeDiagram': File,
'tieredVariants': ReportedVariant,
'vcfs': File,
'versionControl': ReportVersionControl,
}
return embeddedTypes[fieldName]
__slots__ = [
'additionalInfo', 'annotationFile', 'bams', 'bigWigs',
'cellbaseVersion', 'complexGeneticPhenomena',
'genePanelsCoverage', 'genomeAssemblyVersion',
'internalStudyId', 'otherFamilyHistory', 'otherFiles',
'pedigree', 'pedigreeDiagram', 'tieredVariants',
'tieringVersion', 'vcfs', 'versionControl', 'workspace'
]
def __init__(self, **kwargs):
self.additionalInfo = kwargs.get(
'additionalInfo', None)
self.annotationFile = kwargs.get(
'annotationFile', None)
self.bams = kwargs.get(
'bams', None)
self.bigWigs = kwargs.get(
'bigWigs', None)
self.cellbaseVersion = kwargs.get(
'cellbaseVersion', '4.0')
self.complexGeneticPhenomena = kwargs.get(
'complexGeneticPhenomena', None)
self.genePanelsCoverage = kwargs.get(
'genePanelsCoverage', None)
self.genomeAssemblyVersion = kwargs.get(
'genomeAssemblyVersion', 'GRCh37.p13')
self.internalStudyId = kwargs.get(
'internalStudyId', None)
self.otherFamilyHistory = kwargs.get(
'otherFamilyHistory', None)
self.otherFiles = kwargs.get(
'otherFiles', None)
self.pedigree = kwargs.get(
'pedigree', Pedigree())
self.pedigreeDiagram = kwargs.get(
'pedigreeDiagram', None)
self.tieredVariants = kwargs.get(
'tieredVariants', None)
self.tieringVersion = kwargs.get(
'tieringVersion', None)
self.vcfs = kwargs.get(
'vcfs', None)
self.versionControl = kwargs.get(
'versionControl', ReportVersionControl())
self.workspace = kwargs.get(
'workspace', None)
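# A minimal sketch of the embedded-type hooks: for a given field name,
# isEmbeddedType says whether the value holds (an array or map of) another
# ProtocolElement, and getEmbeddedType returns that class. The helper below
# is hypothetical and only illustrates the dispatch.
def _embedded_field_types(cls):
    """Return a {fieldName: ProtocolElement subclass} map for `cls`."""
    return {
        name: cls.getEmbeddedType(name)
        for name in cls.__slots__
        if cls.isEmbeddedType(name)
    }
# e.g. _embedded_field_types(TieringResult)['pedigree'] is Pedigree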
class TissueSource(object):
"""
No documentation
"""
BMA_TUMOUR_SORTED_CELLS = "BMA_TUMOUR_SORTED_CELLS"
CT_GUIDED_BIOPSY = "CT_GUIDED_BIOPSY"
ENDOSCOPIC_BIOPSY = "ENDOSCOPIC_BIOPSY"
ENDOSCOPIC_ULTRASOUND_GUIDED_BIOPSY = "ENDOSCOPIC_ULTRASOUND_GUIDED_BIOPSY"
ENDOSCOPIC_ULTRASOUND_GUIDED_FNA = "ENDOSCOPIC_ULTRASOUND_GUIDED_FNA"
LAPAROSCOPIC_BIOPSY = "LAPAROSCOPIC_BIOPSY"
LAPAROSCOPIC_EXCISION = "LAPAROSCOPIC_EXCISION"
MRI_GUIDED_BIOPSY = "MRI_GUIDED_BIOPSY"
NON_GUIDED_BIOPSY = "NON_GUIDED_BIOPSY"
SURGICAL_RESECTION = "SURGICAL_RESECTION"
STEREOTACTICALLY_GUIDED_BIOPSY = "STEREOTACTICALLY_GUIDED_BIOPSY"
USS_GUIDED_BIOPSY = "USS_GUIDED_BIOPSY"
NON_STANDARD_BIOPSY = "NON_STANDARD_BIOPSY"
def __hash__(self):
return str(self).__hash__()
class TumourContent(object):
"""
No documentation
"""
High = "High"
Medium = "Medium"
Low = "Low"
def __hash__(self):
return str(self).__hash__()
class TumourSample(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "TumourSample", "namespace": "org.gel.models.participant.avro", "fields":
[{"name": "sampleId", "type": "string", "doc": ""}, {"name": "labSampleId", "type": "int", "doc":
""}, {"name": "LDPCode", "type": "string", "doc": ""}, {"name": "tumourId", "type": "string", "doc":
""}, {"name": "programmePhase", "type": ["null", {"type": "enum", "name": "ProgrammePhase",
"symbols": ["CRUK", "OXFORD", "CLL", "IIP", "MAIN", "EXPT"]}], "doc": ""}, {"name": "diseaseType",
"type": ["null", {"type": "enum", "name": "diseaseType", "symbols": ["ADULT_GLIOMA", "BLADDER",
"BREAST", "CARCINOMA_OF_UNKNOWN_PRIMARY", "CHILDHOOD", "COLORECTAL", "ENDOMETRIAL_CARCINOMA",
"HAEMONC", "HEPATOPANCREATOBILIARY", "LUNG", "MALIGNANT_MELANOMA", "NASOPHARYNGEAL",
"ORAL_OROPHARYNGEAL", "OVARIAN", "PROSTATE", "RENAL", "SARCOMA", "SINONASAL",
"TESTICULAR_GERM_CELL_TUMOURS", "UPPER_GASTROINTESTINAL",
"NON_HODGKINS_B_CELL_LYMPHOMA_LOW_MOD_GRADE", "CLASSICAL_HODGKINS",
"NODULAR_LYMPHOCYTE_PREDOMINANT_HODGKINS", "T_CELL_LYMPHOMA"]}], "doc": ""}, {"name":
"diseaseSubType", "type": ["null", "string"], "doc": ""}, {"name": "clinicalSampleDateTime", "type":
["null", "string"], "doc": ""}, {"name": "tumourType", "type": ["null", {"type": "enum", "name":
"TumourType", "symbols": ["PRIMARY", "METASTATIC_RECURRENCE", "RECURRENCE_OF_PRIMARY_TUMOUR",
"METASTASES"]}], "doc": ""}, {"name": "tumourContent", "type": ["null", {"type": "enum", "name":
"TumourContent", "symbols": ["High", "Medium", "Low"]}], "doc": ""}, {"name": "source", "type":
["null", {"type": "enum", "name": "SampleSource", "symbols": ["TUMOUR",
"BONE_MARROW_ASPIRATE_TUMOUR_SORTED_CELLS", "BONE_MARROW_ASPIRATE_TUMOUR_CELLS", "BLOOD", "SALIVA",
"FIBROBLAST", "TISSUE"]}], "doc": ""}, {"name": "preparationMethod", "type": ["null", {"type":
"enum", "name": "PreparationMethod", "symbols": ["EDTA", "ORAGENE", "FF", "FFPE",
"CD128_SORTED_CELLS", "ASPIRATE"]}], "doc": ""}, {"name": "tissueSource", "type": ["null", {"type":
"enum", "name": "TissueSource", "symbols": ["BMA_TUMOUR_SORTED_CELLS", "CT_GUIDED_BIOPSY",
"ENDOSCOPIC_BIOPSY", "ENDOSCOPIC_ULTRASOUND_GUIDED_BIOPSY", "ENDOSCOPIC_ULTRASOUND_GUIDED_FNA",
"LAPAROSCOPIC_BIOPSY", "LAPAROSCOPIC_EXCISION", "MRI_GUIDED_BIOPSY", "NON_GUIDED_BIOPSY",
"SURGICAL_RESECTION", "STEREOTACTICALLY_GUIDED_BIOPSY", "USS_GUIDED_BIOPSY",
"NON_STANDARD_BIOPSY"]}], "doc": ""}, {"name": "product", "type": ["null", {"type": "enum", "name":
"Product", "symbols": ["DNA", "RNA"]}], "doc": ""}, {"name": "morphologyICD", "type": ["null",
"string"], "doc": ""}, {"name": "morphologySnomedCT", "type": ["null", "string"], "doc": ""},
{"name": "morphologySnomedRT", "type": ["null", "string"], "doc": ""}, {"name": "topographyICD",
"type": ["null", "string"], "doc": ""}, {"name": "topographySnomedCT", "type": ["null", "string"],
"doc": ""}, {"name": "topographySnomedRT", "type": ["null", "string"], "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"LDPCode",
"clinicalSampleDateTime",
"diseaseSubType",
"diseaseType",
"labSampleId",
"morphologyICD",
"morphologySnomedCT",
"morphologySnomedRT",
"preparationMethod",
"product",
"programmePhase",
"sampleId",
"source",
"tissueSource",
"topographyICD",
"topographySnomedCT",
"topographySnomedRT",
"tumourContent",
"tumourId",
"tumourType",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'LDPCode', 'clinicalSampleDateTime', 'diseaseSubType',
'diseaseType', 'labSampleId', 'morphologyICD',
'morphologySnomedCT', 'morphologySnomedRT',
'preparationMethod', 'product', 'programmePhase', 'sampleId',
'source', 'tissueSource', 'topographyICD',
'topographySnomedCT', 'topographySnomedRT', 'tumourContent',
'tumourId', 'tumourType'
]
def __init__(self, **kwargs):
self.LDPCode = kwargs.get(
'LDPCode', None)
self.clinicalSampleDateTime = kwargs.get(
'clinicalSampleDateTime', None)
self.diseaseSubType = kwargs.get(
'diseaseSubType', None)
self.diseaseType = kwargs.get(
'diseaseType', None)
self.labSampleId = kwargs.get(
'labSampleId', None)
self.morphologyICD = kwargs.get(
'morphologyICD', None)
self.morphologySnomedCT = kwargs.get(
'morphologySnomedCT', None)
self.morphologySnomedRT = kwargs.get(
'morphologySnomedRT', None)
self.preparationMethod = kwargs.get(
'preparationMethod', None)
self.product = kwargs.get(
'product', None)
self.programmePhase = kwargs.get(
'programmePhase', None)
self.sampleId = kwargs.get(
'sampleId', None)
self.source = kwargs.get(
'source', None)
self.tissueSource = kwargs.get(
'tissueSource', None)
self.topographyICD = kwargs.get(
'topographyICD', None)
self.topographySnomedCT = kwargs.get(
'topographySnomedCT', None)
self.topographySnomedRT = kwargs.get(
'topographySnomedRT', None)
self.tumourContent = kwargs.get(
'tumourContent', None)
self.tumourId = kwargs.get(
'tumourId', None)
self.tumourType = kwargs.get(
'tumourType', None)
class TumourType(object):
"""
No documentation
"""
PRIMARY = "PRIMARY"
METASTATIC_RECURRENCE = "METASTATIC_RECURRENCE"
RECURRENCE_OF_PRIMARY_TUMOUR = "RECURRENCE_OF_PRIMARY_TUMOUR"
METASTASES = "METASTASES"
def __hash__(self):
return str(self).__hash__()
class VariantCall(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "VariantCall", "namespace": "org.gel.models.report.avro", "fields":
[{"name": "sampleId", "type": "string", "doc": ""}, {"name": "depthReference", "type": ["null",
"int"], "doc": ""}, {"name": "depthAlternate", "type": ["null", "int"], "doc": ""}, {"name": "vaf",
"type": ["null", "double"], "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"depthAlternate",
"depthReference",
"sampleId",
"vaf",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'depthAlternate', 'depthReference', 'sampleId', 'vaf'
]
def __init__(self, **kwargs):
self.depthAlternate = kwargs.get(
'depthAlternate', None)
self.depthReference = kwargs.get(
'depthReference', None)
self.sampleId = kwargs.get(
'sampleId', None)
self.vaf = kwargs.get(
'vaf', None)
class VariantClassification(object):
"""
This is the classification of the variant according to standard
practice guidelines (e.g. ACMG)
"""
pathogenic_variant = "pathogenic_variant"
likely_pathogenic_variant = "likely_pathogenic_variant"
variant_of_unknown_clinical_significance = "variant_of_unknown_clinical_significance"
likely_benign_variant = "likely_benign_variant"
benign_variant = "benign_variant"
not_assessed = "not_assessed"
def __hash__(self):
return str(self).__hash__()
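# A minimal cross-check sketch: VariantClassification uses the same symbol
# set as the ACMGClassification enum embedded in the VariantLevelQuestions
# schema below, so a classification string can be carried across directly.
# `_ACMG_SYMBOLS` is a hypothetical name introduced here for illustration.
_ACMG_SYMBOLS = frozenset([
    VariantClassification.pathogenic_variant,
    VariantClassification.likely_pathogenic_variant,
    VariantClassification.variant_of_unknown_clinical_significance,
    VariantClassification.likely_benign_variant,
    VariantClassification.benign_variant,
    VariantClassification.not_assessed,
])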
class VariantGroupLevelQuestions(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "VariantGroupLevelQuestions", "namespace": "org.gel.models.report.avro",
"fields": [{"name": "variantGroup", "type": "int"}, {"name": "variantLevelQuestions", "type":
{"type": "array", "items": {"type": "record", "name": "VariantLevelQuestions", "fields": [{"name":
"variantDetails", "type": "string", "doc": ""}, {"name": "confirmationDecision", "type": {"type":
"enum", "name": "ConfirmationDecision", "symbols": ["yes", "no", "na"]}, "doc": ""}, {"name":
"confirmationOutcome", "type": {"type": "enum", "name": "ConfirmationOutcome", "symbols": ["yes",
"no", "na"]}, "doc": ""}, {"name": "reportingQuestion", "type": {"type": "enum", "name":
"ReportingQuestion", "symbols": ["yes", "no", "na"]}, "doc": ""}, {"name": "acmgClassification",
"type": {"type": "enum", "name": "ACMGClassification", "symbols": ["pathogenic_variant",
"likely_pathogenic_variant", "variant_of_unknown_clinical_significance", "likely_benign_variant",
"benign_variant", "not_assessed"]}, "doc": ""}, {"name": "publications", "type": "string", "doc":
""}]}}}, {"name": "actionability", "type": {"type": "enum", "name": "Actionability", "symbols":
["yes", "no", "not_yet", "na"]}, "doc": ""}, {"name": "clinicalUtility", "type": {"type": "array",
"items": {"type": "enum", "name": "ClinicalUtility", "symbols": ["none", "change_in_medication",
"surgical_option", "additional_surveillance_for_proband_or_relatives", "clinical_trial_eligibility",
"informs_reproductive_choice", "unknown", "other"]}}, "doc": ""}, {"name": "phenotypesSolved",
"type": {"type": "enum", "name": "PhenotypesSolved", "symbols": ["yes", "no", "partially",
"unknown"]}, "doc": ""}, {"name": "phenotypesExplained", "type": ["null", {"type": "array", "items":
"string"}], "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"actionability",
"clinicalUtility",
"phenotypesExplained",
"phenotypesSolved",
"variantGroup",
"variantLevelQuestions",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {
'variantLevelQuestions': VariantLevelQuestions,
}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {
'variantLevelQuestions': VariantLevelQuestions,
}
return embeddedTypes[fieldName]
__slots__ = [
'actionability', 'clinicalUtility', 'phenotypesExplained',
'phenotypesSolved', 'variantGroup', 'variantLevelQuestions'
]
def __init__(self, **kwargs):
self.actionability = kwargs.get(
'actionability', None)
self.clinicalUtility = kwargs.get(
'clinicalUtility', None)
self.phenotypesExplained = kwargs.get(
'phenotypesExplained', None)
self.phenotypesSolved = kwargs.get(
'phenotypesSolved', None)
self.variantGroup = kwargs.get(
'variantGroup', None)
self.variantLevelQuestions = kwargs.get(
'variantLevelQuestions', None)
class VariantLevelQuestions(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "VariantLevelQuestions", "namespace": "org.gel.models.report.avro",
"fields": [{"name": "variantDetails", "type": "string", "doc": ""}, {"name": "confirmationDecision",
"type": {"type": "enum", "name": "ConfirmationDecision", "symbols": ["yes", "no", "na"]}, "doc":
""}, {"name": "confirmationOutcome", "type": {"type": "enum", "name": "ConfirmationOutcome",
"symbols": ["yes", "no", "na"]}, "doc": ""}, {"name": "reportingQuestion", "type": {"type": "enum",
"name": "ReportingQuestion", "symbols": ["yes", "no", "na"]}, "doc": ""}, {"name":
"acmgClassification", "type": {"type": "enum", "name": "ACMGClassification", "symbols":
["pathogenic_variant", "likely_pathogenic_variant", "variant_of_unknown_clinical_significance",
"likely_benign_variant", "benign_variant", "not_assessed"]}, "doc": ""}, {"name": "publications",
"type": "string", "doc": ""}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {
"acmgClassification",
"confirmationDecision",
"confirmationOutcome",
"publications",
"reportingQuestion",
"variantDetails",
}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'acmgClassification', 'confirmationDecision',
'confirmationOutcome', 'publications', 'reportingQuestion',
'variantDetails'
]
def __init__(self, **kwargs):
self.acmgClassification = kwargs.get(
'acmgClassification', None)
self.confirmationDecision = kwargs.get(
'confirmationDecision', None)
self.confirmationOutcome = kwargs.get(
'confirmationOutcome', None)
self.publications = kwargs.get(
'publications', None)
self.reportingQuestion = kwargs.get(
'reportingQuestion', None)
self.variantDetails = kwargs.get(
'variantDetails', None)
class VersionControl(ProtocolElement):
"""
No documentation
"""
_schemaSource = """
{"type": "record", "name": "VersionControl", "namespace": "org.gel.models.participant.avro",
"fields": [{"name": "GitVersionControl", "type": "string", "doc": "", "default": "1.0.3"}]}
"""
schema = avro_parse(_schemaSource)
requiredFields = {}
@classmethod
def isEmbeddedType(cls, fieldName):
embeddedTypes = {}
return fieldName in embeddedTypes
@classmethod
def getEmbeddedType(cls, fieldName):
embeddedTypes = {}
return embeddedTypes[fieldName]
__slots__ = [
'GitVersionControl'
]
def __init__(self, **kwargs):
self.GitVersionControl = kwargs.get(
'GitVersionControl', '1.0.3')
class Zygosity(object):
"""
It is a representation of the zygosity * `reference_homozygous`:
0/0, 0|0 * `heterozygous`: 0/1, 1/0, 1|0, 0|1 *
`alternate_homozygous`: 1/1, 1|1 * `missing`: ./., .|. *
`half_missing_reference`: ./0, 0/., 0|., .|0 *
`half_missing_alternate`: ./1, 1/., 1|., .|1 *
`alternate_hemizigous`: 1 * `reference_hemizigous`: 0 * `unk`:
Anything unexpected
"""
reference_homozygous = "reference_homozygous"
heterozygous = "heterozygous"
alternate_homozygous = "alternate_homozygous"
missing = "missing"
half_missing_reference = "half_missing_reference"
half_missing_alternate = "half_missing_alternate"
alternate_hemizigous = "alternate_hemizigous"
reference_hemizigous = "reference_hemizigous"
unk = "unk"
def __hash__(self):
return str(self).__hash__()
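# Reading of the mapping documented above (not an API of this module): a VCF
# genotype of "0/1" corresponds to Zygosity.heterozygous, "1|1" to
# Zygosity.alternate_homozygous, and "./." to Zygosity.missing.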
class diseaseType(object):
"""
No documentation
"""
ADULT_GLIOMA = "ADULT_GLIOMA"
BLADDER = "BLADDER"
BREAST = "BREAST"
CARCINOMA_OF_UNKNOWN_PRIMARY = "CARCINOMA_OF_UNKNOWN_PRIMARY"
CHILDHOOD = "CHILDHOOD"
COLORECTAL = "COLORECTAL"
ENDOMETRIAL_CARCINOMA = "ENDOMETRIAL_CARCINOMA"
HAEMONC = "HAEMONC"
HEPATOPANCREATOBILIARY = "HEPATOPANCREATOBILIARY"
LUNG = "LUNG"
MALIGNANT_MELANOMA = "MALIGNANT_MELANOMA"
NASOPHARYNGEAL = "NASOPHARYNGEAL"
ORAL_OROPHARYNGEAL = "ORAL_OROPHARYNGEAL"
OVARIAN = "OVARIAN"
PROSTATE = "PROSTATE"
RENAL = "RENAL"
SARCOMA = "SARCOMA"
SINONASAL = "SINONASAL"
TESTICULAR_GERM_CELL_TUMOURS = "TESTICULAR_GERM_CELL_TUMOURS"
UPPER_GASTROINTESTINAL = "UPPER_GASTROINTESTINAL"
NON_HODGKINS_B_CELL_LYMPHOMA_LOW_MOD_GRADE = "NON_HODGKINS_B_CELL_LYMPHOMA_LOW_MOD_GRADE"
CLASSICAL_HODGKINS = "CLASSICAL_HODGKINS"
NODULAR_LYMPHOCYTE_PREDOMINANT_HODGKINS = "NODULAR_LYMPHOCYTE_PREDOMINANT_HODGKINS"
T_CELL_LYMPHOMA = "T_CELL_LYMPHOMA"
def __hash__(self):
return str(self).__hash__()
[file metadata: crawling/selenium_naver_movie.py — Soooyeon-Kim/Python, blob 817e61e362a584ab6da81bd4349a076a476124ca, head e9e7e94e4a5a4ac94ff55347201cb4d24a5bb768, 18,536 bytes, Python, MIT license]
#!/usr/bin/env python
# coding: utf-8
# > * Naver movie directory: https://movie.naver.com/movie/sdb/browsing/bmovie.naver
# > * Release year (2019): https://movie.naver.com/movie/sdb/browsing/bmovie.naver?open=2019
# > * Release year (2019), page 1: https://movie.naver.com/movie/sdb/browsing/bmovie.naver?open=2019&page=1
# In[4]:
import time, re, csv
from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
# In[81]:
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import StaleElementReferenceException
# ## Movie list for 2019, page 1
# In[2]:
# Open a Chrome window (webdriver)
driver = webdriver.Chrome('C:/Users/sooyeon/Downloads/chromedriver.exe')
# Open the 2019 listing page
driver.get("https://movie.naver.com/movie/sdb/browsing/bmovie.naver?open=2019")
time.sleep(2)
# Load the movie list
boxes = driver.find_elements_by_css_selector("#old_content > ul > li")
for box in boxes:
title = box.find_element_by_css_selector('a').text
print(title)
# ### Store in a list
# In[94]:
## Page 1 only
# Open a Chrome window (webdriver)
driver = webdriver.Chrome('C:/Users/sooyeon/Downloads/chromedriver.exe')
# Open the 2019 listing page
driver.get("https://movie.naver.com/movie/sdb/browsing/bmovie.naver?open=2019")
time.sleep(2)
title = []
# Load the movie list
boxes = driver.find_elements_by_css_selector("#old_content > ul > li")
for box in boxes:
title.append(box.find_element_by_css_selector('a').text)
# In[95]:
title
# ## Fetch each movie's URL
# In[50]:
import pandas as pd
# Open a Chrome window (webdriver)
driver = webdriver.Chrome('C:/Users/sooyeon/Downloads/chromedriver.exe')
# Open the 2019 listing page
url = "https://movie.naver.com/movie/sdb/browsing/bmovie.naver?open=2019"
driver.get(url)
time.sleep(2)
result = []
boxes = driver.find_elements_by_css_selector("#old_content > ul > li")
for box in boxes:
link = box.find_element_by_css_selector('a').get_attribute('href')
print(link)
# ### Save to a list
# In[42]:
# Open a Chrome window (webdriver)
driver = webdriver.Chrome('C:/Users/sooyeon/Downloads/chromedriver.exe')
# Open the 2019 listing page
driver.get("https://movie.naver.com/movie/sdb/browsing/bmovie.naver?open=2019")
time.sleep(2)
# Load the movie list
boxes = driver.find_elements_by_css_selector("#old_content > ul > li")
link = []
for box in boxes:
link.append(box.find_element_by_css_selector('a').get_attribute('href'))
print(link)
# ### Just print for now
# In[45]:
type(link)
# In[46]:
len(link)
# In[47]:
link[0]
# In[49]:
link[19]
# ## Fetch the movie list page by page (pages 1 ~ 9)
# In[71]:
# Open a Chrome window (webdriver)
driver = webdriver.Chrome('C:/Users/sooyeon/Downloads/chromedriver.exe')
# Open the 2019 listing page
driver.get("https://movie.naver.com/movie/sdb/browsing/bmovie.naver?open=2019")
time.sleep(2)
title = []
# Load the movie list
for i in range(0,9):
boxes = driver.find_elements_by_css_selector("#old_content > ul > li")
for box in boxes:
title.append(box.find_element_by_css_selector('a').text)
    # page bar: each page link to visit  #old_content > div.pagenavigation > table > tbody > tr > td:nth-child(1) > a
page_bar = driver.find_elements_by_css_selector("div.pagenavigation > table > tbody > tr > td > *")
page_bar[i+1].click()
# In[72]:
title
# ## Fetch movie info from every page of the 2019 releases (pages 1 ~ 43)
# ## error: in the for loop that runs from page 1 to page 43, data is always collected only up to page 17, and an error occurs from page 18 onward.
# > NoSuchElementException: Message: no such element: Unable to locate element: {"method":"xpath","selector":"//*[@id="old_content"]/div[3]/table/tbody/tr/td[13]"}
# (Session info: chrome=94.0.4606.61)
# In[80]:
# Open a Chrome window (webdriver)
driver = webdriver.Chrome('C:/Users/sooyeon/Downloads/chromedriver.exe')
# Open the 2019 listing page
driver.get("https://movie.naver.com/movie/sdb/browsing/bmovie.naver?open=2019")
time.sleep(2)
title = []
# Load the movie list
for i in range(1,43):
boxes = driver.find_elements_by_css_selector("#old_content > ul > li")
for box in boxes:
title.append(box.find_element_by_css_selector('a').text)
driver.find_element_by_xpath('''//*[@id="old_content"]/div[3]/table/tbody/tr/td[{}]'''.format(str(i))).click()
time.sleep(2)
# ### The same error occurs with approaches other than xpath
# In[ ]:
'''
# page bar: each page link to visit  #old_content > div.pagenavigation > table > tbody > tr > td:nth-child(1) > a
page_bar = driver.find_elements_by_css_selector("div.pagenavigation > table > tbody > tr > td > *")
page_bar[i+1].click()
'''
# ### Movie list collected up to just before the error occurred
# In[79]:
title
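# A possible workaround (a sketch, not verified against the live site): skip the
# page-bar clicks entirely and build each page URL with the `page` query
# parameter shown at the top of this notebook; the 1..43 range mirrors the
# loop above.
# In[ ]:
driver = webdriver.Chrome('C:/Users/sooyeon/Downloads/chromedriver.exe')
base = "https://movie.naver.com/movie/sdb/browsing/bmovie.naver?open=2019"
title = []
for page in range(1, 44):
    driver.get("{}&page={}".format(base, page))
    time.sleep(1)
    boxes = driver.find_elements_by_css_selector("#old_content > ul > li")
    for box in boxes:
        title.append(box.find_element_by_css_selector('a').text)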
# ---
# ## Fetch info for a single movie only
# ### Data fields
# - Overview
# - Director
# - Cast
# - Rating (viewing classification)
# The overview consists of:
# - Genre
# - Country
# - Running time
# - Release date
# Movies per listing page: `20`
# ### Write code that fetches a single movie's info
# In[31]:
# Open a Chrome window (webdriver)
driver = webdriver.Chrome('C:/Users/sooyeon/Downloads/chromedriver.exe')
# Open the Frozen 2 page
driver.get("https://movie.naver.com/movie/bi/mi/basic.naver?code=136873")
time.sleep(2)
title = []
genre = []
nation = []
running = []
release = []
director = []
actor = []
grade = []
story = []
# Wait a moment
time.sleep(1)
# Title
title.append(driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > h3 > a").text)
# Genre
genre.append(driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(2) > p > span:nth-child(1)").text)
# Country
nation.append(driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(2) > p > span:nth-child(2) > a").text)
# Running time
running.append(driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(2) > p > span:nth-child(3)").text)
# Release date
release.append(driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(2) > p > span:nth-child(4)").text)
# Director
director.append(driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(4)").text)
# Cast
actor.append(driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(6) > p").text)
# Rating
grade.append(driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(8) > p > a").text)
# Plot
storypre = driver.find_element_by_css_selector("#content > div.article > div.section_group.section_group_frst > div:nth-child(1) > div > div > p").text
storypre = storypre.replace("\n","")  # drop newlines (the result was previously discarded)
story.append(storypre)
df = pd.DataFrame({'title':title,'genre':genre,'nation':nation,'running':running, 'release':release, 'director':director, 'actor':actor, 'grade':grade, 'story':story})
# In[32]:
df
# ## Follow the links in one page's movie list and fetch info for all 20 movies
# ## error 2
# ### Problem: if the cast data does not exist, no data is collected
# In[84]:
import pandas as pd
# Open a Chrome window (webdriver)
driver = webdriver.Chrome('C:/Users/sooyeon/Downloads/chromedriver.exe')
# Open the 2019 listing page
url = "https://movie.naver.com/movie/sdb/browsing/bmovie.naver?open=2019"
driver.get(url)
# Wait a moment
time.sleep(2)
# Define the lists
result = []
title = []
genre = []
nation = []
running = []
release = []
director = []
actor = []
grade = []
story = []
# Load the movie list
boxes = driver.find_elements_by_css_selector("#old_content > ul > li")
link = []
for box in boxes:
link.append(box.find_element_by_css_selector('a').get_attribute('href'))
# Loop over the links one by one
# 20 movies per listing page
try:
for i in range(0,20):
        # Visit the i-th link
        driver.get(link[i])
        # Wait a moment
        time.sleep(1)
        # Title
        title.append(driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > h3 > a").text)
        # Genre
        genre.append(driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(2) > p > span:nth-child(1)").text)
        # Country
        nation.append(driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(2) > p > span:nth-child(2) > a").text)
        # Running time
        running.append(driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(2) > p > span:nth-child(3)").text)
        # Release date
        release.append(driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(2) > p > span:nth-child(4)").text)
        # Director
        director.append(driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(4)").text)
        # Cast
        actor.append(driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(6) > p").text)
        # Rating
        grade.append(driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(8) > p > a").text)
        # Plot
        storypre = driver.find_element_by_css_selector("#content > div.article > div.section_group.section_group_frst > div:nth-child(1) > div > div > p").text
        storypre = storypre.replace("\n","")  # drop newlines (the result was previously discarded)
        story.append(storypre)
        # Build the data frame
        df = pd.DataFrame({'title':title,'genre':genre,'nation':nation,'running':running, 'release':release, 'director':director, 'actor':actor, 'grade':grade, 'story':story})
        # Append to the results
        result.append(df)
        # Go back
        driver.back()
except NoSuchElementException:
actor.append("null")
#except Exception as error:
# actor.append("null")
# continue
# df.to_csv("tmp_.csv",sep = '|',index = None)
# ## If an error occurs midway, collection stops
# ## Looking at the saved movie info, data up to the 16th entry was stored
# ## ----> Checking the 17th movie, it had no cast information
# In[85]:
df
# ## Code fix --> collect everything except the cast field
# ### Data fields:
# * Title
# * Overview
# * Director
# * Rating
# * Plot
# In[7]:
import pandas as pd
# Open a Chrome window (webdriver)
driver = webdriver.Chrome('C:/Users/sooyeon/Downloads/chromedriver.exe')
# Open the 2018 listing page
url = "https://movie.naver.com/movie/sdb/browsing/bmovie.naver?open=2018"
driver.get(url)
# Wait a moment
time.sleep(2)
# Define the lists
result = []
title = []
about = []
director = []
grade = []
story = []
# Load the movie list
boxes = driver.find_elements_by_css_selector("#old_content > ul > li")
link = []
for box in boxes:
link.append(box.find_element_by_css_selector('a').get_attribute('href'))
# Loop over the links one by one
try:
for i in range(0,20):
        # Visit the i-th link
        driver.get(link[i])
        # Wait a moment
        time.sleep(1)
        # Title
        title.append(driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > h3 > a").text)
        # Overview
        about.append(driver.find_element_by_css_selector('#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(2)').text)
        # Director
        director.append(driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(4)").text)
        # Rating
        grade.append(driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(8) > p > a").text)
        # Plot
        storypre = driver.find_element_by_css_selector("#content > div.article > div.section_group.section_group_frst > div:nth-child(1) > div > div > p").text
        storypre = storypre.replace("\n","")  # drop newlines (the result was previously discarded)
        story.append(storypre)
        # Build the data frame
        df = pd.DataFrame({'title':title,'about':about, 'director':director, 'grade':grade, 'story':story})
        # Append to the results
        result.append(df)
        # Go back
        driver.back()
except NoSuchElementException:
pass
# ### Just as before the fix, collection stops at the entry with no cast information
# In[8]:
df
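# A per-field fallback sketch: wrapping each lookup in its own try/except keeps
# one missing field (e.g. the cast) from aborting the whole loop. The helper
# name `safe_text` and the "null" default are ours.
# In[ ]:
def safe_text(driver, selector, default="null"):
    # Return the element's text, or `default` when the selector matches nothing.
    try:
        return driver.find_element_by_css_selector(selector).text
    except NoSuchElementException:
        return default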
# Overview section
# In[88]:
import pandas as pd
# Open a Chrome window (webdriver)
driver = webdriver.Chrome('C:/Users/sooyeon/Downloads/chromedriver.exe')
# Open a movie detail page
url = "https://movie.naver.com/movie/bi/mi/basic.naver?code=166416"
driver.get(url)
driver.find_element_by_css_selector('#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(2)').text
# ## Error (before applying the try/except)
# NoSuchElementException: Message: no such element: Unable to locate element: {"method":"css selector","selector":"#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(8) > p > a"}
# (Session info: chrome=94.0.4606.61)
#
# In[62]:
import pandas as pd
# Open a Chrome window (webdriver)
driver = webdriver.Chrome('C:/Users/sooyeon/Downloads/chromedriver.exe')
# Open the 2019 listing page
url = "https://movie.naver.com/movie/sdb/browsing/bmovie.naver?open=2019"
driver.get(url)
# Wait a moment
time.sleep(2)
# Define the lists
result = []
title = []
genre = []
nation = []
running = []
release = []
director = []
actor = []
grade = []
story = []
# Load the movie list
boxes = driver.find_elements_by_css_selector("#old_content > ul > li")
link = []
for box in boxes:
link.append(box.find_element_by_css_selector('a').get_attribute('href'))
# Loop over the links one by one
for i in range(0,20):
    # Visit the i-th link
    driver.get(link[i])
    # Wait a moment
    time.sleep(1)
    # Title
    title.append(driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > h3 > a").text)
    # Genre
    genre.append(driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(2) > p > span:nth-child(1)").text)
    # Country
    nation.append(driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(2) > p > span:nth-child(2) > a").text)
    # Running time
    running.append(driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(2) > p > span:nth-child(3)").text)
    # Release date
    release.append(driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(2) > p > span:nth-child(4)").text)
    # Director
    director.append(driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(4)").text)
    # Cast -- find_element raises NoSuchElementException rather than returning
    # None, so the original `if when is None` check could never fire; the
    # plural find_elements returns a (possibly empty) list instead.
    when = driver.find_elements_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(6) > p")
    if not when:
        actor.append("Null")
    else:
        actor.append(when[0].text)
    # Rating
    grade.append(driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(8) > p > a").text)
    # Plot
    storypre = driver.find_element_by_css_selector("#content > div.article > div.section_group.section_group_frst > div:nth-child(1) > div > div > p").text
    storypre = storypre.replace("\n","")  # drop newlines (the result was previously discarded)
    story.append(storypre)
    # Build the data frame
    df = pd.DataFrame({'title':title,'genre':genre,'nation':nation,'running':running, 'release':release, 'director':director, 'actor':actor, 'grade':grade, 'story':story})
    # Append to the results
    result.append(df)
    # Go back
    driver.back()
# ---
# ### The movie info collection code, one line at a time
# ## Collecting movie info
# In[ ]:
#content > div.article > div.mv_info_area > div.mv_info
# In[ ]:
# Open a Chrome window (webdriver)
driver = webdriver.Chrome('C:/Users/sooyeon/Downloads/chromedriver.exe')
# Open the 2019 listing page
driver.get("https://movie.naver.com/movie/sdb/browsing/bmovie.naver?open=2019")
time.sleep(2)
# ## Collecting movie reviews
# In[ ]:
import requests
from bs4 import BeautifulSoup
from selenium import webdriver
import time
# In[ ]:
import requests
from bs4 import BeautifulSoup
# ### Open the page
# In[5]:
# Open a Chrome window (webdriver)
driver = webdriver.Chrome('C:/Users/sooyeon/Downloads/chromedriver.exe')
# Open the Frozen 2 page
driver.get("https://movie.naver.com/movie/bi/mi/basic.naver?code=136873")
time.sleep(2)
# ## Movie info
# ### Title
# * #content > div.article > div.mv_info_area > div.mv_info > h3 > a
# In[6]:
driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > h3 > a").text
# ### Genre
# In[15]:
driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(2) > p > span:nth-child(1)").text
# ### Country
# In[16]:
driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(2) > p > span:nth-child(2) > a").text
# ### Running time
# In[17]:
driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(2) > p > span:nth-child(3)").text
# ### Release date
# In[14]:
driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(2) > p > span:nth-child(4)").text
# ### Director
# In[9]:
driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(4)").text
# ### Cast
# In[8]:
driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(6) > p").text
# ### Rating
# In[13]:
driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(8) > p > a").text
# ### Plot
# In[21]:
story = driver.find_element_by_css_selector("#content > div.article > div.section_group.section_group_frst > div:nth-child(1) > div > div > p").text
# In[22]:
story.replace("\n","")
# ## Full code
# In[23]:
# Title
driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > h3 > a").text
# Genre
driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(2) > p > span:nth-child(1)").text
# Country
driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(2) > p > span:nth-child(2) > a").text
# Running time
driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(2) > p > span:nth-child(3)").text
# Release date
driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(2) > p > span:nth-child(4)").text
# Director
driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(4)").text
# Cast
driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(6) > p").text
# Rating
driver.find_element_by_css_selector("#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(8) > p > a").text
# Plot
story = driver.find_element_by_css_selector("#content > div.article > div.section_group.section_group_frst > div:nth-child(1) > div > div > p").text
story = story.replace("\n","")  # assign the cleaned text (the result was previously discarded)
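# The nine lookups above share one pattern; a compact sketch (the field names,
# the SELECTORS dict, and scrape_movie are ours) that collects them into a
# single dict using the safe_text helper defined earlier:
# In[ ]:
SELECTORS = {
    'title': "#content > div.article > div.mv_info_area > div.mv_info > h3 > a",
    'director': "#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(4)",
    'actor': "#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(6) > p",
    'grade': "#content > div.article > div.mv_info_area > div.mv_info > dl > dd:nth-child(8) > p > a",
}
def scrape_movie(driver):
    # One record for the movie page currently loaded; missing fields become "null".
    return {field: safe_text(driver, sel) for field, sel in SELECTORS.items()}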
[file metadata: backend/foodapp/views/__init__.py — RaitzeR/FinnBros, blob 81aace954c7bee5b4cf737fa901730cb6e640088, head a2d7e3e755af7bb22bb2ce779ea1f36c6bed961b, 206 bytes, Python, MIT license, 10 stars (2020-06-05 to 2022-03-11)]
from foodapp.views.category_views import *
from foodapp.views.community_views import *
from foodapp.views.food_views import *
from foodapp.views.watson_views import *
from foodapp.views.user_views import *
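# These star-imports aggregate every view into the foodapp.views package, so a
# caller can write e.g. `from foodapp.views import some_view` (hypothetical
# name) without the full module path; defining __all__ in each *_views module
# would make the re-exported names explicit.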
[file metadata: app.py — bhavzie/HotelRMS, blob c4ef7093701868a97bdede631b1983ed5276699d, head 763d25ef74ff65bb5d1b15f73b0d50364423f4f6, 295,491 bytes, Python, MIT license]
from flask import Flask, render_template, flash, request, session, url_for, jsonify, redirect, Response
from config import Config
from itsdangerous import URLSafeTimedSerializer, SignatureExpired
from functools import wraps
import smtplib
from flask_mysqldb import MySQL
from flask_mail import Mail, Message
import datetime
import math
import json
import csv
from xlsxwriter.workbook import Workbook
from passlib.hash import sha256_crypt
app = Flask(__name__)
app.config.from_object(Config)
mail = Mail(app)
mysql = MySQL(app)
def generateConfirmationToken(email):
serializer = URLSafeTimedSerializer(app.config['SECRET_KEY'])
return serializer.dumps(email, salt=app.config['SECURITY_PASSWORD_SALT'])
def confirmToken(token, expiration=10000000):
serializer = URLSafeTimedSerializer(app.config['SECRET_KEY'])
try:
email = serializer.loads(
token,
salt=app.config['SECURITY_PASSWORD_SALT'],
max_age=expiration
)
except SignatureExpired:
return 'The Token is expired'
except:
return False
return email
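# Token round trip, as a comment sketch (the address is hypothetical):
#   token = generateConfirmationToken('user@example.com')
#   confirmToken(token)       # -> 'user@example.com' while within max_age
#   confirmToken(token[:-1])  # -> False (a tampered signature hits the bare except)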
def sendMail(subjectv, recipientsv, linkv, tokenv, bodyv):
msg = Message(
subject = subjectv,
sender = 'no-reply@trompar.com',
recipients = recipientsv.split(),
bcc = ['trompar.sales@gmail.com']
)
link = url_for(linkv, token=tokenv, _external=True)
msg.body = bodyv + ' ' + link
mail.send(msg)
def sendMailQ(subjectv, recipientsv, linkv, tokenv, hotelId, hotelName, hotelPhone, default_email, bodyv):
msg = Message(
subject = subjectv,
sender = 'no-reply@trompar.com',
recipients = recipientsv.split(),
cc = [default_email],
bcc = ['trompar.sales@gmail.com']
)
link = url_for(linkv, id=tokenv, hotelId = hotelId, _external=True)
with open('static/images/mail2.png', 'rb') as fp:
msg.attach('mail2.jpg', 'image/jpg', fp.read(), 'inline', headers = [["Content-ID",'<ribbon>']])
with open('static/images/footer2.png', 'rb') as fp:
msg.attach('footer2.jpg', 'image/jpg', fp.read(), 'inline', headers = [["Content-ID",'<ribbon2>']])
msg.html = render_template('/mails/quote.html', link = link, hotelName = hotelName, hotelPhone = hotelPhone)
mail.send(msg)
def sendMail2(subjectv, recipientsv, bodyv):
# Confirm Email
msg = Message(
subject = subjectv,
sender = 'no-reply@trompar.com',
recipients = recipientsv.split(),
bcc = ['trompar.sales@gmail.com']
)
msg.body = bodyv
mail.send(msg)
def sendMailA(subjectv, recipientsv, bodyv, attachv):
msg = Message(
subject = subjectv,
sender = 'no-reply@trompar.com',
recipients = recipientsv.split(),
bcc = ['trompar.sales@gmail.com']
)
msg.body = bodyv
msg.attach(attachv)
mail.send(msg)
def sendMailAddHotel(subjectv, recipientsv):
msg = Message(
subject = subjectv,
sender = 'no-reply@trompar.com',
recipients = recipientsv.split(),
bcc = ['trompar.sales@gmail.com']
)
msg.html = render_template('/mails/addHotelMail.html', recipients = recipientsv)
mail.send(msg)
# DB Queries
def dbQueryInsert(table, myDict):
    placeholders = ', '.join(['%s'] * len(myDict))
    columns = ', '.join(myDict.keys())
    values = list(myDict.values())
    sql = 'Insert into %s ( %s ) VALUES ( %s )' % (table, columns, placeholders)
    cursor = mysql.connection.cursor()
    cursor.execute(sql, values)
    mysql.connection.commit()
    cursor.close()
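# Usage sketch (table and columns are illustrative only):
#   dbQueryInsert('customers', {'fullName': 'Jane Doe', 'email': 'jane@example.com'})
# builds: Insert into customers ( fullName, email ) VALUES ( %s, %s )
# with the two values bound as query parameters.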
# Mapping 1 => True, 0 => False and vice-versa
def getValC(value):
if value == None:
return 0
else:
return 1
def getValC2(value):
if value == 1:
return True
else:
return False
def procArr(value):
if value is None:
return ''
return ' '.join(value)
def procArr2(value):
string = ''
if value != None:
if value.count('cq') > 0:
string += 'Cheque, '
if value.count('bt') > 0:
string += ' Bank Transfer, '
if value.count('cc') > 0:
string += 'Credit Card, '
try:
string = string[:string.rindex(',')]
except:
string = string
return string
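# e.g. procArr2('cq bt') -> 'Cheque,  Bank Transfer': the payment codes map to
# labels and the trailing comma is trimmed via rindex.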
def checkOverride(value):
pre = value.split('(')
if len(pre) == 1:
return False
else:
try:
val = float(pre[0])
val2 = float(pre[1].split(" : ")[1].split('[')[0])
if (val != val2):
return True
else:
return False
except:
return False
def alterTables():
cursor = mysql.connection.cursor()
cursor.execute('SELECT table_name FROM information_schema.tables where TABLE_SCHEMA="testHotel";')
data = cursor.fetchall()
for d in data:
table = d['table_name']
if table != 'mapHotelId':
query = 'UPDATE {} set hotelId = 1 where 1 = 1'.format(table)
cursor.execute(query)
mysql.connection.commit()
cursor.close()
def dateFormat(value):
    # 's' was undefined in the original; assuming the input is a
    # '/'-separated date string to be re-joined with '-'.
    s = value.split('/')
    result = s[0] + "-" + s[1] + "-" + s[2]
    return result
# Decorators
# Check if user logged in
def is_logged_in(f):
@wraps(f)
def wrap(*args, **kwargs):
if 'logged_in' in session:
return f(*args, **kwargs)
else:
flash("Unauthorized, Please Login", 'danger')
return render_template('login.html', title='Login')
return wrap
# Global Status Values
statusval1 = 'NEW'
statusval2 = 'QUOTED'
statusval3 = 'NEGOTIATED'
statusval4 = 'ACCEPTED'
statusval5 = 'CUSTOMER DECLINED'
statusval6 = 'DELETED'
statusval7 = 'SENT FOR REVIEW'
statusval8 = 'HOTEL DECLINED'
statusval9 = 'EXPIRED'
statusval10 = 'CONFIRMED'
statusval11 = 'NOT CONFIRMED'
url = app.config['SERVER_URL']
@app.errorhandler(404)
def error_404(e):
return render_template('error/404.html'), 404
@app.errorhandler(403)
def error_403(e):
return render_template('error/403.html'), 403
@app.errorhandler(500)
def error_500(e):
return render_template('error/500.html'), 500
@app.route('/confirm_email/<token>', methods=['GET', 'POST'])
def confirm_email(token):
email = confirmToken(token)
    if (email is False or email == 'The Token is expired'):  # confirmToken returns False or an expiry message on failure
flash('Your email could not be verified', 'danger')
return render_template('login.html', title = 'Login')
else:
# DB ADD the Verified Email Flag
cursor = mysql.connection.cursor()
cursor.execute('SELECT * FROM users where email = %s', [email])
data = cursor.fetchall()
data = data[0]
if data['userType'] == 'customer':
cursor.execute('UPDATE customers SET email_verified = 1 WHERE email = %s', [email])
elif data['userType'] == 'IATA':
cursor.execute('UPDATE iataUsers SET email_verified = 1 WHERE email = %s', [email])
elif data['userType'] == 'hoteluser':
cursor.execute('UPDATE hotelUsers SET email_verified = 1 WHERE email = %s', [email])
elif data['userType'] == 'developer':
cursor.execute('UPDATE developers SET email_verified = 1 WHERE email = %s', [email])
mysql.connection.commit()
cursor.close()
flash('Your email has been successfully verified', 'success')
return render_template('login.html', title = 'Login')
@app.route('/home', methods=['GET', 'POST'])
def home():
try:
if session['logged_in'] == True:
return render_template('index2.html', title = 'Home')
except:
return render_template('login.html', title = 'Login')
@app.route('/signIn', methods=['GET', 'POST'])
def index():
return render_template('login.html', title = 'Login')
@app.route('/iataRegistration', methods=['GET', 'POST'])
@is_logged_in
def iatar():
return render_template('users/registerIata.html', title = 'Register')
@app.route('/customerRegistrationR', methods=['GET', 'POST'])
@is_logged_in
def customerr():
return render_template('users/rcustomer.html', title='Register')
@app.route('/customerRegistrationI', methods=['GET', 'POST'])
@is_logged_in
def customerI():
return render_template('users/icustomer.html', title='Register')
@app.route('/customerRegistrationT', methods=['GET', 'POST'])
@is_logged_in
def customerT():
return render_template('users/tcustomer.html', title='Register')
@app.route('/customerRegistrationC', methods=['GET', 'POST'])
@is_logged_in
def customerC():
return render_template('users/ccustomer.html', title='Register')
@app.route('/registerI', methods = ['GET', 'POST'])
@is_logged_in
def registerI():
if request.method == 'POST':
fullName = request.form['fullName']
firstName = fullName.split(' ')[0]
email = request.form['email']
password = request.form['password']
phone = request.form['phone']
country = request.form['country']
agencyName = request.form['agencyName']
iataCode = request.form['iataCode']
password = sha256_crypt.hash(password)
hotelId = session.get('hotelId')
cursor = mysql.connection.cursor()
cursor.execute('SELECT * From users where email = %s', [email])
data = cursor.fetchall()
if len(data) == 0:
token = generateConfirmationToken(email)
""" sendMail(
subjectv='Confirm Email',
recipientsv=email,
linkv='confirm_email',
tokenv=token,
bodyv='Confirm your email by clicking this link ',
) """
cursor.execute('INSERT INTO users(firstName, email, password, userType, userSubType, hotelId) Values(%s, %s, %s, %s, %s, %s)',
(firstName, email, password, 'IATA', '', hotelId))
cursor.execute('INSERT INTO iataUsers(fullName, email, country, phone, password, iataCode, agencyName, hotelId) Values(%s, %s, %s, %s, %s, %s, %s, %s)',
(fullName, email, country, phone, password, iataCode, agencyName, hotelId))
mysql.connection.commit()
cursor.close()
else:
flash('Email Already Registered', 'danger')
return render_template('users/rcustomer.html', title="Register")
flash('You are now registered and can log in', 'success')
return redirect(url_for("home2"))
@app.route('/registerR', methods = ['GET', 'POST'])
@is_logged_in
def registerR():
if request.method == 'POST':
fullName = request.form['fullName']
firstName = fullName.split(' ')[0]
email = request.form['email']
password = request.form['password']
phone = request.form['phone']
country = request.form['country']
password = sha256_crypt.hash(password)
hotelId = session.get('hotelId')
cursor = mysql.connection.cursor()
cursor.execute('SELECT * From users where email = %s', [email])
data = cursor.fetchall()
if len(data) == 0:
token = generateConfirmationToken(email)
""" sendMail(
subjectv='Confirm Email',
recipientsv=email,
linkv='confirm_email',
tokenv=token,
bodyv='Confirm your email by clicking this link ',
) """
cursor.execute('INSERT INTO users(firstName, email, password, userType, userSubType, hotelId) Values(%s, %s, %s, %s, %s, %s)',
(firstName, email, password, 'customer', 'retail', hotelId))
cursor.execute('INSERT INTO customers(fullName, email, country, phone, password, userType, hotelId) Values(%s, %s, %s, %s, %s, %s, %s)', (fullName, email, country, phone, password, 'retail', hotelId))
mysql.connection.commit()
cursor.close()
else:
flash('Email Already Registered', 'danger')
return render_template('users/rcustomer.html', title="Register")
flash('You are now registered and can log in', 'success')
return redirect(url_for("home2"))
@app.route('/registerC', methods=['GET', 'POST'])
@is_logged_in
def registerC():
if request.method == 'POST':
fullName = request.form['fullName']
firstName = fullName.split(' ')[0]
email = request.form['email']
password = request.form['password']
phone = request.form['phone']
country = request.form['country']
organizationName = request.form['organizationName']
password = sha256_crypt.hash(password)
hotelId = session.get('hotelId')
cursor = mysql.connection.cursor()
cursor.execute('SELECT * From users where email = %s', [email])
data = cursor.fetchall()
if len(data) == 0:
token = generateConfirmationToken(email)
""" sendMail(
subjectv='Confirm Email',
recipientsv=email,
linkv='confirm_email',
tokenv=token,
bodyv='Confirm your email by clicking this link ',
) """
cursor.execute('INSERT INTO users(firstName, email, password, userType, userSubType, hotelId) Values(%s, %s, %s, %s, %s, %s)',
(firstName, email, password, 'customer', 'corporate', hotelId))
cursor.execute('INSERT INTO customers(fullName, email, country, phone, password, userType, organizationName, hotelId) Values(%s, %s, %s, %s, %s, %s, %s, %s)',
(fullName, email, country, phone, password, 'corporate', organizationName, hotelId))
mysql.connection.commit()
cursor.close()
else:
flash('Email Already Registered', 'danger')
return render_template('users/rcustomer.html', title="Register")
flash('You are now registered and can log in', 'success')
return redirect(url_for("home2"))
@app.route('/registerT', methods=['GET', 'POST'])
@is_logged_in
def registerT():
if request.method == 'POST':
fullName = request.form['fullName']
firstName = fullName.split(' ')[0]
email = request.form['email']
password = request.form['password']
phone = request.form['phone']
country = request.form['country']
agencyName = request.form['agencyName']
password = sha256_crypt.hash(password)
hotelId = session.get('hotelId')
cursor = mysql.connection.cursor()
cursor.execute('SELECT * From users where email = %s', [email])
data = cursor.fetchall()
if len(data) == 0:
token = generateConfirmationToken(email)
""" sendMail(
subjectv='Confirm Email',
recipientsv=email,
linkv='confirm_email',
tokenv=token,
bodyv='Confirm your email by clicking this link ',
) """
cursor.execute('INSERT INTO users(firstName, email, password, userType, userSubType, hotelId) Values(%s, %s, %s, %s, %s, %s)',
(firstName, email, password, 'customer', 'tour', hotelId))
cursor.execute('INSERT INTO customers(fullName, email, country, phone, password, userType, agencyName, hotelId) Values(%s, %s, %s, %s, %s, %s, %s, %s)',
(fullName, email, country, phone, password, 'tour', agencyName, hotelId))
mysql.connection.commit()
cursor.close()
else:
flash('Email Already Registered', 'danger')
return render_template('users/rcustomer.html', title="Register")
flash('You are now registered and can log in', 'success')
return redirect(url_for("home2"))
@app.route('/login', methods=['GET', 'POST'])
def login():
#Dropdown for developers
if request.method == 'POST':
email = request.form['email']
password = request.form['password']
cursor = mysql.connection.cursor()
cursor.execute('SELECT * From users where email = %s', [email])
data = cursor.fetchall()
if len(data) == 0:
error = 'Email not registered'
return render_template('login.html', error = error)
else:
data = data[0]
password_match = data['password']
if (sha256_crypt.verify(password, password_match)):
session['logged_in'] = True
session['email'] = email
session['userType'] = data['userType']
session['firstName'] = data['firstName']
session['hotelId'] = data['hotelId']
hotelId = data['hotelId']
'''
* userType:-
* For Others = 'customer'
                * For IATA = 'IATA'
* For Hotel = 'hoteluser' &
userSubType =
1) hotelAdmin
2) revenue
3) reservation
* For developer = 'developer'
'''
menuParams = {}
if session['userType'] == 'hoteluser':
menuParams = {
'request': True,
'requestCreate': True,
'requestManage': True,
'requestCreateAdhoc': True,
'requestCreateSeries': True,
'strategy': True,
'strategyRooms': True,
'strategyRate': True,
'strategyDiscount': True,
'strategyDiscountCreate': True,
'strategyDiscountMap': True,
'strategyForecast': True,
'settings': True,
'settingsRequest': True,
'settingsRequestCreate': True,
'settingsRequestMap': True,
'settingsContact': True,
'settingsContactCreate': True,
'settingsContactMap': True,
'settingsTime': True,
'settingsTimeCreate': True,
'settingsTimeMap': True,
'settingsNegotiation': True,
'settingsAutopilot': True,
'users': True,
'usersHotel': True,
'usersHotelAdd': True,
'usersHotelEdit': True,
'usersCustomer': True,
'usersCustomerAdd': True,
'usersCustomerEdit': True,
'usersCustomerUpload': True,
'analytics': True,
'analyticsDashboard': True,
'analyticsBehavior': True,
'analyticsPerformance': True,
'analyticsRevenue': True,
'analyticsTracking': True,
'help': True,
'helpUserGuide': True,
'helpFaq': True,
'helpTicketing': True,
'analyticsStdReport': True,
'strategyEvaluation': True,
'strategyAncillary': True,
'settingBusinessReward': True,
}
session['userSubType'] = data['userSubType']
userSubType = data['userSubType']
cursor.execute(
"SELECT * FROM hotelMenuAccess where userType = %s && hotelId = %s", [userSubType, hotelId])
d = cursor.fetchall()
cursor.execute("SELECT * FROM hotelUsers where email = %s && hotelId = %s", [email, hotelId])
dog = cursor.fetchall()
dog = dog[0]
if (dog['active'] == 0):
session.clear()
flash('You are de-activated. Kindly contact Super Admin!', 'danger')
return render_template('login.html', title = 'Login')
if (dog['email_verified'] == 0 or dog['email_verified'] == None):
session.clear()
flash('Please verify your email address before you login', 'danger')
return render_template('login.html')
if len(d) != 0:
d = d[0]
menuParams['request'] = getValC2(d['request'])
menuParams['requestCreate'] = getValC2(d['requestCreate'])
menuParams['requestManage'] = getValC2(d['requestManage'])
menuParams['requestCreateAdhoc'] = getValC2(
d['requestCreateAdhoc'])
menuParams['requestCreateSeries'] = getValC2(d['requestCreateSeries'])
menuParams['strategy'] = getValC2(d['strategy'])
menuParams['strategyRooms'] = getValC2(d['strategyRooms'])
menuParams['strategyRate'] = getValC2(d['strategyRate'])
menuParams['strategyDiscount'] = getValC2(d['strategyDiscount'])
menuParams['strategyDiscountCreate'] = getValC2(d['strategyDiscountCreate'])
menuParams['strategyDiscountMap'] = getValC2(d['strategyDiscountMap'])
menuParams['strategyForecast'] = getValC2(d['strategyForecast'])
menuParams['settingsRequest'] = getValC2(d['settingsRequest'])
menuParams['settingsRequestCreate'] = getValC2(d['settingsRequestCreate'])
menuParams['settingsRequestMap'] = getValC2(d['settingsRequestMap'])
menuParams['settingsContactCreate'] = getValC2(
d['settingsContactCreate'])
menuParams['settingsContactMap'] = getValC2(
d['settingsContactMap'])
menuParams['settingsTime'] = getValC2(
d['settingsTime'])
menuParams['settingsTimeCreate'] = getValC2(
d['settingsTimeCreate'])
menuParams['settingsTimeMap'] = getValC2(
d['settingsTimeMap'])
menuParams['settingsNegotiation'] = getValC2(
d['settingsNegotiation'])
menuParams['settingsAutopilot'] = getValC2(
d['settingsAutopilot'])
menuParams['usersHotel'] = getValC2(
d['usersHotel'])
menuParams['usersHotelAdd'] = getValC2(
d['usersHotelAdd'])
menuParams['usersCustomer'] = getValC2(
d['usersCustomer'])
menuParams['usersCustomerAdd'] = getValC2(
d['usersCustomerAdd'])
menuParams['usersCustomerEdit'] = getValC2(
d['usersCustomerEdit'])
menuParams['usersCustomerUpload'] = getValC2(
d['usersCustomerUpload'])
menuParams['analytics'] = getValC2(
d['analytics'])
menuParams['analyticsDashboard'] = getValC2(
d['analyticsDashboard'])
menuParams['analyticsBehavior'] = getValC2(
d['analyticsBehavior'])
menuParams['analyticsPerformance'] = getValC2(
d['analyticsPerformance'])
menuParams['analyticsRevenue'] = getValC2(
d['analyticsRevenue'])
menuParams['analyticsTracking'] = getValC2(
d['analyticsTracking'])
menuParams['help'] = getValC2(
d['help'])
menuParams['helpUserGuide'] = getValC2(
d['helpUserGuide'])
menuParams['helpFaq'] = getValC2(
d['helpFaq'])
menuParams['helpTicketing'] = getValC2(
d['helpTicketing'])
menuParams['settings'] = getValC2(
d['settings'])
menuParams['settingsContact'] = getValC2(
d['settingsContact'])
menuParams['users'] = getValC2(
d['users'])
menuParams['usersHotelEdit'] = getValC2(
d['usersHotelEdit'])
menuParams['analyticsStdReport'] = getValC2(d['analyticsStdReport'])
menuParams['strategyEvaluation'] = getValC2(d['strategyEvaluation'])
menuParams['settingBusinessReward'] = getValC2(d['settingBusinessReward'])
menuParams['strategyAncillary'] = getValC2(d['strategyAncillary'])
session['menuParams'] = menuParams
                elif session['userType'] == 'IATA':  # registration stores 'IATA' (see registerI), not 'iata'
cursor.execute(
"SELECT * FROM iataUsers where email = %s && hotelId = %s", [email, hotelId])
dog = cursor.fetchall()
dog = dog[0]
if (dog['email_verified'] == 0 or dog['email_verified'] == None):
session.clear()
flash('Please verify your email address before you login', 'danger')
return render_template('login.html')
if (dog['active'] == 0):
session.clear()
flash(
'You are de-activated. Kindly contact Super Admin!', 'danger')
return render_template('login.html', title='Login')
menuParams = {
'request': True,
'requestCreate': True,
'requestManage': True,
'requestCreateAdhoc': True,
'requestCreateSeries': True,
'users': True,
'usersAdd': True,
'usersEdit': True,
'analytics': True,
'analyticsDashboard': True,
'analyticsRequest': True,
'analyticsPerformance': True,
'analyticsTracking': True,
'help': True,
'helpUserGuide': True,
'helpFaq': True,
'helpTicketing': True
}
cursor.execute("SELECT * FROM iataMenuAccess where hotelId = %s", [hotelId])
d = cursor.fetchall()
if len(d) != 0:
d = d[0]
menuParams['request'] = getValC2(d['request'])
menuParams['requestCreate'] = getValC2(
d['requestCreate'])
menuParams['requestManage'] = getValC2(
d['requestManage'])
menuParams['users'] = getValC2(d['users'])
menuParams['usersAdd'] = getValC2(d['usersAdd'])
menuParams['usersEdit'] = getValC2(d['usersEdit'])
menuParams['analytics'] = getValC2(d['analytics'])
menuParams['analyticsDashboard'] = getValC2(d['analyticsDashboard'])
menuParams['analyticsRequest'] = getValC2(d['analyticsRequest'])
menuParams['analyticsTracking'] = getValC2(d['analyticsTracking'])
menuParams['analyticsPerformance'] = getValC2(d['analyticsPerformance'])
menuParams['requestCreateAdhoc'] = getValC2(d['requestCreateAdhoc'])
menuParams['requestCreateSeries'] = getValC2(d['requestCreateSeries'])
menuParams['help'] = getValC2(
d['help'])
menuParams['helpUserGuide'] = getValC2(
d['helpUserGuide'])
menuParams['helpFaq'] = getValC2(
d['helpFaq'])
menuParams['helpTicketing'] = getValC2(
d['helpTicketing'])
session['menuParams'] = menuParams
elif session['userType'] == 'customer':
cursor.execute("SELECT * FROM customers where email = %s && hotelId = %s", [email, hotelId])
dog = cursor.fetchall()
dog = dog[0]
if (dog['email_verified'] == 0 or dog['email_verified'] == None):
session.clear()
flash('Please verify your email address before you login', 'danger')
return render_template('login.html')
if (dog['active'] == 0):
session.clear()
flash('You are de-activated. Kindly contact Super Admin!', 'danger')
return render_template('login.html', title = 'Login')
menuParams = {
'request': True,
'requestCreate': True,
'requestManage': True,
'requestCreateAdhoc': True,
'requestCreateSeries': True,
'analytics': True,
'analyticsDashboard': True,
'analyticsRequest': True,
'analyticsPerformance': True,
'analyticsTracking': True,
'help': True,
'helpUserGuide': True,
'helpFaq': True,
'helpTicketing': True
}
cursor.execute("SELECT * FROM customerMenuAccess where hotelId = %s", [hotelid])
d = cursor.fetchall()
if len(d) != 0:
d = d[0]
menuParams['request'] = getValC2(d['request'])
menuParams['requestCreate'] = getValC2(
d['requestCreate'])
menuParams['requestManage'] = getValC2(
d['requestManage'])
menuParams['requestCreateAdhoc'] = getValC2(
d['requestCreateAdhoc'])
menuParams['requestCreateSeries'] = getValC2(
d['requestCreateSeries'])
menuParams['analytics'] = getValC2(d['analytics'])
menuParams['analyticsDashboard'] = getValC2(
d['analyticsDashboard'])
menuParams['analyticsRequest'] = getValC2(d['analyticsRequest'])
menuParams['analyticsPerformance'] = getValC2(d['analyticsPerformance'])
menuParams['analyticsTracking'] = getValC2(d['analyticsTracking'])
menuParams['help'] = getValC2(
d['help'])
menuParams['helpUserGuide'] = getValC2(
d['helpUserGuide'])
menuParams['helpFaq'] = getValC2(
d['helpFaq'])
menuParams['helpTicketing'] = getValC2(
d['helpTicketing'])
session['menuParams'] = menuParams
elif session['userType'] == 'developer':
cursor.execute("SELECT * FROM developers where email = %s", [email])
dog = cursor.fetchall()
dog = dog[0]
if (dog['email_verified'] == 0 or dog['email_verified'] == None):
session.clear()
flash('Please verify your email address before you login', 'danger')
return render_template('login.html')
if (dog['active'] == 0):
session.clear()
flash('You are de-activated. Kindly contact Super Admin!', 'danger')
return render_template('login.html', title = 'Login')
cursor.execute('SELECT hotelName From mapHotelId')
hotelName = cursor.fetchall()
return render_template('developer/hotelDropDown.html', hotelName = hotelName)
flash('You are now logged in', 'success')
return redirect(url_for('home2'))
else:
error = 'Passwords did not match'
return render_template('login.html', error = error)
return render_template('login.html', title = 'Login')
@app.route('/dropDownHotelSubmit', methods = ['GET', 'POST'])
@is_logged_in
def dropDownHotelSubmit():
hotelName = request.form['hotelName']
cursor = mysql.connection.cursor()
cursor.execute('SELECT hotelId from mapHotelId where hotelName = %s', [hotelName])
hotelId = cursor.fetchall()
hotelId = hotelId[0]['hotelId']
session['hotelId'] = hotelId
flash('You are now logged in', 'success')
return redirect(url_for('home2'))
@app.route('/switchHotel', methods = ['GET', "POST"])
@is_logged_in
def switchHotel():
cursor = mysql.connection.cursor()
cursor.execute('SELECT hotelName From mapHotelId')
hotelName = cursor.fetchall()
selected = session.get('hotelId')
cursor.execute('SELECT hotelName from mapHotelId where hotelId = %s', [selected])
selected = cursor.fetchall()
if len(selected) != 0:
selected = selected[0]['hotelName']
return render_template('developer/hotelDropDown.html', hotelName = hotelName, selected = selected)
@app.route('/forgotpassword', methods = ['GET', 'POST'])
def forgotpassword():
return render_template('users/forgotpasswordreq.html', title='forgotpassword')
@app.route('/passwordupdatereq', methods = ['GET', 'POST'])
def passwordupdatereq():
email = request.form['email']
cursor = mysql.connection.cursor()
cursor.execute('SELECT * From users where email = %s', [email])
data = cursor.fetchall()
if len(data) == 0:
flash('Email not registered', 'danger')
return render_template('login.html', title='Login')
else:
token = generateConfirmationToken(email)
sendMail(
subjectv='Update Password',
recipientsv=email,
linkv='passwordupdate',
tokenv=token,
bodyv='Change your password by clicking this link ',
)
flash('Kindly Check your email', 'success')
return redirect(url_for("home2"))
@app.route('/passwordupdate/<token>', methods = ['GET', 'POST'])
def passwordupdate(token):
email = confirmToken(token)
return render_template('users/forgotpassword.html', email=email)
@app.route('/passwordupdatef', methods = ['GET', 'POST'])
def passwordupdatef():
email = request.form['email']
password = request.form['password']
password = sha256_crypt.hash(password)
cursor = mysql.connection.cursor()
cursor.execute('SELECT * From users where email = %s', [email])
data = cursor.fetchall()
data = data[0]
cursor.execute('UPDATE users SET password = %s where email = %s', [password, email])
if data['userType'] == 'customer':
cursor.execute('UPDATE customers SET password = %s where email = %s', [password, email])
elif data['userType'] == 'IATA':
cursor.execute('UPDATE iataUsers SET password = %s where email = %s', [password, email])
    elif data['userType'] == 'hoteluser':  # match the lowercase value stored in the users table
cursor.execute('UPDATE hotelUsers SET password = %s where email = %s', [password, email])
elif data['userType'] == 'developer':
cursor.execute('UPDATE developers SET password = %s where email = %s', [password, email])
mysql.connection.commit()
cursor.close()
flash('Your password has been updated', 'success')
return render_template('login.html', title = 'Login')
@app.route('/signOut', methods=['GET', 'POST'])
@is_logged_in
def signOut():
session.clear()
flash('You are now logged out', 'success')
return render_template('login.html', title = 'Login')
@app.route('/hoteladduser', methods = ['GET', 'POST'])
@is_logged_in
def hoteladduser():
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute("SELECT userType FROM hotelMenuAccess where hotelId = %s", [hotelId])
data = cursor.fetchall()
subtypes = []
for d in data:
subtypes.append(d['userType'])
if 'revenue' not in subtypes:
subtypes.append('revenue')
if 'reservation' not in subtypes:
subtypes.append('reservation')
if 'hotelAdmin' not in subtypes:
subtypes.append('hotelAdmin')
return render_template('users/hoteladduser.html', title='AddUser', subtypes=subtypes)
@app.route('/registerhotelusers', methods = ['GET', 'POST'])
@is_logged_in
def registerhotelusers():
if request.method == 'POST':
fullName = request.form['fullName']
email = request.form['email']
password = request.form['password']
userType = request.form['userType']
firstName = fullName.split(' ')[0]
password = sha256_crypt.hash(password)
hotelId = session.get('hotelId')
cursor = mysql.connection.cursor()
cursor.execute('SELECT * From users where email = %s', [email])
data = cursor.fetchall()
if len(data) == 0:
token = generateConfirmationToken(email)
sendMail(
subjectv='Confirm Email',
recipientsv=email,
linkv='confirm_email',
tokenv=token,
bodyv='Confirm your email by clicking this link ',
)
cursor.execute('INSERT INTO users(firstName, email, password, userType, userSubType, hotelId) Values(%s, %s, %s, %s, %s, %s)', (firstName, email, password, "hoteluser", userType, hotelId))
cursor.execute('INSERT INTO hotelUsers(fullName, email, password, userType, hotelId) VALUES(%s, %s, %s, %s, %s)', (fullName, email, password, userType, hotelId))
else:
flash('Email Already Registered', 'danger')
return render_template('users/hoteladduser.html', title="Register")
mysql.connection.commit()
cursor.close()
flash('New Hotel user has been added', 'success')
return render_template('index2.html')
@app.route('/adddeveloper', methods = ['GET', 'POST'])
@is_logged_in
def adddeveloper():
return render_template('users/adddeveloper.html', title='Add')
@app.route('/registerdeveloper', methods = ['GET', 'POST'])
@is_logged_in
def registerdeveloper():
if request.method == 'POST':
fullName = request.form['name']
email = request.form['email']
password = request.form['password']
firstName = fullName.split(' ')[0]
password = sha256_crypt.hash(password)
hotelId = session.get('hotelId')
cursor = mysql.connection.cursor()
cursor.execute('SELECT * From users where email = %s', [email])
data = cursor.fetchall()
if len(data) == 0:
token = generateConfirmationToken(email)
sendMail(
subjectv='Confirm Email',
recipientsv=email,
linkv='confirm_email',
tokenv=token,
bodyv='Confirm your email by clicking this link ',
)
cursor.execute('INSERT INTO developers(fullName, email, password, hotelId) values(%s, %s, %s, %s)',
(fullName, email, password, hotelId))
cursor.execute('INSERT INTO users(firstName, email, password, userType, hotelId) Values(%s, %s, %s, %s, %s)',
(firstName, email, password, 'developer', hotelId))
else:
flash('Email Already Registered', 'danger')
return render_template('users/adddeveloper.html', title="Register")
mysql.connection.commit()
cursor.close()
flash('You are now registered and can log in', 'success')
return render_template('login.html', title='Login')
return render_template('login.html', title='Login')
@app.route('/hoteladdusertype', methods = ["GET", "POST"])
@is_logged_in
def hoteladdusertype():
return render_template('users/hoteladdusertype.html', title='Register')
@app.route('/addusertype', methods = ["GET", 'POST'])
@is_logged_in
def addusertype():
requestv = getValC(request.form.get('request'))
requestCreate = getValC(request.form.get('requestCreate'))
requestManage = getValC(request.form.get('requestManage'))
requestCreateAdhoc = getValC(request.form.get('requestCreateAdhoc'))
requestCreateSeries = getValC(request.form.get('requestCreateSeries'))
strategy = getValC(request.form.get('strategy'))
strategyRooms = getValC(request.form.get('strategyRooms'))
strategyRate = getValC(request.form.get('strategyRate'))
strategyDiscount = getValC(request.form.get('strategyDiscount'))
strategyDiscountCreate = getValC(request.form.get('strategyDiscountCreate'))
strategyDiscountMap = getValC(request.form.get('strategyDiscountMap'))
strategyForecast = getValC(request.form.get('strategyForecast'))
settingsRequest = getValC(request.form.get('settingsRequest'))
settingsRequestCreate = getValC(request.form.get('settingsRequestCreate'))
settingsRequestMap = getValC(request.form.get('settingsRequestMap'))
settingsContactCreate = getValC(request.form.get('settingsContactCreate'))
settingsContactMap = getValC(request.form.get('settingsContactMap'))
settingsTime = getValC(request.form.get('settingsTime'))
settingsTimeCreate = getValC(request.form.get('settingsTimeCreate'))
settingsTimeMap = getValC(request.form.get('settingsTimeMap'))
settingsNegotiation = getValC(request.form.get('settingsNegotiation'))
settingsAutopilot = getValC(request.form.get('settingsAutopilot'))
usersHotel = getValC(request.form.get('usersHotel'))
usersHotelAdd = getValC(request.form.get('usersHotelAdd'))
usersCustomer = getValC(request.form.get('usersCustomer'))
usersCustomerAdd = getValC(request.form.get('usersCustomerAdd'))
usersCustomerEdit = getValC(request.form.get('usersCustomerEdit'))
usersCustomerUpload = getValC(request.form.get('usersCustomerUpload'))
analytics = getValC(request.form.get('analytics'))
analyticsDashboard = getValC(request.form.get('analyticsDashboard'))
analyticsBehavior = getValC(request.form.get('analyticsBehavior'))
analyticsPerformance = getValC(request.form.get('analyticsPerformance'))
analyticsRevenue = getValC(request.form.get('analyticsRevenue'))
analyticsTracking = getValC(request.form.get('analyticsTracking'))
settings = getValC(request.form.get('settings'))
settingsContact = getValC(request.form.get('settingsContact'))
users = getValC(request.form.get('users'))
usersHotelEdit = getValC(request.form.get('usersHotelEdit'))
userType = request.form['userType']
hotelId = session.get('hotelId')
cursor = mysql.connection.cursor()
cursor.execute('SELECT * From hotelMenuAccess where userType = %s && hotelId = %s', [userType, hotelId])
data = cursor.fetchall()
if len(data) == 0:
cursor.execute('INSERT INTO hotelMenuAccess(userType,request, requestCreate, requestManage, strategy, strategyRooms, strategyForecast, strategyRate, strategyDiscount, settings, settingsRequest, settingsContact, settingsTime, settingsNegotiation, settingsAutopilot, users, usersHotel, usersCustomer, analytics, analyticsDashboard, analyticsBehavior, analyticsPerformance, analyticsRevenue,analyticsTracking, requestCreateAdhoc, requestCreateSeries, strategyDiscountCreate, strategyDiscountMap, settingsRequestCreate, settingsRequestMap, settingsContactCreate, settingsContactMap, settingsTimeMap, settingsTimeCreate, usersHotelAdd, usersHotelEdit, usersCustomerAdd, usersCustomerEdit, usersCustomerUpload, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
userType, requestv, requestCreate, requestManage, strategy, strategyRooms, strategyForecast, strategyRate, strategyDiscount, settings, settingsRequest, settingsContact, settingsTime, settingsNegotiation, settingsAutopilot, users, usersHotel, usersCustomer, analytics, analyticsDashboard, analyticsBehavior, analyticsPerformance, analyticsRevenue, analyticsTracking, requestCreateAdhoc, requestCreateSeries, strategyDiscountCreate, strategyDiscountMap, settingsRequestCreate, settingsRequestMap, settingsContactCreate, settingsContactMap, settingsTimeMap, settingsTimeCreate, usersHotelAdd, usersHotelEdit, usersCustomerAdd, usersCustomerEdit, usersCustomerUpload, hotelId])
else:
flash('UserType Already Registered', 'danger')
return render_template('users/hoteladdusertype.html', title="Register")
mysql.connection.commit()
cursor.close()
flash('New userType added', 'success')
return render_template('index2.html', title='UserType')
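# addusertype reads ~40 permission flags one assignment at a time. A
# data-driven alternative (a sketch, not the original approach) collects the
# same flags into a dict keyed by field name; `PERMISSION_FIELDS` is a
# hypothetical list that must mirror the hotelMenuAccess column names.
PERMISSION_FIELDS = [
    'request', 'requestCreate', 'requestManage', 'requestCreateAdhoc',
    'requestCreateSeries', 'strategy', 'strategyRooms', 'strategyRate',
    # ... remaining hotelMenuAccess column names ...
]

def collect_permissions(form):
    """Map each permission field to its getValC-normalized checkbox value."""
    return {name: getValC(form.get(name)) for name in PERMISSION_FIELDS}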
@app.route('/managehotelusers', methods = ['GET', 'POST'])
@is_logged_in
def managehotelusers():
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('SELECT fullName, email, userType, active FROM hotelUsers where hotelId = %s', [hotelId])
result = cursor.fetchall()
cursor.close()
data = []
for res in result:
res['firstName'] = res['fullName'].split()[0]
data.append(res)
return render_template('users/managehotelusers.html', title= 'Users', data = data)
@app.route('/showprofile/<email>', methods = ['GET', 'POST'])
@is_logged_in
def showprofile(email):
cursor = mysql.connection.cursor()
cursor.execute('SELECT * FROM hotelUsers where email = %s', [email])
data = cursor.fetchall()
cursor.close()
data[0]['email_verified'] = "Yes" if data[0]['email_verified'] else "No"
data[0]['fullName'] = data[0]['fullName'].split(' ')[0]
data[0]['active'] = 'Yes' if data[0]['active'] else 'No'
return render_template('users/showprofile.html', title='Profile', data=data[0])
@app.route('/showprofileAll/<email>', methods = ['GET', 'POST'])
@is_logged_in
def showprofileAll(email):
cursor = mysql.connection.cursor()
cursor.execute('SELECT * FROM users where email = %s', [email])
data = cursor.fetchall()
data = data[0]
if (data['userType'] == 'developer'):
cursor.execute('SELECT active, email_verified from developers where email = %s', [email])
rr = cursor.fetchall()
rr = rr[0]
elif (data['userType'] == 'IATA'):
cursor.execute(
'SELECT active, email_verified from iataUsers where email = %s', [email])
rr = cursor.fetchall()
rr = rr[0]
elif (data['userType'] == 'hoteluser'):
cursor.execute('SELECT active, email_verified from hotelUsers where email = %s', [email])
rr = cursor.fetchall()
rr = rr[0]
elif (data['userType'] == 'customer'):
cursor.execute('SELECT active, email_verified from customers where email = %s', [email])
rr = cursor.fetchall()
rr = rr[0]
data['email_verified'] = "Yes" if rr['email_verified'] else "No"
data['active'] = 'Yes' if rr['active'] else 'No'
return render_template('users/showprofileAll.html', title='Profile', data=data)
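# showprofileAll (and several routes below) dispatch on userType with an
# if/elif chain over the same four tables. A lookup table removes the
# duplication; this is a sketch -- `USER_TABLES` and `fetch_status` are
# hypothetical names, and the mapping simply mirrors the branches above.
USER_TABLES = {
    'developer': 'developers',
    'IATA': 'iataUsers',
    'hoteluser': 'hotelUsers',
    'customer': 'customers',
}

def fetch_status(cursor, user_type, email):
    """Fetch active/email_verified from the per-type table, or None if unknown."""
    table = USER_TABLES.get(user_type)
    if table is None:
        return None
    # Table names cannot be bound as %s parameters; interpolation is safe only
    # because `table` comes from the fixed whitelist above, never user input.
    cursor.execute(
        'SELECT active, email_verified FROM {} WHERE email = %s'.format(table),
        [email])
    rows = cursor.fetchall()
    return rows[0] if rows else None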
@app.route('/editUser/<email>', methods = ["GET", "POST"])
@is_logged_in
def editUser(email):
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('SELECT * FROM hotelUsers where email = %s && hotelId = %s', [email, hotelId])
data = cursor.fetchall()
cursor.execute("SELECT userType FROM hotelMenuAccess where hotelId = %s", [hotelId])
data1 = cursor.fetchall()
subtypes = []
for d in data1:
subtypes.append(d['userType'])
if 'revenue' not in subtypes:
subtypes.append('revenue')
if 'reservation' not in subtypes:
subtypes.append('reservation')
if 'hotelAdmin' not in subtypes:
subtypes.append('hotelAdmin')
data[0]['email_verified'] = "Yes" if data[0]['email_verified'] else "No"
return render_template('users/editUser.html', title = 'Edit', data = data[0], subtypes = subtypes)
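# The three "append if missing" checks above reappear verbatim in edituserType
# further down. One hedged alternative keeps the defaults in a single tuple;
# a minimal sketch -- `DEFAULT_SUBTYPES` is a hypothetical constant.
DEFAULT_SUBTYPES = ('revenue', 'reservation', 'hotelAdmin')

def with_default_subtypes(subtypes):
    """Return subtypes plus any missing defaults, preserving original order."""
    return subtypes + [s for s in DEFAULT_SUBTYPES if s not in subtypes]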
@app.route('/editUserAll/<email>', methods = ['GET', 'POST'])
@is_logged_in
def editUserAll(email):
cursor = mysql.connection.cursor()
cursor.execute('SELECT * FROM users where email = %s', [email])
data = cursor.fetchall()
data = data[0]
if (data['userType'] == 'developer'):
cursor.execute(
'SELECT fullName, active, email_verified from developers where email = %s', [email])
rr = cursor.fetchall()
rr = rr[0]
elif (data['userType'] == 'IATA'):
cursor.execute(
'SELECT fullName, active, email_verified from iataUsers where email = %s', [email])
rr = cursor.fetchall()
rr = rr[0]
elif (data['userType'] == 'hoteluser'):
cursor.execute(
'SELECT fullName, active, email_verified from hotelUsers where email = %s', [email])
rr = cursor.fetchall()
rr = rr[0]
elif (data['userType'] == 'customer'):
cursor.execute(
'SELECT fullName, active, email_verified from customers where email = %s', [email])
rr = cursor.fetchall()
rr = rr[0]
data['email_verified'] = "Yes" if rr['email_verified'] else "No"
data['active'] = 'Yes' if rr['active'] else 'No'
data['fullName'] = rr['fullName']
return render_template('users/editUserAll.html', data=data)
@app.route('/submitEditUser', methods = ['GET', 'POST'])
@is_logged_in
def submitEditUser():
name = request.form['name']
userType = request.form['userType']
email_verified = getValC(request.form.get('email_verified'))
active = getValC(request.form.get('active'))
firstName = name.split()[0]
email = request.form['email']
hotelId = session.get('hotelId')
cursor = mysql.connection.cursor()
cursor.execute('Update hotelUsers SET fullName = %s, userType = %s, email_verified = %s, active = %s WHERE email = %s && hotelId = %s',(name, userType, email_verified, active, email, hotelId))
cursor.execute('Update users SET firstName = %s, userSubType = %s WHERE email = %s && hotelId = %s', (firstName, userType, email, hotelId))
mysql.connection.commit()
cursor.close()
flash('Hotel user has been edited', 'success')
return redirect(url_for("home2"))
@app.route('/submitEditUserAll2', methods = ["GET", 'POST'])
@is_logged_in
def submitEditUserAll2():
name = request.form['name']
email_verified = getValC(request.form.get('email_verified'))
active = getValC(request.form.get('active'))
firstName = name.split()[0]
email = request.form['email']
cursor = mysql.connection.cursor()
cursor.execute('SELECT * FROM users where email = %s', [email])
data = cursor.fetchall()
data = data[0]
if (data['userType'] == 'developer'):
cursor.execute('Update developers SET fullName = %s, email_verified = %s, active = %s where email = %s', [name, email_verified, active, email])
elif (data['userType'] == 'IATA'):
cursor.execute('Update iataUsers SET fullName = %s, email_verified = %s, active = %s where email = %s', [
name, email_verified, active, email])
elif (data['userType'] == 'hoteluser'):
cursor.execute('Update hotelUsers SET fullName = %s, email_verified = %s, active = %s where email = %s', [
name, email_verified, active, email])
elif (data['userType'] == 'customer'):
cursor.execute('Update customers SET fullName = %s, email_verified = %s, active = %s where email = %s', [
name, email_verified, active, email])
cursor.execute('Update users SET firstName = %s WHERE email = %s', (firstName, email))
mysql.connection.commit()
cursor.close()
flash('User has been edited', 'success')
return render_template('index2.html')
@app.route('/deactivateUser/<email>', methods = ['GET', 'POST'])
@is_logged_in  # route decorator moved outermost so the login check actually applies
def deactivateUser(email):
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute("UPDATE hotelUsers SET active = 0 where email = %s && hotelId = %s", [email, hotelId])
mysql.connection.commit()
cursor.close()
flash("User has been de-activated", 'success')
return redirect(url_for("managehotelusers"))
@app.route('/deactivateUserAll/<email>', methods=['GET', 'POST'])
@is_logged_in
def deactivateUserAll(email):
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('SELECT * FROM users where email = %s && hotelId = %s', [email, hotelId])
data = cursor.fetchall()
data = data[0]
if (data['userType'] == 'developer'):
cursor.execute('Update developers SET active = 0 where email = %s', [
email])
elif (data['userType'] == 'IATA'):
cursor.execute('Update iataUsers SET active = 0 where email = %s', [
email])
elif (data['userType'] == 'hoteluser'):
cursor.execute('Update hotelUsers SET active = 0 where email = %s', [
email])
elif (data['userType'] == 'customer'):
cursor.execute('Update customers SET active = 0 where email = %s', [
email])
mysql.connection.commit()
cursor.close()
flash("User has been de-activated", 'success')
return redirect(url_for('viewAllUsers'))
@app.route('/deactivateC/<email>', methods=['GET', 'POST'])
@is_logged_in
def deactivateC(email):
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('SELECT * FROM users where email = %s && hotelId = %s', [email, hotelId])
data = cursor.fetchall()
data = data[0]
if (data['userType'] == 'customer'):
cursor.execute('Update customers SET active = 0 where email = %s', [
email])
elif (data['userType'] == 'IATA'):
cursor.execute('Update iataUsers SET active = 0 where email = %s', [
email])
mysql.connection.commit()
flash("User has been de-activated", 'success')
return redirect(url_for('editCustomers'))
@app.route('/activateC/<email>', methods=['GET', 'POST'])
@is_logged_in
def activateC(email):
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('SELECT * FROM users where email = %s && hotelId = %s', [email, hotelId])
data = cursor.fetchall()
data = data[0]
if (data['userType'] == 'customer'):
cursor.execute('Update customers SET active = 1 where email = %s', [
email])
elif (data['userType'] == 'IATA'):
cursor.execute('Update iataUsers SET active = 1 where email = %s', [
email])
mysql.connection.commit()
flash("User has been activated", 'success')
return redirect(url_for('editCustomers'))
@app.route('/activateUser/<email>', methods=['GET', 'POST'])
@is_logged_in
def activateUser(email):
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute(
"UPDATE hotelUsers SET active = 1 where email = %s && hotelId = %s", [email, hotelId])
mysql.connection.commit()
cursor.close()
flash("User has been activated", 'success')
return redirect(url_for("managehotelusers"))
@app.route('/activateUserAll/<email>', methods=['GET', 'POST'])
@is_logged_in
def activateUserAll(email):
cursor = mysql.connection.cursor()
cursor.execute('SELECT * FROM users where email = %s', [email])
data = cursor.fetchall()
data = data[0]
if (data['userType'] == 'developer'):
cursor.execute('Update developers SET active = 1 where email = %s', [
email])
elif (data['userType'] == 'IATA'):
cursor.execute('Update iataUsers SET active = 1 where email = %s', [
email])
elif (data['userType'] == 'hoteluser'):
cursor.execute('Update hotelUsers SET active = 1 where email = %s', [
email])
elif (data['userType'] == 'customer'):
cursor.execute('Update customers SET active = 1 where email = %s', [
email])
mysql.connection.commit()
cursor.close()
flash("User has been activated", 'success')
return redirect(url_for('viewAllUsers'))
@app.route('/myprofile/<email>', methods = ['GET', 'POST'])
@is_logged_in
def myprofile(email):
cursor = mysql.connection.cursor()
cursor.execute('SELECT userType FROM users where email = %s', [email])
data = cursor.fetchall()
data = data[0]['userType']
result = []
if data == 'hoteluser':
cursor.execute('SELECT * From hotelUsers where email = %s', [email])
result = cursor.fetchall()
elif data == 'customer':
cursor.execute('SELECT * From customers where email = %s', [email])
result = cursor.fetchall()
elif data == 'IATA':
cursor.execute('SELECT * From iataUsers where email = %s', [email])
result = cursor.fetchall()
elif data == 'developer':
cursor.execute('SELECT * From developers where email = %s', [email])
result = cursor.fetchall()
result = result[0]
result['email_verified'] = "Yes" if result['email_verified'] else 'No'
if 'active' in result.keys():
result['active'] = "Yes" if result['active'] else 'No'
result['firstName'] = result['fullName'].split(' ')[0]
return render_template('users/myProfile.html', data= result)
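# Converting truthy DB flags to 'Yes'/'No' for display happens in myprofile,
# showprofile, and showprofileAll. A one-line helper keeps the rendering
# consistent; a sketch only -- `yes_no` is a hypothetical name.
def yes_no(flag):
    """Render a 0/1 (or None) database flag as 'Yes' or 'No'."""
    return 'Yes' if flag else 'No'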
@app.route('/submitEditUserAll', methods=['GET', 'POST'])
@is_logged_in
def submitEditUserAll():
name = request.form['name']
phone = request.form.get('phone')
country = request.form.get('country')
email = request.form['email']
agencyName = request.form.get('agencyName')
iataCode = request.form.get('iataCode')
organizationName = request.form.get('organizationName')
firstName = name.split(' ')[0]
cursor = mysql.connection.cursor()
cursor.execute('SELECT userType From users where email = %s', [email])
data = cursor.fetchall()
data = data[0]['userType']
if data == 'hoteluser':
cursor.execute('Update hotelUsers SET fullName = %s WHERE email = %s',
(name, email))
elif data == 'customer':
cursor.execute('Update customers SET fullName = %s, phone = %s, country = %s WHERE email = %s',
(name, phone, country, email))
elif data == 'IATA':
cursor.execute('Update iataUsers SET fullName = %s, phone = %s, country = %s WHERE email = %s',
(name, phone, country, email))
elif data == 'developer':
cursor.execute('Update developers SET fullName = %s, phone = %s WHERE email = %s',
(name, phone, email))
cursor.execute('Update users SET firstName = %s WHERE email = %s',
(firstName, email))
mysql.connection.commit()
cursor.close()
flash('User Details updated', 'success')
return redirect(url_for('home2'))
@app.route('/inviteemail', methods = ['GET', 'POST'])
@is_logged_in
def inviteemail():
email = request.form['email']
userType = request.form['userType']
cursor = mysql.connection.cursor()
cursor.execute('SELECT * From users where email = %s', [email])
data = cursor.fetchall()
hotelId = session.get('hotelId')
if len(data) != 0:
flash('Email already registered', 'danger')
return render_template('login.html', title='Login')
else:
token = generateConfirmationToken(email)
cursor.execute('INSERT INTO inviteEmail(email, userType, hotelId) VALUES(%s, %s, %s)', [email, userType, hotelId])
mysql.connection.commit()
cursor.close()
sendMail(
subjectv='Invite to TROMPAR',
recipientsv=email,
linkv='addhoteluserinv',
tokenv=token,
bodyv='Kindly fill the form to complete registration'
)
flash('Invitation sent to email', 'success')
return render_template('index2.html', title='Login')
@app.route('/addhoteluserinv/<token>', methods = ['GET', 'POST'])  # added the missing '/' before <token>, matching the other token routes
@is_logged_in
def addhoteluserinv(token):
email = confirmToken(token)
cursor = mysql.connection.cursor()
cursor.execute('SELECT * From inviteEmail where email = %s', [email])
data = cursor.fetchall()
data = data[0]
userType = data['userType']
return render_template('users/addhoteluserinv.html', title = 'Register', email = email, userType = userType)
@app.route('/edituserType', methods = ['GET', 'POST'])
@is_logged_in
def edituserType():
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute(
'SELECT * From hotelMenuAccess where hotelId = %s', [hotelId])
datah = cursor.fetchall()
if len(datah) != 0:
datah = datah[0]
cursor.execute("SELECT userType FROM hotelMenuAccess where hotelId = %s", [hotelId])
data = cursor.fetchall()
subtypes = []
for d in data:
subtypes.append(d['userType'])
if 'revenue' not in subtypes:
subtypes.append('revenue')
if 'reservation' not in subtypes:
subtypes.append('reservation')
if 'hotelAdmin' not in subtypes:
subtypes.append('hotelAdmin')
return render_template('users/editusertype.html', datah=datah, subtypes=subtypes)
@app.route('/euserType', methods=['GET', 'POST'])
@is_logged_in
def euserType():
userType = request.form.get('userType')
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute(
'SELECT * From hotelMenuAccess where userType = %s && hotelId = %s', [userType, hotelId])
datah = cursor.fetchall()
if len(datah) != 0:
datah = datah[0]
return render_template('users/eusertype.html', datah=datah, userType=userType)
@app.route('/submiteditusertype', methods = ['GET', 'POST'])
@is_logged_in
def submiteditusertype():
requestv = getValC(request.form.get('request'))
requestCreate = getValC(request.form.get('requestCreate'))
requestManage = getValC(request.form.get('requestManage'))
requestCreateAdhoc = getValC(request.form.get('requestCreateAdhoc'))
requestCreateSeries = getValC(request.form.get('requestCreateSeries'))
strategy = getValC(request.form.get('strategy'))
strategyRooms = getValC(request.form.get('strategyRooms'))
strategyRate = getValC(request.form.get('strategyRate'))
strategyDiscount = getValC(request.form.get('strategyDiscount'))
strategyDiscountCreate = getValC(request.form.get('strategyDiscountCreate'))
strategyDiscountMap = getValC(request.form.get('strategyDiscountMap'))
strategyForecast = getValC(request.form.get('strategyForecast'))
settingsRequest = getValC(request.form.get('settingsRequest'))
settingsRequestCreate = getValC(request.form.get('settingsRequestCreate'))
settingsRequestMap = getValC(request.form.get('settingsRequestMap'))
settingsContactCreate = getValC(request.form.get('settingsContactCreate'))
settingsContactMap = getValC(request.form.get('settingsContactMap'))
settingsTime = getValC(request.form.get('settingsTime'))
settingsTimeCreate = getValC(request.form.get('settingsTimeCreate'))
settingsTimeMap = getValC(request.form.get('settingsTimeMap'))
settingsNegotiation = getValC(request.form.get('settingsNegotiation'))
settingsAutopilot = getValC(request.form.get('settingsAutopilot'))
usersHotel = getValC(request.form.get('usersHotel'))
usersHotelAdd = getValC(request.form.get('usersHotelAdd'))
usersCustomer = getValC(request.form.get('usersCustomer'))
usersCustomerAdd = getValC(request.form.get('usersCustomerAdd'))
usersCustomerEdit = getValC(request.form.get('usersCustomerEdit'))
usersCustomerUpload = getValC(request.form.get('usersCustomerUpload'))
analytics = getValC(request.form.get('analytics'))
analyticsDashboard = getValC(request.form.get('analyticsDashboard'))
analyticsBehavior = getValC(request.form.get('analyticsBehavior'))
analyticsPerformance = getValC(request.form.get('analyticsPerformance'))
analyticsRevenue = getValC(request.form.get('analyticsRevenue'))
analyticsTracking = getValC(request.form.get('analyticsTracking'))
settings = getValC(request.form.get('settings'))
settingsContact = getValC(request.form.get('settingsContact'))
users = getValC(request.form.get('users'))
usersHotelEdit = getValC(request.form.get('usersHotelEdit'))
userType = request.form['userType']
analyticsStdReport = getValC(request.form.get('analyticsStdReport'))
strategyEvaluation = getValC(request.form.get('strategyEvaluation'))
settingBusinessReward = getValC(request.form.get('settingBusinessReward'))
strategyAncillary = getValC(request.form.get('strategyAncillary'))
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('SELECT * from hotelMenuAccess where userType = %s && hotelId = %s', [userType, hotelId])
length = cursor.fetchall()
if len(length) != 0:
cursor.execute('Update hotelMenuAccess SET request = %s, requestCreate = %s, requestManage = %s, strategy = %s, strategyRooms = %s, strategyForecast = %s, strategyRate = %s, strategyDiscount = %s, settings = %s, settingsRequest = %s, settingsContact = %s, settingsTime = %s, settingsNegotiation = %s, settingsAutopilot = %s, users = %s, usersHotel = %s, usersCustomer = %s, analytics = %s, analyticsDashboard = %s, analyticsBehavior = %s, analyticsPerformance = %s, analyticsRevenue = %s, analyticsTracking = %s, requestCreateAdhoc = %s, requestCreateSeries = %s, strategyDiscountCreate = %s, strategyDiscountMap = %s, settingsRequestCreate = %s, settingsRequestMap = %s, settingsContactCreate = %s, settingsContactMap = %s, settingsTimeMap = %s, settingsTimeCreate = %s, usersHotelAdd = %s, usersHotelEdit = %s, usersCustomerAdd = %s, usersCustomerEdit = %s, usersCustomerUpload = %s, analyticsStdReport = %s, strategyEvaluation = %s,strategyAncillary = %s, settingBusinessReward = %s WHERE userType = %s && hotelId = %s', [
requestv, requestCreate, requestManage, strategy, strategyRooms, strategyForecast, strategyRate, strategyDiscount, settings, settingsRequest, settingsContact, settingsTime, settingsNegotiation, settingsAutopilot, users, usersHotel, usersCustomer, analytics, analyticsDashboard, analyticsBehavior, analyticsPerformance, analyticsRevenue, analyticsTracking, requestCreateAdhoc, requestCreateSeries, strategyDiscountCreate, strategyDiscountMap, settingsRequestCreate, settingsRequestMap, settingsContactCreate, settingsContactMap, settingsTimeMap, settingsTimeCreate, usersHotelAdd, usersHotelEdit, usersCustomerAdd, usersCustomerEdit, usersCustomerUpload, analyticsStdReport, strategyEvaluation,strategyAncillary, settingBusinessReward, userType, hotelId])
else:
cursor.execute('INSERT INTO hotelMenuAccess (request, requestCreate, requestManage, strategy, strategyRooms, strategyForecast, strategyRate, strategyDiscount, settings, settingsRequest, settingsContact, settingsTime, settingsNegotiation, settingsAutopilot, users, usersHotel, usersCustomer, analytics, analyticsDashboard, analyticsBehavior, analyticsPerformance, analyticsRevenue, analyticsTracking, requestCreateAdhoc, requestCreateSeries, strategyDiscountCreate, strategyDiscountMap, settingsRequestCreate, settingsRequestMap, settingsContactCreate, settingsContactMap, settingsTimeMap, settingsTimeCreate, usersHotelAdd, usersHotelEdit, usersCustomerAdd, usersCustomerEdit, usersCustomerUpload, analyticsStdReport, strategyEvaluation,strategyAncillary, settingBusinessReward, userType, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s) ',[
requestv, requestCreate, requestManage, strategy, strategyRooms, strategyForecast, strategyRate, strategyDiscount, settings, settingsRequest, settingsContact, settingsTime, settingsNegotiation, settingsAutopilot, users, usersHotel, usersCustomer, analytics, analyticsDashboard, analyticsBehavior, analyticsPerformance, analyticsRevenue, analyticsTracking, requestCreateAdhoc, requestCreateSeries, strategyDiscountCreate, strategyDiscountMap, settingsRequestCreate, settingsRequestMap, settingsContactCreate, settingsContactMap, settingsTimeMap, settingsTimeCreate, usersHotelAdd, usersHotelEdit, usersCustomerAdd, usersCustomerEdit, usersCustomerUpload, analyticsStdReport, strategyEvaluation,strategyAncillary, settingBusinessReward, userType, hotelId])
mysql.connection.commit()
cursor.close()
flash('UserType updated!', 'success')
return redirect(url_for('home2'))
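# The UPDATE/INSERT above must keep a 40+ item column list aligned with a 40+
# item placeholder list by hand. Building both clauses from one list of column
# names makes the pairing impossible to get wrong; a sketch only, assuming the
# caller supplies the hotelMenuAccess column names and matching values in order.
def build_update(table, columns, key_columns):
    """Compose 'UPDATE ... SET c1 = %s, ... WHERE k1 = %s && k2 = %s'."""
    # Column names are interpolated, not bound; callers must pass only
    # trusted, hard-coded column lists.
    set_clause = ', '.join('{} = %s'.format(c) for c in columns)
    where_clause = ' && '.join('{} = %s'.format(k) for k in key_columns)
    return 'UPDATE {} SET {} WHERE {}'.format(table, set_clause, where_clause)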
@app.route('/viewAllUsers', methods=['GET', 'POST'])
@is_logged_in
def viewAllUsers():
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute("SELECT * FROM users where hotelId = %s", [hotelId])
data = cursor.fetchall()
rr = []  # pre-initialize so an unmatched userType cannot leave rr unbound below
for r in data:
if (r['userType'] == 'developer'):
cursor.execute(
'SELECT active from developers where email = %s', [r['email']])
rr = cursor.fetchall()
if len(rr) != 0:
rr = rr[0]
elif (r['userType'] == 'IATA'):
cursor.execute(
'SELECT active from iataUsers where email = %s', [r['email']])
rr = cursor.fetchall()
if len(rr) != 0:
rr = rr[0]
elif (r['userType'] == 'hoteluser'):
cursor.execute(
'SELECT active from hotelUsers where email = %s', [r['email']])
rr = cursor.fetchall()
if len(rr) != 0:
rr = rr[0]
elif (r['userType'] == 'customer'):
cursor.execute(
'SELECT active from customers where email = %s', [r['email']])
rr = cursor.fetchall()
if len(rr) != 0:
rr = rr[0]
if len(rr) != 0:
r['active'] = rr['active']
cursor.close()
return render_template('users/manageAllUsers.html', data=data)
@app.route('/editCustomers', methods = ['GET', 'POST'])
@is_logged_in
def editCustomers():
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('SELECT * From users where (userType = %s or userType = %s) && hotelId = %s', ["customer", "IATA", hotelId])
data = cursor.fetchall()
for r in data:
if (r['userType'] == 'customer'):
cursor.execute(
'SELECT active, email_verified from customers where email = %s', [r['email']])
elif (r['userType'] == 'IATA'):
cursor.execute(
'SELECT active, email_verified from iataUsers where email = %s', [r['email']])
rr = cursor.fetchall()
# Guard against a missing per-type row; indexing an empty result crashed here
# (the fallback assumes inactive/unverified for an orphaned user record).
rr = rr[0] if len(rr) != 0 else {'active': 0, 'email_verified': 0}
r['active'] = rr['active']
r['email_verified'] = rr['email_verified']
return render_template('users/managecustomers.html', data =data)
# Users Module Finished
@app.route('/strategyRooms', methods = ['GET', 'POST'])
@is_logged_in
def strategyRooms():
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('SELECT * From room where hotelId = %s', [hotelId])
data = cursor.fetchall()
if len(data) == 0:
return render_template('strategy/strategyRooms.html')
else:
totalRooms = 0
for d in data:
totalRooms += int(d['count'])
return render_template('strategy/editstrategyRooms.html', data = data, totalRooms = totalRooms)
@app.route('/strategyRoomsSubmit', methods = ['GET', 'POST'])
@is_logged_in
def strategyRoomsSubmit():
inp = request.json
inp.pop(0)  # drop the client-side header row; pop(0) avoids remove()'s match-by-value lookup
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
for i in inp:
cursor.execute("INSERT INTO room(type, count, single, doublev, triple, quad, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s)" , [i[0][0], i[1], int(i[2]), int(i[3]), int(i[4]), int(i[5]), hotelId])
mysql.connection.commit()
cursor.close()
flash('Your Room data has been entered', 'success')
return ('', 204)
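# The per-row INSERT loop above issues one database round trip per room type.
# MySQLdb-style cursors support executemany(), which sends the whole batch in
# a single call; a minimal sketch assuming `rows` has the same shape as `inp`.
def insert_rooms(cursor, rows, hotelId):
    """Batch-insert room rows: (type, count, single, double, triple, quad)."""
    cursor.executemany(
        'INSERT INTO room(type, count, single, doublev, triple, quad, hotelId)'
        ' VALUES(%s, %s, %s, %s, %s, %s, %s)',
        [(i[0][0], i[1], int(i[2]), int(i[3]), int(i[4]), int(i[5]), hotelId)
         for i in rows])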
@app.route('/editstrategyRoomsSubmit', methods = ['GET', 'POST'])
@is_logged_in
def editstrategyRoomsSubmit():
inp = request.json
if len(inp) == 0:
return render_template('index2.html')
inp.pop(0)  # drop the client-side header row
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('DELETE FROM room where hotelId = %s', [hotelId])
mysql.connection.commit()
for i in inp:
cursor.execute("INSERT INTO room(type, count, single, doublev, triple, quad, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s)", [
i[0][0], i[1], int(i[2]), int(i[3]), int(i[4]), int(i[5]), hotelId])
mysql.connection.commit()
cursor.close()
flash('Your Room data has been updated', 'success')
return ('', 204)
@app.route('/strategyRate', methods = ['GET', 'POST'])
@is_logged_in
def strategyRate():
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('SELECT * From room where hotelId = %s', [hotelId])
data = cursor.fetchall()
if len(data) == 0:
flash('Kindly fill types of Rooms first', 'danger')
return render_template('strategy/strategyRooms.html')
else:
cursor = mysql.connection.cursor()
cursor.execute('SELECT * From rate where hotelId = %s', [hotelId])
data1 = cursor.fetchall()
if len(data1) == 0:
return render_template('strategy/strategyRate.html', data = data)
else:
cursor.execute('SELECT startDate, endDate from rate where hotelId = %s', [hotelId])
storedDates = cursor.fetchall()
for d in data1:
dow = ""
if (d['monday'] == '1'):
dow += " M, "
if (d['tuesday'] == '1'):
dow += " Tu, "
if (d['wednesday'] == '1'):
dow += "W, "
if (d['thursday'] == '1'):
dow += "Th, "
if (d['friday'] == '1'):
dow += "F, "
if (d['saturday'] == '1'):
dow += "Sa, "
if (d['sunday'] == '1'):
dow += "Su, "
try:
d['dow'] = dow[:dow.rindex(', ')]
except ValueError:  # rindex fails only when there is no trailing ', ' to trim
d['dow'] = dow
d['startDate'] = d['startDate'].strftime('%y-%b-%d')
x = d['startDate'].split('-')
strd = x[2] + " " + x[1] + ", " + x[0]
d['startDate'] = strd
d['endDate'] = d['endDate'].strftime('%y-%b-%d')
x = d['endDate'].split('-')
strd = x[2] + " " + x[1] + ", " + x[0]
d['endDate'] = strd
return render_template('strategy/editstrategyRate.html', data=data, data1=data1, storedDates=storedDates)
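# The strftime('%y-%b-%d') / split / reorder dance above (and in many routes
# below) rebuilds "DD Mon, YY" by hand; strftime can produce the same string
# directly. A sketch only -- `fmt_display` is a hypothetical helper name.
def fmt_display(d):
    """Format a date/datetime as e.g. '05 Mar, 21' in one strftime call."""
    return d.strftime('%d %b, %y')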
@app.route('/strategyRateSubmit', methods = ['GET', 'POST'])
@is_logged_in
def strategyRateSubmit():
inp = request.json
if len(inp) == 0:
return render_template('index2.html')
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('DELETE FROM rate where hotelId = %s', [hotelId])
mysql.connection.commit()
for i in inp:
cursor.execute("INSERT INTO rate(startDate, endDate, monday, tuesday, wednesday, thursday, friday, saturday, sunday, type, sor, dor, tor, qor, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)", [i[0], i[1], i[2], i[3], i[4], i[5], i[6], i[7], i[8], i[9][0], i[10], i[11], i[12], i[13], hotelId])
mysql.connection.commit()
cursor.close()
flash('Your Rate data has been updated', 'success')
return ('', 204)
@app.route('/requestCreateAdhoc', methods = ['GET', 'POST'])
@is_logged_in
def requestCreateAdhoc():
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('SELECT * From room where hotelId = %s', [hotelId])
data = cursor.fetchall()
if len(data) == 0:
flash('Kindly fill types of Rooms first', 'danger')
return render_template('strategy/strategyRooms.html')
c1 = 0
c2 = 0
for r in data:
if r['type'] == '1':
c1 = r['single'] + r['doublev'] + r['triple'] + r['quad']
elif r['type'] == '2':
c2 = r['single'] + r['doublev'] + r['triple'] + r['quad']
cursor.execute('SELECT email From users where userType != %s && userType != %s && hotelId = %s', ['hoteluser', 'developer', hotelId])
users = cursor.fetchall()
cursor.execute('SELECT * From settingsRequest where hotelId = %s Order By submittedOn desc', [hotelId])
result = cursor.fetchall()
check_flag = False
if len(result) != 0:
check_flag = True
result = result[0]
return render_template('request/requestCreateAdhoc.html', data = data, users = users, check_flag = check_flag, result = result, c1 = c1, c2 = c2)
@app.route('/requestCreateAdhocSubmit', methods = ['GET', 'POST'])
@is_logged_in
def requestCreateAdhocSubmit():
inp = request.json
cursor = mysql.connection.cursor()
username = session['email']
cursor.execute('SELECT userType from users where email = %s', [inp['createdFor']])
userType = cursor.fetchall()
userType = userType[0]['userType']
hotelId = session.get('hotelId')
if inp['commissionable'] == '':
inp['commissionable'] = 0
inp['groupBlock'] = 1 if inp['groupBlock'] == True else 0
inp['foc'] = 1 if inp['foc'] == True else 0
if inp['foc1'] == '':
inp['foc1'] = 0
if inp['foc2'] == '':
inp['foc2'] = 0
if inp['paymentDays'] == '':
inp['paymentDays'] = 0
if inp['comments'] == '':
inp['comments'] = 0
cursor.execute('SELECT Count(*) from request where hotelId = %s', [hotelId])
count = cursor.fetchall()
count = count[0]['Count(*)'] + 1
# zfill pads to three digits and keeps working past 99 requests; the old
# if/elif chain left `id` undefined once count reached 99.
id = "TR" + str(count).zfill(3)
today = datetime.date.today()
d1 = datetime.datetime.strptime(inp['checkIn'], "%Y/%m/%d").date()
lead = (d1 - today).days
today = datetime.datetime.today()
cursor.execute('INSERT INTO request(category, groupName, checkIn, checkOut, nights, commissionable, groupBlock, foc, foc1, foc2, budget, formPayment, paymentTerms, paymentDays, comments, id, createdBy, createdFor, leadTime, status, userType, createdOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
inp['category'], inp['groupName'], inp['checkIn'], inp['checkOut'], inp['nights'], inp['commissionable'], inp['groupBlock'], inp['foc'], inp['foc1'], inp['foc2'], inp['budget'], procArr(inp['formPayment']), inp['paymentTerms'], inp['paymentDays'], inp['comments'], id, username, inp['createdFor'], lead, statusval1, userType, today, hotelId
])
table = inp['table_result']
for t in table:
if (t['type'] == '1'):
cursor.execute('INSERT INTO request1Bed(date, occupancy, count, id, hotelId) VALUES(%s, %s, %s, %s, %s)', [
t['date'], t['occupancy'], t['count'], id, hotelId])
else:
cursor.execute(
'INSERT INTO request2Bed(date, occupancy, count, id, hotelId) VALUES(%s, %s, %s, %s, %s)', [t['date'], t['occupancy'], t['count'], id, hotelId])
mysql.connection.commit()
cursor.close()
flash('Your Request has been entered', 'success')
return ('', 204)
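# Deriving the next request id from COUNT(*) + 1 (above) can hand two
# concurrent submissions the same id, and before the zfill fix it also broke
# at count >= 99. A hedged alternative lets the database allocate the number;
# this sketch assumes a hypothetical AUTO_INCREMENT table named requestSeq.
def next_request_id(cursor):
    """Allocate a race-free 'TRnnn' id from an AUTO_INCREMENT sequence table."""
    cursor.execute('INSERT INTO requestSeq () VALUES ()')
    return 'TR' + str(cursor.lastrowid).zfill(3)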
@app.route('/', methods=['GET', 'POST'])
def home2():
try:
if session['logged_in'] == True:
if session['userType'] == 'hoteluser' or session['userType'] == 'developer':
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('SELECT * FROM request where hotelId = %s', [hotelId])
data = cursor.fetchall()
data = data[::-1]
for d in data:
d['checkIn'] = d['checkIn'].strftime("%d-%b-%y")
return render_template('index2.html', title = 'Home', data = data)
else:
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')  # was missing in this branch; the bare except silently masked the NameError
cursor.execute('SELECT * From request where createdFor = %s && hotelId = %s', [session['email'], hotelId])
data = cursor.fetchall()
data = data[::-1]
for d in data:
d['checkIn'] = d['checkIn'].strftime("%d-%b-%y")
return render_template('index2.html', title='Home', data=data)
return render_template('index2.html', title='Home')
except Exception:  # typically the KeyError raised when 'logged_in' is absent from the session
#updatePasswords()
return render_template('login.html', title='Login')
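# home2 relies on that try/except to handle "not logged in": a missing session
# key raises KeyError and falls through to the login page. An explicit check
# reads more clearly and does not swallow unrelated errors; a sketch of the
# guard only, not a drop-in replacement for the view.
def is_session_active(session):
    """True when the session exists and is flagged as logged in."""
    return bool(session.get('logged_in'))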
@app.route('/strategyDiscountCreate', methods = ['GET', 'POST'])
@is_logged_in
def strategyDiscountCreate():
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('SELECT count from room where hotelId = %s', [hotelId])
data = cursor.fetchall()
rooms = 0
for d in data:
rooms += int(d['count'])
cursor.execute('SELECT * FROM discountMap where hotelId = %s', [hotelId])
discountGrids = cursor.fetchall()
cursor.execute('SELECT * FROM discountMap WHERE defaultm = TRUE && hotelId = %s', [hotelId])
f = cursor.fetchall()
flag = False
defaultId = -1
if len(f) != 0:
flag = True
defaultId = f[0]['discountId']
for r in discountGrids:
r['startDate'] = r['startDate'].strftime('%y-%b-%d')
x = r['startDate'].split('-')
r['startDate']= x[2] + " " + x[1] + ", " + x[0]
r['endDate'] = r['endDate'].strftime('%y-%b-%d')
x = r['endDate'].split('-')
r['endDate'] = x[2] + " " + x[1] + ", " + x[0]
cursor.execute('SELECT startDate, endDate from discountMap where defaultm = 0 && hotelId = %s', [hotelId])
storedDates = cursor.fetchall()
factor = rooms * 20 // 100
halffactor = factor // 2
return render_template('strategy/strategyDiscountCreate.html', rooms = rooms, discountGrids = discountGrids, flag = flag, defaultId = defaultId, storedDates = storedDates, factor = factor, halffactor = halffactor)
@app.route('/strategyDiscountSubmit', methods = ['GET', 'POST'])
@is_logged_in
def strategyDiscountSubmit():
inp = request.json
occ = inp['occ']
hotelId = session.get('hotelId')
cursor = mysql.connection.cursor()
for o in occ:
cursor.execute('INSERT INTO discountOcc(discountId, occ, col, hotelId) VALUES(%s, %s, %s, %s)', [inp['discountId'], o['occ'], o['col'], hotelId])
email = session['email']
time = datetime.datetime.utcnow()
cursor.execute('INSERT INTO discountMap(discountId, startDate, endDate, defaultm, createdBy, createdOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s)', [inp['discountId'], inp['startDate'], inp['endDate'], inp['defaultm'], email, time, hotelId])
for jindex, l in enumerate(inp['leadtime']):
lead = l.split(' - ')
leadMin = lead[0]
if (len(lead) == 2):
leadMax = lead[1]
else:
leadMax = 730
discountId = inp['discountId']
for index, r in enumerate(inp['ranges']):
rng = r.split(' - ')  # renamed from `range` to stop shadowing the built-in
roomMin = rng[0]
roomMax = rng[1]
values = inp['values']
value = values[jindex][index]
cursor.execute('INSERT INTO discount(discountId, leadMin, leadMax, roomMin, roomMax, value, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s)', [discountId, leadMin, leadMax, roomMin, roomMax, value, hotelId])
mysql.connection.commit()
cursor.close()
flash('Your discount grid has been entered', 'success')
return ('', 204)
@app.route('/showDiscountGrid/<id>', methods = ['GET', 'POST'])
@is_logged_in
def showDiscountGrid(id):
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('SELECT * from discountMap where discountId = %s && hotelId = %s', [id, hotelId])
data = cursor.fetchall()
data = data[0]
cursor.execute('SELECT * FROM discount where discountId = %s && hotelId = %s', [id, hotelId])
grid = cursor.fetchall()
cursor.execute('SELECT * FROM discountOcc where discountId = %s && hotelId = %s', [id, hotelId])
occ = cursor.fetchall()
ranges = []
range1 = {}
for l in grid:
key = l['roomMin'] + " - " + l['roomMax']
if key not in range1:
range1[key] = 0
ranges.append(key)
else:
break
result = []
tup = {}
for d in grid:
lead_min = d['leadMin']  # renamed from min/max to stop shadowing the built-ins; unused `l = []` dropped
lead_max = d['leadMax']
key = lead_min + " - " + lead_max
dic = {}
dic['min'] = d['roomMin']
dic['max'] = d['roomMax']
dic['value'] = d['value']
if key in tup:
tup[key].append(dic)
else:
tup[key] = [dic]
result = tup
cursor.execute('SELECT * From discountMap where defaultm = 1 && hotelId = %s', [hotelId])
ffm = cursor.fetchall()
flag = True
if len(ffm) == 0:
flag = False
cursor.execute(
'SELECT startDate, endDate from discountMap where defaultm = 0 AND discountId != %s && hotelId = %s', [id, hotelId])
storedDates = cursor.fetchall()
data['startDate'] = data['startDate'].strftime('%y-%b-%d')
x = data['startDate'].split('-')
data['startDate']= x[2] + " " + x[1] + ", " + x[0]
data['endDate'] = data['endDate'].strftime('%y-%b-%d')
x = data['endDate'].split('-')
data['endDate'] = x[2] + " " + x[1] + ", " + x[0]
return render_template('strategy/showDiscountGrid1.html', grid = grid, data = data, ranges = ranges, result = result, occ = occ, flag = flag, storedDates = storedDates)
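# The lead-time pivot in showDiscountGrid builds `tup` with a manual
# "key already present?" branch. collections.defaultdict removes that branch;
# a sketch assuming the same grid row shape (leadMin/leadMax/roomMin/roomMax/value).
from collections import defaultdict

def pivot_grid(grid):
    """Group discount cells by 'leadMin - leadMax' key, preserving row order."""
    by_lead = defaultdict(list)
    for d in grid:
        key = d['leadMin'] + ' - ' + d['leadMax']
        by_lead[key].append(
            {'min': d['roomMin'], 'max': d['roomMax'], 'value': d['value']})
    return dict(by_lead)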
@app.route('/unmarkDefault/<id>', methods = ['GET', 'POST'])
@is_logged_in
def unmarkDefault(id):
cursor = mysql.connection.cursor()
cursor.execute('UPDATE discountMap set defaultm = 0 where discountId = %s', [id])
mysql.connection.commit()
cursor.close()
flash('Grid marked as non default', 'success')
return redirect(url_for('strategyDiscountCreate'))
@app.route('/markDefault/<id>', methods = ['GET', 'POST'])
@is_logged_in
def markDefault(id):
cursor = mysql.connection.cursor()
cursor.execute('UPDATE discountMap set defaultm = 1 where discountId = %s', [id])
mysql.connection.commit()
cursor.close()
flash('Grid marked as default', 'success')
return redirect(url_for('strategyDiscountCreate'))
@app.route('/deactivateDiscount/<id>', methods = ['GET', 'POST'])
@is_logged_in
def deactivateDiscount(id):
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute(
'UPDATE discountMap set active = 0 where discountId = %s && hotelId = %s', [id, hotelId])
mysql.connection.commit()
cursor.close()
flash('Grid Deactivated', 'danger')
return redirect(url_for('strategyDiscountCreate'))
@app.route('/activateDiscount/<id>', methods = ['GET', 'POST'])
@is_logged_in
def activateDiscount(id):
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute(
'UPDATE discountMap set active = 1 where discountId = %s && hotelId = %s', [id, hotelId])
mysql.connection.commit()
cursor.close()
flash('Grid Activated', 'success')
return redirect(url_for('strategyDiscountCreate'))
@app.route('/editDiscountGrid', methods = ['GET', 'POST'])
@is_logged_in
def editDiscountGrid():
inp = request.json
cursor = mysql.connection.cursor()
email = session['email']
time = datetime.datetime.utcnow()
hotelId = session.get('hotelId')
cursor.execute('UPDATE discountMap SET startDate = %s, endDate = %s, createdBy = %s, createdOn = %s WHERE discountId = %s && hotelId = %s', [
inp['startDate'], inp['endDate'], email, time, inp['discountId'], hotelId
])
cursor.execute('DELETE FROM discount where discountId = %s && hotelId = %s', [inp['discountId'], hotelId])
mysql.connection.commit()
for jindex, l in enumerate(inp['leadtime']):
lead = l.split(' - ')  # aligned with strategyDiscountSubmit; splitting on a bare '-' left stray spaces in the bounds
leadMin = lead[0]
if (len(lead) == 2):
leadMax = lead[1]
else:
leadMax = 365  # note: strategyDiscountSubmit uses 730 for the open-ended bucket; the two probably should match
discountId = inp['discountId']
for index, r in enumerate(inp['ranges']):
rng = r.split(' - ')  # renamed from `range` to stop shadowing the built-in
roomMin = rng[0]
roomMax = rng[1]
values = inp['values']
value = values[jindex][index]
cursor.execute('INSERT INTO discount(discountId, leadMin, leadMax, roomMin, roomMax, value, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s)', [
discountId, leadMin, leadMax, roomMin, roomMax, value, hotelId])
mysql.connection.commit()
cursor.close()
flash('Your discount grid has been edited', 'success')
return ('', 204)
@app.route('/settingsAutopilot', methods = ['GET', 'POST'])
@is_logged_in
def settingsAutopilot():
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('SELECT * from autopilot where hotelId = %s', [hotelId])
data = cursor.fetchall()
for d in data:
if d['policy'] == 'manual':
d['policy'] = 'Manual Calculation'
d['startDate'] = d['startDate'].strftime('%y-%b-%d')
x = d['startDate'].split('-')
d['startDate']= x[2] + " " + x[1] + ", " + x[0]
d['endDate'] = d['endDate'].strftime('%y-%b-%d')
x = d['endDate'].split('-')
d['endDate'] = x[2] + " " + x[1] + ", " + x[0]
d['createdOn'] = d['createdOn'].strftime('%y-%b-%d')
x = d['createdOn'].split('-')
d['createdOn'] = x[2] + " " + x[1] + ", " + x[0]
return render_template('settings/settingsAutopilot.html', data = data)
@app.route('/settingsAutopilotSubmit', methods = ['GET', 'POST'])
@is_logged_in
def settingsAutopilotSubmit():
inp = request.json
email = session['email']
time = datetime.datetime.utcnow()
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('INSERT into autopilot(startDate, endDate, policy, policyName, createdBy, createdOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s)', [inp['startDate'], inp['endDate'], inp['policy'], inp['policyName'],
email, time, hotelId
])
mysql.connection.commit()
cursor.close()
flash('Your Autopilot setting has been added', 'success')
return ('', 204)
@app.route('/showAutopilot/<id>', methods = ['GET', 'POST'])
@is_logged_in
def showAutopilot(id):
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('SELECT * From autopilot where policyName = %s && hotelId = %s', [id, hotelId])
data = cursor.fetchall()
data = data[0]
data['startDate'] = data['startDate'].strftime('%y-%b-%d')
x = data['startDate'].split('-')
data['startDate']= x[2] + " " + x[1] + ", " + x[0]
data['endDate'] = data['endDate'].strftime('%y-%b-%d')
x = data['endDate'].split('-')
data['endDate'] = x[2] + " " + x[1] + ", " + x[0]
return render_template('settings/showAutopilot.html', data = data)
@app.route('/editAutopilot', methods = ['GET', 'POST'])
@is_logged_in
def editAutopilot():
inp = request.json
hotelId = session.get('hotelId')
cursor = mysql.connection.cursor()
cursor.execute('UPDATE autopilot SET startDate = %s, endDate = %s, policy = %s WHERE policyName = %s && hotelId = %s', [
inp['startDate'], inp['endDate'], inp['policy'], inp['policyName'], hotelId
])
mysql.connection.commit()
cursor.close()
flash('Your Autopilot setting has been edited', 'success')
return ('', 204)
@app.route('/deactiveAutopilot/<id>', methods = ['GET', 'POST'])
@is_logged_in
def deactiveAutopilot(id):
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('UPDATE autopilot set active = 0 where policyName = %s && hotelId = %s', [id, hotelId])
mysql.connection.commit()
cursor.close()
flash('Your Autopilot has been de-activated', 'danger')
return redirect(url_for('settingsAutopilot'))
@app.route('/activateAutopilot/<id>', methods = ['GET', 'POST'])
@is_logged_in
def activateAutopilot(id):
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('UPDATE autopilot set active = 1 where policyName = %s && hotelId = %s', [id, hotelId])
mysql.connection.commit()
cursor.close()
flash('Your Autopilot has been activated', 'success')
return redirect(url_for('settingsAutopilot'))
@app.route('/settingsRequestCreate', methods=['GET', 'POST'])
@is_logged_in
def settingsRequestCreate():
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('SELECT * from settingsRequest where hotelId = %s Order By submittedOn desc', [hotelId])
result = cursor.fetchall()
flag = True
if len(result) == 0:
flag = False
else:
result = result[0]
return render_template('settings/settingsRequestCreate.html', result=result, flag=flag)
@app.route('/settingsRequestSubmit', methods=['GET', 'POST'])
@is_logged_in
def settingsRequestSubmit():
strategy = request.form['strategy']
count = request.form['count']
email = session['email']
time = datetime.datetime.utcnow()
hotelId = session.get('hotelId')
cursor = mysql.connection.cursor()
cursor.execute('INSERT INTO settingsRequest(strategy, count, submittedBy, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s)', [
strategy, count, email, time, hotelId])
mysql.connection.commit()
cursor.close()
flash('Request Settings have been updated', 'success')
return redirect(url_for("settingsRequestCreate"))
@app.route('/settingsNegotiation', methods=['GET', 'POST'])
@is_logged_in
def settingsNegotiation():
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('SELECT * from settingsNegotiation where hotelId = %s', [hotelId])
result = cursor.fetchall()
flag = True
if len(result) == 0:
flag = False
else:
result = result[0]
return render_template('settings/settingsNegotiation.html', result=result, flag=flag)
@app.route('/settingsNegotiationSubmit', methods=['GET', 'POST'])
@is_logged_in
def settingsNegotiationSubmit():
count = request.form['count']
email = session['email']
time = datetime.datetime.utcnow()
hotelId = session.get('hotelId')
cursor = mysql.connection.cursor()
cursor.execute('SELECT * From settingsNegotiation where hotelId = %s', [hotelId])
data = cursor.fetchall()
if len(data) == 0:
cursor.execute('INSERT INTO settingsNegotiation(count, submittedOn, submittedBy, hotelId) VALUES(%s, %s, %s, %s)', [
count, time, email, hotelId])
else:
cursor.execute("UPDATE settingsNegotiation set count = %s, submittedOn = %s, submittedBy = %s where hotelId = %s", [
count, time, email, hotelId])
mysql.connection.commit()
cursor.close()
flash('Negotiation settings have been updated', 'success')
return redirect(url_for("settingsNegotiation"))
@app.route('/settingsContractCreate', methods=['GET', 'POST'])
@is_logged_in
def settingsContactCreate():
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('SELECT * from contract where hotelId = %s', [hotelId])
result = cursor.fetchall()
for r in result:
r['submittedOn'] = r['submittedOn'].strftime('%y-%b-%d')
x = r['submittedOn'].split('-')
r['submittedOn'] = x[2] + " " + x[1] + ", " + x[0]
return render_template('settings/settingsContractCreate.html', result=result)
@app.route('/settingsContractSubmit', methods=['GET', 'POST'])
@is_logged_in
def settingsContractSubmit():
inp = request.json
cursor = mysql.connection.cursor()
email = session['email']
time = datetime.datetime.utcnow()
hotelId = session.get('hotelId')
cursor.execute('INSERT INTO contract(id, contract, submittedOn, submittedBy, hotelId) VALUES(%s, %s, %s, %s, %s)', [
inp['id'], inp['contract'], time, email, hotelId
])
mysql.connection.commit()
flash('The contract has been added', 'success')
return ('', 204)
@app.route('/settingsTimelimit', methods=['GET', 'POST'])
@is_logged_in
def settingsTimelimit():
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('SELECT * From settingsTimelimit where hotelId = %s', [hotelId])
result = cursor.fetchall()
flag = True
if len(result) == 0:
flag = False
else:
result = result[0]
return render_template('settings/settingsTimelimit.html', result=result, flag=flag)
@app.route('/settingsTimelimitSubmit', methods=['GET', 'POST'])
@is_logged_in
def settingsTimelimitSubmit():
inp = request.json
cursor = mysql.connection.cursor()
email = session['email']
time = datetime.datetime.utcnow()
hotelId = session.get('hotelId')
cursor.execute('SELECT * from settingsTimelimit where hotelId =%s', [hotelId])
len1 = cursor.fetchall()
if len(len1) == 1:
cursor.execute('Update settingsTimelimit SET value = %s, submittedOn = %s, submittedBy = %s, days = %s where hotelId = %s', [
inp['value'], time, email, inp['days'], hotelId
])
else:
cursor.execute('INSERT INTO settingsTimelimit(value, submittedOn, submittedBy, days, hotelId) VALUES(%s, %s, %s, %s, %s)', [
inp['value'], time, email, inp['days'], hotelId
])
mysql.connection.commit()
flash('The time limit setting has been updated', 'success')
return ('', 204)
# Request Actions
def reset():  # development/maintenance helper: resets every request's status and clears all response tables
cursor = mysql.connection.cursor()
cursor.execute('UPDATE request set status = %s', [statusval1])
cursor.execute('DELETE From response')
cursor.execute('DELETE From responseDaywise')
cursor.execute("DELETE from responseAvg")
cursor.execute('DELETE from requestAccepted')
cursor.execute('DELETE from review')
cursor.execute('DELETE From DeclineRequest')
cursor.execute('DELETE From deletedRequest')
cursor.execute('DELETE From requestLastOpened')
mysql.connection.commit()
return ''
def updateIata():
cursor = mysql.connection.cursor()
cursor.execute("Update users set userType = %s where userType = %s", ['IATA', 'iata'])
cursor.execute('UPDATE request set userType = %s where userType = %s', ["IATA", 'iata'])
mysql.connection.commit()
return ''
def updatePasswords():  # development helper: resets every account's password to a fixed value; never expose in production
cursor = mysql.connection.cursor()
cursor.execute("Update users set password = %s", [sha256_crypt.hash('trompar2020')])
cursor.execute("Update iataUsers set password = %s", [sha256_crypt.hash('trompar2020')])
cursor.execute("Update hotelUsers set password = %s", [sha256_crypt.hash('trompar2020')])
cursor.execute("Update developers set password = %s", [sha256_crypt.hash('trompar2020')])
cursor.execute("Update customers set password = %s", [sha256_crypt.hash('trompar2020')])
mysql.connection.commit()
def updateDefault():
cursor = mysql.connection.cursor()
cursor.execute('UPDATE mapHotelId set default_email = %s where 1 = 1', ['default_email@gmail.com'])
mysql.connection.commit()
@app.route('/showRequest/<token>', methods = ['GET', 'POST'])
@is_logged_in
def showRequest(token):
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
email = session['email']
cursor.execute('SELECT userType, userSubType from users where email = %s', [email])
ut = cursor.fetchall()
if len(ut) != 0:
ut = ut[0]
cursor.execute('SELECT status from request where id = %s && hotelId = %s', [token, hotelId])
status = cursor.fetchall()
status = status[0]['status']
if (status == statusval1):
cursor.execute('SELECT checkIn, checkOut from request where id = %s && hotelId = %s', [token, hotelId])
dates = cursor.fetchall()
dates = dates[0]
checkIn = dates['checkIn']
checkOut = dates['checkOut']
dates = []
day = datetime.timedelta(days=1)
cursor.execute('SELECT startDate, endDate from autopilot where active = "1" AND policy = "manual" && hotelId = %s', [hotelId])
excep = cursor.fetchall()
while checkIn < checkOut:
dates.append(checkIn)
checkIn = checkIn + day
newDates = []
for d in dates:
flag = True
for x in excep:
if (x['startDate'] <= d and x['endDate'] >= d):
flag = False
if (flag):
newDates.append(d)
dates = newDates
newDates = []
for d in dates:
day = d.strftime('%A')
day = day.lower()
# Interpolating `day` into the query is safe here: it is one of the seven
# lowercase strftime('%A') weekday names, never user input.
query = 'SELECT * from rate where hotelId = %s && startDate <= %s AND endDate >= %s AND {} = 1'.format(day)
cursor.execute(query, [hotelId, d, d])
pent = cursor.fetchall()
if len(pent) != 0:
newDates.append(d)
dates = newDates
newDates = []
for d in dates:
y = d
d = d.strftime('%y-%b-%d')
x = d.split('-')
d = x[2] + " " + x[1] + ", " + x[0]
newDates.append({'d': y, 's' : d})
dates = newDates
f = True
if len(dates) == 0:
f = False
return render_template('request/getOcc.html', dates = dates, token = token, flag = f)
elif (status == statusval2 or status == statusval4 or status == statusval5 or status == statusval6 or status == statusval8 or status == statusval10 or status == statusval11 or (status == statusval7 and ut.get('userSubType') == 'reservation') or status == statusval9):
data5 = []
if (status == statusval4):
cursor.execute('SELECT * From requestAccepted where requestId = %s && hotelId = %s', [token, hotelId])
data5 = cursor.fetchall()
data5 = data5[0]
temp1 = data5['time'].strftime('%y-%b-%d')
x = temp1.split('-')
data5['time'] = x[2] + " " + x[1] + ", " + x[0]
data6 = []
if (status == statusval5 or status == statusval8):
cursor.execute("SELECT * From DeclineRequest where requestId = %s && hotelId = %s", [token, hotelId])
data6 = cursor.fetchall()
data6 = data6[0]
temp1 = data6['time'].strftime('%y-%b-%d')
x = temp1.split('-')
data6['time'] = x[2] + " " + x[1] + ", " + x[0]
data7 = []
if (status == statusval6):
cursor.execute("SELECT * From deletedRequest where requestId = %s && hotelId = %s", [token, hotelId])
data7 = cursor.fetchall()
data7 = data7[0]
temp1 = data7['time'].strftime('%y-%b-%d')
x = temp1.split('-')
data7['time'] = x[2] + " " + x[1] + ", " + x[0]
data8 = []
if (status == statusval7):
cursor.execute(
"SELECT * From review where requestId = %s && hotelId = %s", [token, hotelId])
data8 = cursor.fetchall()
data8 = data8[0]
temp1 = data8['time'].strftime('%y-%b-%d')
x = temp1.split('-')
data8['time'] = x[2] + " " + x[1] + ", " + x[0]
data9 = []
if (status == statusval10):
cursor.execute('SELECT * From confirmRequest where requestId = %s && hotelId = %s', [token, hotelId])
data9 = cursor.fetchall()
data9 = data9[0]
temp1 = data9['submittedOn'].strftime('%y-%b-%d')
x = temp1.split('-')
data9['submittedOn'] = x[2] + " " + x[1] + ", " + x[0]
data10 = []
if (status == statusval11):
cursor.execute('SELECT * From notConfirmRequest where requestId = %s && hotelId = %s', [token, hotelId])
data10 = cursor.fetchall()
data10 = data10[0]
temp1 = data10['submittedOn'].strftime('%y-%b-%d')
x = temp1.split('-')
data10['submittedOn'] = x[2] + " " + x[1] + ", " + x[0]
cursor.execute('SELECT * From request where id = %s && hotelId = %s', [token, hotelId])
data = cursor.fetchall()
data = data[0]
checkIn = data['checkIn']
checkOut = data['checkOut']
x = data['createdOn'].strftime("%y-%b-%d, %H:%M:%S")
z = x.split(",")[0]
y = x.split(",")[1]
x = z.split("-")
data['createdOn'] = x[2] + " " + x[1] + ", " + x[0] + " : " + y
email = session['email']
now = datetime.datetime.utcnow()
cursor.execute('SELECT * From requestLastOpened where id = %s && hotelId = %s', [token, hotelId])
check = cursor.fetchall()
if len(check) != 0:
data['lastOpenedOn'] = check[0]['time']
data['lastOpenedBy'] = check[0]['openedBy']
temp1 = data['lastOpenedOn'].strftime('%y-%b-%d')
x = temp1.split('-')
data['lastOpenedOn'] = x[2] + " " + x[1] + ", " + x[0]
string = ''
v = data['paymentTerms']
if v is not None:
if v.count('pc') > 0:
string = 'Post Checkout'
data['paymentTerms'] = string
elif v.count('ac') > 0:
data['paymentTerms'] = 'At Checkout'
elif v.count('poa') > 0:
data['paymentTerms'] = 'Prior To Arrival'
string = ''  # note: this manual formPayment expansion is dead code; the value is overwritten by procArr2() below
v = data['formPayment']
if v is not None:
if v.count('cq') > 0:
string += 'Cheque, '
if v.count('bt') > 0:
string += ' Bank Transfer, '
if v.count('cc') > 0:
string += 'Credit Card, '
data['formPayment'] = procArr2(data['formPayment'])
if data['comments'].isspace():
data['comments'] = ''
responseId = data['id'] + "R"
cursor.execute('SELECT * From response where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [responseId, hotelId])
data2 = cursor.fetchall()
tfoc = False
tcomm = False
data3 = []
lefttable = []
righttable = []
contract = ''
contractv = ''
declined = False
declinedMsg = False
nego = False
negoInformation = ''
canNegotiate = False
if len(data2) != 0:
data['groupCategory'] = data2[0]['groupCategory']
data2 = data2[0]
if (data2['foc'] != '0'):
tfoc = True
if (data2['commission'] != '0'):
tcomm = True
data2['formPayment'] = procArr2(data2['formPayment'])
v = data2['paymentTerms']
if v is not None:
if v.count('pc') > 0:
data2['paymentTerms'] = 'Post Checkout'
elif v.count('ac') > 0:
data2['paymentTerms'] = 'At Checkout'
elif v.count('poa') > 0:
data2['paymentTerms'] = 'Prior To Arrival'
cursor.execute('SELECT submittedOn from responseAvg where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [responseId, hotelId])
submittedOn = cursor.fetchall()
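# submittedOn may be stored as the literal string 'None'; in that case fall back to an unfiltered lookup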
if submittedOn[0]['submittedOn'] == 'None':
cursor.execute('SELECT * From responseAvg where responseId = %s && hotelId = %s', [responseId, hotelId])
data3 = cursor.fetchall()
else:
submittedOn = submittedOn[0]['submittedOn']
cursor.execute('SELECT * From responseAvg where responseId = %s and submittedOn = %s && hotelId = %s', [responseId, submittedOn, hotelId])
data3 = cursor.fetchall()
data3 = data3[0]
cursor.execute(
'SELECT submittedOn from responseDaywise where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [responseId, hotelId])
submittedOn = cursor.fetchall()
if submittedOn[0]['submittedOn'] == 'None':
cursor.execute(
'SELECT * From responseDaywise where responseId = %s && hotelId = %s', [responseId, hotelId])
data4 = cursor.fetchall()
else:
cursor.execute('SELECT * From responseDaywise where responseId = %s and submittedOn = %s && hotelId = %s', [responseId, submittedOn[0]['submittedOn'], hotelId])
data4 = cursor.fetchall()
lefttable = []
dataToCheck = []
righttable = {}
for d in data4:
righttable[d['date']] = []
for d in data4:
if d['date'] not in dataToCheck:
tempArr = {}
tempArr['date'] = d['date']
tempArr['currentOcc'] = d['currentOcc']
tempArr['discountId'] = d['discountId']
tempArr['forecast'] = d['forecast']
tempArr['groups'] = d['groups']
tempArr['leadTime'] = d['leadTime']
lefttable.append(tempArr)
dataToCheck.append(d['date'])
tArr = {}
tArr['occupancy'] = d['occupancy']
tArr['type'] = d['type']
tArr['count'] = d['count']
tArr['ratePerRoom'] = d['ratePerRoom']
righttable[d['date']].append(tArr)
cursor.execute(
'SELECT contract from response where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [responseId, hotelId])
contract = cursor.fetchall()
contract = contract[0]
cursor.execute('SELECT contract from contract where id = %s && hotelId = %s', [contract['contract'], hotelId])
contractv = cursor.fetchall()
if len(contractv) != 0:
contractv = contractv[0]['contract']
else:
contractv = ''
declined = False
declinedMsg = ""
if (data['status'] == statusval2):
endline = data2['expiryTime']
if endline is not None:
today = datetime.datetime.now()
if (today > endline):
cursor.execute(
'UPDATE request set status = %s where id = %s && hotelId = %s', [statusval9, data['id'], hotelId])
cursor.execute(
'SELECT * from response where requestId = %s && hotelId = %s order by submittedOn desc limit 1', [data['id'], hotelId])
email = session['email']
now = datetime.datetime.utcnow()
prevresponse = cursor.fetchall()
if len(prevresponse) != 0:
prevresponse = prevresponse[0]
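# the quote's time limit has passed: snapshot the latest response (plus its averages and day-wise rows) under the expired status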
cursor.execute('INSERT INTO response(requestId, responseId, groupCategory, totalFare, foc, commission, commissionValue, totalQuote, cutoffDays, formPayment, paymentTerms, paymentGtd, negotiable, checkIn, checkOut, submittedBy, submittedOn, status, paymentDays, nights, comments, averageRate, contract, expectedFare, negotiationReason, timesNegotiated, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
prevresponse['requestId'], prevresponse['responseId'], prevresponse['groupCategory'], prevresponse['totalFare'], prevresponse[
'foc'], prevresponse['commission'], prevresponse['commissionValue'], prevresponse['totalQuote'], prevresponse['cutoffDays'],
prevresponse['formPayment'], prevresponse['paymentTerms'], prevresponse['paymentGtd'], prevresponse[
'negotiable'], prevresponse['checkIn'], prevresponse['checkOut'], email, now,
statusval9, prevresponse['paymentDays'], prevresponse['nights'], prevresponse['comments'],
prevresponse['averageRate'], prevresponse['contract'], prevresponse['expectedFare'], prevresponse['negotiationReason'], prevresponse['timesNegotiated'], hotelId
])
cursor.execute(
'SELECT * From responseAvg where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [prevresponse['responseId'], hotelId])
prevAvg = cursor.fetchall()
if len(prevAvg) != 0:
prevAvg = prevAvg[0]
cursor.execute('INSERT INTO responseAvg(single1, single2, double1, double2, triple1, triple2, quad1, quad2, responseId, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
prevAvg['single1'], prevAvg['single2'], prevAvg['double1'], prevAvg['double2'], prevAvg[
'triple1'], prevAvg['triple2'], prevAvg['quad1'], prevAvg['quad2'], prevAvg['responseId'], now, hotelId
])
cursor.execute(
'SELECT submittedOn from responseDaywise where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [prevAvg['responseId'], hotelId])
submittedOn = cursor.fetchall()
cursor.execute('SELECT * From responseDaywise where responseId = %s and submittedOn = %s && hotelId = %s',
[prevAvg['responseId'], submittedOn[0]['submittedOn'], hotelId])
prevDaywise = cursor.fetchall()
if len(prevDaywise) != 0:
for p in prevDaywise:
cursor.execute('INSERT INTO responseDaywise(date, currentOcc, discountId, occupancy, type, count, ratePerRoom, responseId, forecast, leadTime, groups, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
p['date'], p['currentOcc'], p['discountId'], p['occupancy'], p['type'], p[
'count'], p['ratePerRoom'], prevAvg['responseId'], p['forecast'], p['leadTime'], p['groups'], now, hotelId
])
cursor.execute(
'UPDATE response set status = %s where requestId = %s && hotelId = %s order by submittedOn desc limit 1', [statusval9, data['id'], hotelId])
mysql.connection.commit()
declined = True
declinedMsg = "Time limit expired"
data['status'] = statusval9
data2['status'] = statusval9
temp1 = data2['submittedOn'].strftime('%y-%b-%d, %H:%M:%S')
x = temp1.split('-')
data2['submittedOn'] = x[2].split(",")[0] + " " + x[1] + "," + x[0] + " " + x[2].split(",")[1]
for d in lefttable:
y = d['date']
temp1 = d['date'].strftime('%Y-%b-%d-%A')
x = temp1.split('-')
x = x[3] + " : " + x[2] + " " + x[1] + "," + x[0]
d['date'] = x
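# re-key righttable with display labels; note the original date keys remain in the dict alongside the formatted ones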
for d in list(righttable):
y = d
temp1 = d.strftime('%Y-%b-%d-%A')
x = temp1.split('-')
d = x[3] + " : " + x[2] + " " + x[1] + "," + x[0]
righttable[d] = righttable[y]
for key,value in righttable.items():
for r in value:
if (r['type'] == 'foc'):
r['type'] = 'FOC'
return render_template('request/requestQuotedView.html', data = data, data2= data2, tfoc = tfoc, tcomm = tcomm, data3 = data3, lefttable = lefttable, righttable = righttable, data5 = data5, data6 = data6, data7 = data7, data8 = data8, contract = contract, contractv = contractv, declined = declined, declinedMsg = declinedMsg, nego = nego, negoInformation = negoInformation, canNegotiate = canNegotiate, data9 = data9, data10 = data10)
elif (status == statusval3 or ( status == statusval7 and ut.get('userSubType') != 'reservation')):
cursor.execute('select count from settingsNegotiation where hotelId = %s', [hotelId])
count = cursor.fetchall()
if len(count) != 0:
count = count[0]['count']
else:
count = 100 # no hard limit set, so we assume 100 here
data8 = []
if (status == statusval7):
cursor.execute(
"SELECT * From review where requestId = %s && hotelId = %s", [token, hotelId])
data8 = cursor.fetchall()
data8 = data8[0]
temp1 = data8['time'].strftime('%y-%b-%d')
x = temp1.split('-')
data8['time'] = x[2] + " " + x[1] + ", " + x[0]
cursor.execute('SELECT * From request where id = %s && hotelId = %s', [token, hotelId])
data = cursor.fetchall()
data = data[0]
checkIn = data['checkIn']
checkOut = data['checkOut']
data['createdOn'] = data['createdOn'].strftime("%y/%b/%d, %H:%M:%S")
email = session['email']
now = datetime.datetime.utcnow()
cursor.execute('SELECT * From requestLastOpened where id = %s && hotelId = %s', [token, hotelId])
check = cursor.fetchall()
data['lastOpenedOn'] = check[0]['time']
data['lastOpenedBy'] = check[0]['openedBy']
temp1 = data['lastOpenedOn'].strftime('%y-%b-%d')
x = temp1.split('-')
data['lastOpenedOn'] = x[2] + " " + x[1] + ", " + x[0]
v = data['paymentTerms']
if v is not None:
if v.count('pc') > 0:
data['paymentTerms'] = 'Post Checkout'
elif v.count('ac') > 0:
data['paymentTerms'] = 'At Checkout'
elif v.count('poa') > 0:
data['paymentTerms'] = 'Prior To Arrival'
data['formPayment'] = procArr2(data['formPayment'])
if data['comments'].isspace():
data['comments'] = ''
responseId = data['id'] + "R"
cursor.execute('SELECT * From response where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [responseId, hotelId])
data2 = cursor.fetchall()
tfoc = False
tcomm = False
data3 = []
lefttable = []
righttable = []
contract = ''
contractv = ''
nego = False
negoInformation = ''
canNegotiate = False
fop = ''
pt = ''
if len(data2) != 0:
data['groupCategory'] = data2[0]['groupCategory']
data2 = data2[0]
if (data2['foc'] != '0'):
tfoc = True
if (data2['commission'] != '0'):
tcomm = True
fop = data2['formPayment']
data2['formPayment'] = procArr2(data2['formPayment'])
v = data2['paymentTerms']
pt = v
if v is not None:
if v.count('pc') > 0:
data2['paymentTerms'] = 'Post Checkout'
elif v.count('ac') > 0:
data2['paymentTerms'] = 'At Checkout'
elif v.count('poa') > 0:
data2['paymentTerms'] = 'Prior To Arrival'
cursor.execute('SELECT submittedOn from responseAvg where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [responseId, hotelId])
submittedOn = cursor.fetchall()
if submittedOn[0]['submittedOn'] == 'None':
cursor.execute('SELECT * From responseAvg where responseId = %s && hotelId = %s', [responseId, hotelId])
data3 = cursor.fetchall()
else:
submittedOn = submittedOn[0]['submittedOn']
cursor.execute('SELECT * From responseAvg where responseId = %s and submittedOn = %s && hotelId = %s', [responseId, submittedOn, hotelId])
data3 = cursor.fetchall()
cursor.execute(
'SELECT submittedOn from responseDaywise where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [responseId, hotelId])
submittedOn = cursor.fetchall()
if submittedOn[0]['submittedOn'] == 'None':
cursor.execute(
'SELECT * From responseDaywise where responseId = %s && hotelId = %s', [responseId, hotelId])
data4 = cursor.fetchall()
else:
cursor.execute('SELECT * From responseDaywise where responseId = %s and submittedOn = %s && hotelId = %s', [responseId, submittedOn[0]['submittedOn'], hotelId])
data4 = cursor.fetchall()
lefttable = []
dataToCheck = []
righttable = {}
for d in data4:
righttable[d['date']] = []
roomCount = 0
occFlag = False
for d in data4:
if d['date'] not in dataToCheck:
tempArr = {}
tempArr['date'] = d['date']
tempArr['currentOcc'] = d['currentOcc']
if (d['currentOcc']) != "-":
occFlag = True
tempArr['discountId'] = d['discountId']
tempArr['forecast'] = d['forecast']
tempArr['groups'] = d['groups']
tempArr['leadTime'] = d['leadTime']
lefttable.append(tempArr)
dataToCheck.append(d['date'])
tArr = {}
tArr['occupancy'] = d['occupancy']
tArr['type'] = d['type']
tArr['count'] = d['count']
roomCount += int(d['count'])
tArr['ratePerRoom'] = d['ratePerRoom']
righttable[d['date']].append(tArr)
cursor.execute(
'SELECT contract from response where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [responseId, hotelId])
contract = cursor.fetchall()
contract = contract[0]
single1 = []
single2 = []
double1 = []
double2 = []
triple1 = []
triple2 = []
quad1 = []
quad2 = []
single1c = 0
single2c = 0
double1c = 0
double2c = 0
triple1c = 0
triple2c = 0
quad1c = 0
quad2c = 0
foc1c = 0
foc2c = 0
single1f = False
double1f = False
triple1f = False
quad1f = False
single2f = False
double2f = False
triple2f = False
quad2f = False
foc1 = 0
foc2 = 0
roomCount = 0
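# tally room counts and per-night revenue for each bed-type/occupancy bucket;
# ratePerRoom strings look like 'value(...)', so split on '(' to get the numeric part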
for m in data4:
if (m['type'] != 'foc'):
roomCount += int(m['count'])
if (m['type'] == '1 Bed'):
if (m['occupancy'] == 'Single'):
if (m['ratePerRoom'] != -1):
s = m['ratePerRoom'].split('(')
v = float(s[0]) * int(m['count'])
single1.append(v)
single1c = single1c + int(m['count'])
single1f = True
elif (m['occupancy'] == 'Double'):
if (m['ratePerRoom'] != -1):
s = m['ratePerRoom'].split('(')
v = float(s[0]) * int(m['count'])
double1.append(v)
double1c = double1c + int(m['count'])
double1f = True
elif (m['occupancy'] == 'Triple'):
if (m['ratePerRoom'] != -1):
s = m['ratePerRoom'].split('(')
v = float(s[0]) * int(m['count'])
triple1.append(v)
triple1c = triple1c + int(m['count'])
triple1f = True
elif (m['occupancy'] == 'Quad'):
if (m['ratePerRoom'] != -1):
s = m['ratePerRoom'].split('(')
v = float(s[0]) * int(m['count'])
quad1.append(v)
quad1c = quad1c + int(m['count'])
quad1f = True
elif (m['type'] == '2 Bed'):
if (m['occupancy'] == 'Single'):
if (m['ratePerRoom'] != -1):
s = m['ratePerRoom'].split('(')
v = float(s[0]) * int(m['count'])
single2.append(v)
single2c = single2c + int(m['count'])
single2f = True
elif (m['occupancy'] == 'Double'):
if (m['ratePerRoom'] != -1):
s = m['ratePerRoom'].split('(')
v = float(s[0]) * int(m['count'])
double2.append(v)
double2c = double2c + int(m['count'])
double2f = True
elif (m['occupancy'] == 'Triple'):
if (m['ratePerRoom'] != -1):
s = m['ratePerRoom'].split('(')
v = float(s[0]) * int(m['count'])
triple2.append(v)
triple2c = triple2c + int(m['count'])
triple2f = True
elif (m['occupancy'] == 'Quad'):
if (m['ratePerRoom'] != -1):
s = m['ratePerRoom'].split('(')
v = float(s[0]) * int(m['count'])
quad2.append(v)
quad2c = quad2c + int(m['count'])
quad2f = True
elif (m['type'] == 'foc'):
if (m['occupancy'] == 'Single'):
if (m['ratePerRoom'] != -1):
s = m['ratePerRoom'].split('(')
v = float(s[0]) * int(m['count'])
foc1 += float(v)
foc1c = int(m['count'])
elif (m['occupancy'] == 'Double'):
if (m['ratePerRoom'] != -1):
s = m['ratePerRoom'].split('(')
v = float(s[0]) * int(m['count'])
foc2 += float(v)
foc2c = int(m['count'])
cursor.execute('SELECT contract from contract where id = %s && hotelId = %s', [contract['contract'], hotelId])
contractv = cursor.fetchall()
if len(contractv) != 0:
contractv = contractv[0]['contract']
else:
contractv = ''
cursor.execute('SELECT * from response where responseId = %s && status = %s && hotelId = %s', [responseId, statusval3, hotelId])
negoTime = cursor.fetchall()
negoTimes = len(negoTime)
nego = False
negoInformation = {}
canNegotiate = False
if (int(negoTimes) < int(count)):
canNegotiate = True
negoInformation['expectedFare'] = data2['expectedFare']
negoInformation['reason'] = data2['negotiationReason']
email = session['email']
cursor.execute('SELECT userType from hotelUsers where email = %s && hotelId = %s', [email, hotelId])
ut = cursor.fetchall()
review = True
if len(ut) != 0:
ut = ut[0]
if (ut['userType'] == "hotelAdmin" or ut['userType'] == "revenue"):
review = False
cursor.execute('SELECT * from contract where hotelId = %s', [hotelId])
contracts = cursor.fetchall()
for d in lefttable:
y = d['date']
temp1 = d['date'].strftime('%Y-%b-%d-%A')
x = temp1.split('-')
x = x[3] + " : " + x[2] + " " + x[1] + "," + x[0]
d['date'] = x
for d in list(righttable):
y = d
temp1 = d.strftime('%Y-%b-%d-%A')
x = temp1.split('-')
d = x[3] + " : " + x[2] + " " + x[1] + "," + x[0]
righttable[d] = righttable[y]
data3 = data3[0]
for key,value in righttable.items():
for r in value:
if (r['type'] == 'foc'):
r['type'] = 'FOC'
return render_template('request/requestEditableView.html', data = data, data2= data2, tfoc = tfoc, tcomm = tcomm, data3 = data3, lefttable = lefttable, righttable = righttable, data8 = data8, contract = contract, contractv = contractv, nego = nego, negoInformation = negoInformation, canNegotiate = canNegotiate, review = review, contracts = contracts, roomCount = roomCount, fop = fop, pt = pt, single1f = single1f, double1f = double1f, triple1f = triple1f, quad1f = quad1f, single2f = single2f, double2f = double2f, triple2f = triple2f, quad2f = quad2f, single1c = single1c, double1c = double1c, triple1c = triple1c, quad1c = quad1c, single2c = single2c, double2c = double2c, triple2c = triple2c, quad2c = quad2c, foc1c = foc1c, foc2c = foc2c, occFlag = occFlag)
@app.route('/showRequest1', methods = ['GET', 'POST'])
@is_logged_in
def showRequest1():
token = request.form['id']
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('SELECT * FROM request where id = %s && hotelId = %s', [token, hotelId])
data = cursor.fetchall()
data = data[0]
checkIn = data['checkIn']
checkOut = data['checkOut']
data['createdOn'] = data['createdOn'].strftime("%d %b ,%y, %H:%M:%S")
cursor.execute('SELECT status from request where id = %s && hotelId = %s', [token, hotelId])
status = cursor.fetchall()
rvflag = False
rvvv = []
if (status[0]['status'] == statusval7):
cursor.execute('SELECT * From review where requestId = %s && hotelId = %s', [token, hotelId])
rvvv = cursor.fetchall()
rvvv = rvvv[0]
rvflag = True
email = session['email']
now = datetime.datetime.utcnow()
cursor.execute('SELECT * From requestLastOpened where id = %s && hotelId = %s', [token, hotelId])
check = cursor.fetchall()
if len(check) == 0:
cursor.execute('INSERT INTO requestLastOpened(id, time, openedBy, hotelId) VALUES (%s, %s, %s, %s)', [token, now, email, hotelId])
data['lastOpenedOn'] = now
data['lastOpenedBy'] = email
else:
data['lastOpenedOn'] = check[0]['time']
data['lastOpenedBy'] = check[0]['openedBy']
cursor.execute('UPDATE requestLastOpened SET time = %s, openedBy = %s where id = %s && hotelId = %s', [now, email, token, hotelId])
temp1 = data['lastOpenedOn'].strftime('%y-%b-%d')
x = temp1.split('-')
data['lastOpenedOn'] = x[2] + " " + x[1] + ", " + x[0]
mysql.connection.commit()
v = data['paymentTerms']
if v is not None:
if v.count('pc') > 0:
data['paymentTerms'] = 'Post Checkout'
elif v.count('ac') > 0:
data['paymentTerms'] = 'At Checkout'
elif v.count('poa') > 0:
data['paymentTerms'] = 'Prior To Arrival'
data['formPayment'] = procArr2(data['formPayment'])
if data['comments'].isspace():
data['comments'] = ''
nights = data['nights']
curr_date = data['checkIn']
result = []
dates = []
discounts = []
lead = int(data['leadTime'])
occs = []
cursor.execute('SELECT * From room where hotelId = %s', [hotelId])
data7 = cursor.fetchall()
totalRooms = 0
for d in data7:
totalRooms += int(d['count'])
rates = []
tfoc = False
tfoc1 = 0
tfoc2 = 0
foc = []
if (data['foc'] != 0):
tfoc = True
tfoc1 = data['foc1']
tfoc2 = data['foc2']
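# mmp stays 1 only while every night has a matching rate grid; it is cleared below whenever a lookup fails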
mmp = 1
for i in range(0, int(nights)):
tempResult = []
cursor.execute('SELECT * FROM request1Bed where date = %s AND id = %s && hotelId = %s', [curr_date, token, hotelId])
resultPerDay1 = cursor.fetchall()
roomsToBook = 0
for r in resultPerDay1:
if (len(r) != 0):
dateToCheck = curr_date.strftime('%Y-%m-%d')
day = curr_date.strftime('%A')
day = day.lower()
query = "SELECT * FROM rate where (type = %s AND (startDate <= %s AND endDate >= %s) AND {} = 1 AND hotelId = %s)".format(day)
cursor.execute(query, ['1', dateToCheck, dateToCheck, hotelId])
pent = cursor.fetchall()
if (len(pent) == 0):
r['rate'] = -1
mmp = 0
else:
if r['occupancy'] == 'Single':
r['rate'] = pent[0]['sor']
elif r['occupancy'] == 'Double':
r['rate'] = pent[0]['dor']
elif r['occupancy'] == 'Triple':
r['rate'] = pent[0]['tor']
elif r['occupancy'] == 'Quad':
r['rate'] = pent[0]['qor']
r['type'] = '1 Bed'
tempResult.append(r)
roomsToBook += int(r['count'])
cursor.execute(
'SELECT * FROM request2Bed where date = %s AND id = %s && hotelId = %s', [curr_date, token, hotelId])
resultPerDay2 = cursor.fetchall()
for r in resultPerDay2:
if (len(r) != 0):
dateToCheck = curr_date.strftime('%Y-%m-%d')
day = curr_date.strftime('%A')
day = day.lower()
query = "SELECT * FROM rate where (type = %s AND (startDate <= %s AND endDate >= %s) AND {} = 1 AND hotelId = %s)".format(
day)
cursor.execute(query, ['2', dateToCheck, dateToCheck, hotelId])
pent = cursor.fetchall()
if (len(pent) == 0):
r['rate'] = -1
mmp = 0
else:
if r['occupancy'] == 'Single':
r['rate'] = pent[0]['sor']
elif r['occupancy'] == 'Double':
r['rate'] = pent[0]['dor']
elif r['occupancy'] == 'Triple':
r['rate'] = pent[0]['tor']
elif r['occupancy'] == 'Quad':
r['rate'] = pent[0]['qor']
r['type'] = '2 Bed'
tempResult.append(r)
roomsToBook += int(r['count'])
if (tfoc):
if (tfoc1 != '0'):
r = {}
dateToCheck = curr_date.strftime('%Y-%m-%d')
day = curr_date.strftime('%A')
day = day.lower()
query = "SELECT * FROM rate where (type = %s AND (startDate <= %s AND endDate >= %s) AND {} = 1 AND hotelId = %s)".format(
day)
cursor.execute(query, ['1', dateToCheck, dateToCheck, hotelId])
pent = cursor.fetchall()
r['foc1'] = tfoc1
r['type'] = 'FOC'
r['occupancy'] = 'Single'
r['count'] = tfoc1
if (len(pent) == 0):
r['rate'] = -1
else:
r['rate'] = pent[0]['sor']
foc.append(r)
tempResult.append(r)
roomsToBook += int(tfoc1)
if (tfoc2 != '0'):
r = {}
dateToCheck = curr_date.strftime('%Y-%m-%d')
day = curr_date.strftime('%A')
day = day.lower()
query = "SELECT * FROM rate where (type = %s AND (startDate <= %s AND endDate >= %s) AND {} = 1 AND hotelID = %s)".format(
day)
cursor.execute(query, ['2', dateToCheck, dateToCheck, hotelId])
pent = cursor.fetchall()
r['foc2'] = tfoc2
r['type'] = 'FOC'
r['occupancy'] = 'Double'
r['count'] = tfoc2
if (len(pent) == 0):
r['rate'] = -1
else:
r['rate'] = pent[0]['dor']
foc.append(r)
tempResult.append(r)
roomsToBook += int(tfoc2)
dateToCheck = curr_date.strftime('%Y-%m-%d')
occ = request.form.get(str(curr_date))
if occ is None:
occs.append("-")
cursor.execute('SELECT policyName from autopilot where startDate <= %s AND endDate >= %s AND active = 1 AND policy = "manual" && hotelId = %s', [curr_date, curr_date, hotelId])
pn = cursor.fetchall()
if len(pn) != 0:
discounts.append("0" + " (AutoPilot ID: " + pn[0]['policyName'] + ")")
else:
discounts.append('0' + "(Not OCC)")
for t in tempResult:
rates.append({'val': -1, 'count': t['count'], 'type' : 'no'})
t['rate'] = -1
else:
occ = int(occ)
pam = occ * totalRooms//100
occs.append(str(occ) + " (" + str(pam) + " Rooms )")
pam = pam + 1
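# 101 is a sentinel above any valid discount percentage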
minDiscountVal = 101
glid = 0
cursor.execute('SELECT discountId, defaultm from discountMap where startDate <= %s AND endDate >= %s AND active = 1 && hotelId = %s', [dateToCheck, dateToCheck, hotelId])
di = cursor.fetchall()
if len(di) == 0:
discounts.append('0' + "(No Discount Grid)")
else:
if len(di) == 1:
id = di[0]['discountId']
elif len(di) == 2:
for l in di:
if (l['defaultm'] == 0):
id = l['discountId']
break
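# scan every candidate room total (pam .. pam + roomsToBook - 1) for the smallest matching discount;
# note a missing grid appends one 'No Grid Fits' entry per scanned value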
for rv in range(pam, roomsToBook + pam):
cursor.execute('SELECT * from discount where discountId = %s AND (leadMin <= %s && leadMax >= %s) AND (roomMin <= %s && roomMax >= %s && hotelId = %s)', [id, lead, lead, rv, rv, hotelId])
dd = cursor.fetchall()
if len(dd) == 0:
discounts.append('0' + "(No Grid Fits)")
else:
glid = id
if dd[0]['value'] == '' or dd[0]['value'] == ' ':
dd[0]['value'] = '0'
dd[0]['value'] = dd[0]['value'].strip()
minDiscountVal = min(minDiscountVal, float(dd[0]['value']))
discounts.append(str(minDiscountVal) + " ( ID : " + str(glid) + " )")
for t in tempResult:
try:
te = int(t['rate'])
except (TypeError, ValueError):
# 'except e:' raised a NameError at runtime; catch conversion errors and fall back to the -1 sentinel
print(t['rate'])
te = -1
if (te == -1):
rates.append({'val': -1, 'count': t['count'], 'type' : 'no'})
else:
if minDiscountVal == 101:
minDiscountVal = 0
val = te - (minDiscountVal * te)/100
rates.append({'val': val, 'count': t['count'], 'type': t['type']})
t['rate'] = str(val) + " (Evaluated Rate : " + str(val) + "[" + str(te) + "] )"
lead = lead + 1
dates.append(curr_date.strftime('%A : %d %b, %Y'))
result.append(tempResult)
curr_date = curr_date + datetime.timedelta(days = 1)
focv = 0
for r in rates:
if r['type'] == 'FOC':
focv += int(r['count']) * r['val']
totalRate = 0
for d in rates:
if (d['val'] == -1 or d['type'] == 'FOC'):
totalRate += 0
else:
totalRate += int(d['count']) * d['val']
focv = float(round(focv, 2))
totalRate = float(round(totalRate, 2))
totalQuote = totalRate
tcomm = False
tcommv = 0
comP = 0
if (data['commissionable'] != '0'):
vv = (totalRate * float(data['commissionable'])) / 100
comP = data['commissionable']
totalQuote += vv
tcomm = True
tcommv = vv
tcommv = float(round(tcommv, 2))
totalQuote += focv
totalQuote = int(round(totalQuote))
roomCount = 0
single1 = []
single2 = []
double1 = []
double2 = []
triple1 = []
triple2 = []
quad1 = []
quad2 = []
single1c = 0
single2c = 0
double1c = 0
double2c = 0
triple1c = 0
triple2c = 0
quad1c = 0
quad2c = 0
foc1c = 0
foc2c = 0
single1f = False
double1f = False
triple1f = False
quad1f = False
single2f = False
double2f = False
triple2f = False
quad2f = False
foc1 = 0
foc2 = 0
for r in result:
for m in r:
if (m['type'] != 'FOC'):
roomCount += int(m['count'])
if (m['type'] == '1 Bed'):
if (m['occupancy'] == 'Single'):
if (m['rate'] != -1):
s = m['rate'].split(' (')
v = float(s[0]) * int(m['count'])
single1.append(v)
single1c = single1c + int(m['count'])
single1f = True
elif (m['occupancy'] == 'Double'):
if (m['rate'] != -1):
s = m['rate'].split(' (')
v = float(s[0]) * int(m['count'])
double1.append(v)
double1c = double1c + int(m['count'])
double1f = True
elif (m['occupancy'] == 'Triple'):
if (m['rate'] != -1):
s = m['rate'].split(' (')
v = float(s[0]) * int(m['count'])
triple1.append(v)
triple1c = triple1c + int(m['count'])
triple1f = True
elif (m['occupancy'] == 'Quad'):
if (m['rate'] != -1):
s = m['rate'].split(' (')
v = float(s[0]) * int(m['count'])
quad1.append(v)
quad1c = quad1c + int(m['count'])
quad1f = True
elif (m['type'] == '2 Bed'):
if (m['occupancy'] == 'Single'):
if (m['rate'] != -1):
s = m['rate'].split(' (')
v = float(s[0]) * int(m['count'])
single2.append(v)
single2c = single2c + int(m['count'])
single2f = True
elif (m['occupancy'] == 'Double'):
if (m['rate'] != -1):
s = m['rate'].split(' (')
v = float(s[0]) * int(m['count'])
double2.append(v)
double2c = double2c + int(m['count'])
double2f = True
elif (m['occupancy'] == 'Triple'):
if (m['rate'] != -1):
s = m['rate'].split(' (')
v = float(s[0]) * int(m['count'])
triple2.append(v)
triple2c = triple2c + int(m['count'])
triple2f = True
elif (m['occupancy'] == 'Quad'):
if (m['rate'] != -1):
s = m['rate'].split(' (')
v = float(s[0]) * int(m['count'])
quad2.append(v)
quad2c = quad2c + int(m['count'])
quad2f = True
elif (m['type'] == 'FOC'):
if (m['occupancy'] == 'Single'):
if (m['rate'] != -1):
s = m['rate'].split(' (')
v = float(s[0]) * int(m['count'])
foc1 += float(v)
foc1c = int(m['count'])
elif (m['occupancy'] == 'Double'):
if (m['rate'] != -1):
s = m['rate'].split(' (')
v = float(s[0]) * int(m['count'])
foc2 += float(v)
foc2c = int(m['count'])
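# spread FOC value and commission evenly across booked rooms (assumes roomCount > 0,
# otherwise this raises ZeroDivisionError); each category average below is the mean
# of its nightly totals plus these shared per-room portions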
foc1 = foc1 / roomCount
foc2 = foc2 / roomCount
tcommparts = tcommv / roomCount
le = single1c
single1avg = 0
if (le != 0):
sum = 0
for s in single1:
sum += float(s)
single1avg = sum / le
single1avg = single1avg + foc1 + foc2 + tcommparts
le = single2c
single2avg = 0
if (le != 0):
sum = 0
for s in single2:
sum += float(s)
single2avg = sum / le
single2avg = single2avg + foc1 + foc2 + tcommparts
le = double1c
double1avg = 0
if (le != 0):
sum = 0
for s in double1:
sum += float(s)
double1avg = sum / le
double1avg = double1avg + foc1 + foc2 + tcommparts
le = double2c
double2avg = 0
if (le != 0):
sum = 0
for s in double2:
sum += float(s)
double2avg = sum / le
double2avg = double2avg + foc1 + foc2 + tcommparts
le = triple1c
triple1avg = 0
if (le != 0):
sum = 0
for s in triple1:
sum += float(s)
triple1avg = sum / le
triple1avg = triple1avg + foc1 + foc2 + tcommparts
le = triple2c
triple2avg = 0
if (le != 0):
sum = 0
for s in triple2:
sum += float(s)
triple2avg = sum / le
triple2avg = triple2avg + foc1 + foc2 + tcommparts
le = quad1c
quad1avg = 0
if (le != 0):
sum = 0
for s in quad1:
sum += float(s)
quad1avg = sum / le
quad1avg = quad1avg + foc1 + foc2 + tcommparts
le = quad2c
quad2avg = 0
if (le != 0):
sum = 0
for s in quad2:
sum += float(s)
quad2avg = sum / le
quad2avg = quad2avg + foc1 + foc2 + tcommparts
# add foc to all equally
single1avg = round(single1avg, 2)
double1avg = round(double1avg, 2)
triple1avg = round(triple1avg, 2)
quad1avg = round(quad1avg, 2)
single2avg = round(single2avg, 2)
double2avg = round(double2avg, 2)
triple2avg = round(triple2avg, 2)
quad2avg = round(quad2avg, 2)
avgRate = str(round(totalQuote/roomCount, 2))
email = session['email']
cursor.execute('SELECT userType from hotelUsers where email = %s && hotelId = %s', [email, hotelId])
ut = cursor.fetchall()
review = True
if len(ut) != 0:
ut = ut[0]
if (ut['userType'] == "hotelAdmin" or ut['userType'] == "revenue"):
review = False
cursor.execute('SELECT * from contract where hotelId = %s', [hotelId])
contracts = cursor.fetchall()
cursor.execute('SELECT * From room where hotelId = %s', [hotelId])
roomData = cursor.fetchall()
negoF = False
fop = ''
pt = ''
cursor.execute('SELECT * From response where requestId = %s && hotelId = %s order by submittedOn desc limit 1', [token, hotelId])
responseData = cursor.fetchall()
if len(responseData) == 0:
cursor.execute('SELECT count from settingsNegotiation where hotelId = %s order by submittedOn desc', [hotelId])
nego = cursor.fetchall()
if len(nego) != 0:
nego = nego[0]
if (int(nego['count']) > 0):
negoF = True
else:
cursor.execute('SELECT * from response where requestId = %s && status = %s && hotelId = %s', [token, statusval3, hotelId])
negoTime = cursor.fetchall()
negoTimes = len(negoTime)
cursor.execute('SELECT count from settingsNegotiation where hotelId = %s order by submittedOn desc', [hotelId])
count = cursor.fetchall()
if len(count) != 0:
count = count[0]['count']
else:
count = 100
if (int(negoTimes) < int(count)):
negoF = True
responseData = responseData[0]
fop = responseData['formPayment']
responseData['formPayment'] = procArr2(responseData['formPayment'])
v = responseData['paymentTerms']
pt = v
if v is not None:
if v.count('pc') > 0:
responseData['paymentTerms'] = 'Post Checkout'
elif v.count('ac') > 0:
responseData['paymentTerms'] = 'At Checkout'
elif v.count('poa') > 0:
responseData['paymentTerms'] = 'Prior To Arrival'
# get right side values
if (mmp == 0):
flash('No Rate Grid available (No OCC applicable as discount grid for this date range is not set)!', 'danger')
return render_template('request/requestProcess.html', data = data, result = result, length = len(result), dates = dates, discounts = discounts, occs = occs, totalRate = totalRate, avgRate = avgRate, tcomm = tcomm, tcommv = tcommv, totalQuote = totalQuote, tfoc = tfoc, focv = focv, comP = comP, roomCount = roomCount, checkIn = checkIn, checkOut = checkOut, single1avg = single1avg, single2avg = single2avg, double1avg = double1avg, double2avg = double2avg, triple1avg = triple1avg, triple2avg = triple2avg, quad1avg = quad1avg, quad2avg = quad2avg, single1f = single1f, double1f = double1f, triple1f = triple1f, quad1f = quad1f, single2f = single2f, double2f = double2f, triple2f = triple2f, quad2f = quad2f, single1c = single1c, double1c = double1c, triple1c = triple1c, quad1c = quad1c, single2c = single2c, double2c = double2c, triple2c = triple2c, quad2c = quad2c, foc1 = foc1, foc2 = foc2, review = review, rvflag = rvflag, rvvv = rvvv, contracts = contracts, negoF = negoF, roomData = roomData, responseData = responseData, fop = fop, pt = pt)
@app.route('/requestProcessDecline', methods=['GET', 'POST'])
@is_logged_in
def requestProcessDecline():
inp = request.json
cursor = mysql.connection.cursor()
responseId = inp['requestId'] + "R"
email = session['email']
now = datetime.datetime.utcnow()
hotelId = session.get('hotelId')
status = statusval8
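# record the declining response plus its day-wise and average snapshots before flipping the request to declined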
cursor.execute('INSERT INTO response(requestId, responseId, groupCategory, totalFare, foc, commission, commissionValue, totalQuote, cutoffDays, formPayment, paymentTerms, paymentGtd, negotiable, checkIn, checkOut, submittedBy, submittedOn, status, paymentDays, nights, comments, averageRate, contract, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
inp['requestId'], responseId, inp['groupCategory'], inp['totalFare'], inp['foc'], str(inp['commission']), str(inp['commissionValue']), inp['totalQuote'], inp['cutoffDays'], procArr(
inp['formPayment']), inp['paymentTerms'], inp['paymentGtd'], inp['negotiable'], inp['checkIn'], inp['checkOut'], email, now,
status, inp['paymentDays'], inp['nights'], inp['comments'],
inp['averageRate'], inp['contract'], hotelId
])
table = inp['table_result']
for t in table:
cursor.execute('INSERT INTO responseDaywise(date, currentOcc, discountId, occupancy, type, count, ratePerRoom, responseId, forecast, leadTime, groups, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
t['date'], t['currentOcc'], t['discountId'], t['occupancy'], t['type'], t[
'count'], t['ratePerRoom'], responseId, t['forecast'], t['leadTime'], t['groups'], now, hotelId
])
cursor.execute('INSERT INTO responseAvg(single1, single2, double1, double2, triple1, triple2, quad1, quad2, responseId, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
inp['single1'], inp['single2'], inp['double1'], inp['double2'], inp['triple1'], inp['triple2'], inp['quad1'], inp['quad2'], responseId, now, hotelId
])
cursor.execute(
'UPDATE request set status = %s where id = %s && hotelId = %s', [statusval8, inp['requestId'], hotelId])
cursor.execute(
'UPDATE response set status = %s where requestId = %s && hotelId = %s order by submittedOn desc limit 1', [statusval8, inp['requestId'], hotelId])
now = datetime.datetime.utcnow()
email = session['email']
cursor.execute("INSERT INTO DeclineRequest(requestId, time, reason, declinedBy, hotelId) VALUES(%s, %s, %s, %s, %s) ", [
inp['requestId'], now, inp['reason'], email, hotelId])
mysql.connection.commit()
flash('The request has been declined', 'success')
return ('', 204)
@app.route('/requestProcessQuote', methods = ['GET', 'POST'])
@is_logged_in
def requestProcessQuote():
inp = request.json
cursor = mysql.connection.cursor()
responseId = inp['requestId'] + "R"
email = session['email']
now = datetime.datetime.utcnow()
status = statusval2
hotelId = session.get('hotelId')
cursor.execute('SELECT days from settingsTimelimit where hotelId = %s', [hotelId])
days = cursor.fetchall()
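# fall back to a 99-day expiry window when no time limit is configured; the expiry is then pinned to 23:59 on its final day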
endline = datetime.datetime.now().date() + datetime.timedelta(days = 99)
if len(days) != 0:
days = days[0]
days = int(days['days'])
endline = datetime.datetime.now().date() + datetime.timedelta(days = days)
endline = datetime.datetime.combine(endline, datetime.datetime.min.time())
endline = endline + datetime.timedelta(hours = 23, minutes = 59)
table = inp['table_result']
check_final = False
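# check_final flags whether any per-day rate triggers checkOverride (stored later as overrideFlag)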
for t in table:
cursor.execute('INSERT INTO responseDaywise(date, currentOcc, discountId, occupancy, type, count, ratePerRoom, responseId, forecast, leadTime, groups, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
t['date'], t['currentOcc'], t['discountId'], t['occupancy'], t['type'], t['count'], t['ratePerRoom'], responseId, t['forecast'], t['leadTime'], t['groups'], now, hotelId
])
check = checkOverride(t['ratePerRoom'])
if check:
check_final = True
check_final = 1 if check_final else 0
cursor.execute('INSERT INTO response(requestId, responseId, groupCategory, totalFare, foc, commission, commissionValue, totalQuote, cutoffDays, formPayment, paymentTerms, paymentGtd, negotiable, checkIn, checkOut, submittedBy, submittedOn, status, paymentDays, nights, comments, averageRate, contract, expiryTime, overrideReason, overrideFlag, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)' , [
inp['requestId'], responseId, inp['groupCategory'], inp['totalFare'], inp['foc'], str(inp['commission']), str(inp['commissionValue']), inp['totalQuote'], inp['cutoffDays'], procArr(inp['formPayment']), inp['paymentTerms'], inp['paymentGtd'], inp['negotiable'], inp['checkIn'], inp['checkOut'], email, now,
status, inp['paymentDays'], inp['nights'], inp['comments'],
inp['averageRate'], inp['contract'], endline, inp['overres'], check_final, hotelId
])
cursor.execute("UPDATE request SET status = %s WHERE id = %s && hotelId = %s", [statusval2, inp['requestId'], hotelId])
cursor.execute('UPDATE response set status = %s where requestId = %s && hotelId = %s order by submittedOn desc limit 1', [statusval2, inp['requestId'], hotelId]
)
cursor.execute('INSERT INTO responseAvg(single1, single2, double1, double2, triple1, triple2, quad1, quad2, responseId, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)' , [
inp['single1'], inp['single2'], inp['double1'], inp['double2'], inp['triple1'], inp['triple2'], inp['quad1'], inp['quad2'], responseId, now, hotelId
])
mysql.connection.commit()
cursor.execute('SELECT createdFor from request where id = %s && hotelId = %s', [inp['requestId'], hotelId])
createdFor = cursor.fetchall()
createdFor = createdFor[0]['createdFor']
token = generateConfirmationToken(inp['requestId'])
cursor.execute('SELECT * from mapHotelId where hotelId = %s', [hotelId])
hotelData = cursor.fetchall()
hotelName = hotelData[0]['hotelName']
hotelPhone = hotelData[0]['phone']
default_email = hotelData[0]['default_email']
sendMailQ(
subjectv = hotelName + ' ' + inp['requestId'] + ' - Group Rates',
recipientsv=createdFor,
linkv = 'showQuoteEmail',
tokenv = token,
hotelId = hotelId,
hotelName = hotelName,
hotelPhone = hotelPhone,
default_email = default_email,
bodyv = 'Please Do Not Reply to this email. \n Hello, \n\n You have received a response to your group rate enquiry.',
)
flash('The request has been quoted', 'success')
return ('', 204)
@app.route('/showQuote/<id>', methods = ['GET', 'POST'])
@is_logged_in
def showQuote(id):
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('SELECT * From request where id = %s && hotelId = %s', [id, hotelId])
data = cursor.fetchall()
data = data[0]
data['createdOn'] = data['createdOn'].strftime("%d %b, %y, %H:%M:%S")
v = data['paymentTerms']
if v is not None:
if v.count('pc') > 0:
data['paymentTerms'] = 'Post Checkout'
elif v.count('ac') > 0:
data['paymentTerms'] = 'At Checkout'
elif v.count('poa') > 0:
data['paymentTerms'] = 'Prior To Arrival'
data['formPayment'] = procArr2(data['formPayment'])
if data['comments'].isspace():
data['comments'] = ''
responseId = data['id'] + "R"
cursor.execute(
'SELECT * From response where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [responseId, hotelId])
data2 = cursor.fetchall()
data2 = data2[0]
negcheck = (data2['negotiable'] != 0)
data2['formPayment'] = procArr2(data2['formPayment'])
v = data2['paymentTerms']
if v is not None:
if v.count('pc') > 0:
data2['paymentTerms'] = 'Post Checkout'
elif v.count('ac') > 0:
data2['paymentTerms'] = 'At Checkout'
elif v.count('poa') > 0:
data2['paymentTerms'] = 'Prior To Arrival'
cursor.execute(
'SELECT * From responseAvg where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [responseId, hotelId])
data3 = cursor.fetchall()
data3 = data3[0]
result = {}
cursor.execute('SELECT * From request1Bed where id = %s && hotelId = %s', [id, hotelId])
temp1 = cursor.fetchall()
for t in temp1:
result[t['date']] = []
cursor.execute('SELECT * From request2Bed where id = %s && hotelId = %s', [id, hotelId])
temp2 = cursor.fetchall()
for t in temp2:
result[t['date']] = []
totalRooms = 0
roomCount = 0
for t in temp1:
tArr = {}
tArr['type'] = '1 Bed'
tArr['occupancy'] = t['occupancy']
tArr['count'] = t['count']
totalRooms += int(t['count'])
roomCount += int(t['count'])
result[t['date']].append(tArr)
for t in temp2:
tArr = {}
tArr['type'] = '2 Bed'
tArr['occupancy'] = t['occupancy']
tArr['count'] = t['count']
totalRooms += int(t['count'])
roomCount += int(t['count'])
result[t['date']].append(tArr)
dateButtons = result.keys()
secondresult = result
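# NOTE: secondresult aliases result (no copy is made), so the rows appended below land in both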
for r,v in secondresult.items():
for row in v:
type1 = row['type'].split(' ')[0]
occupancy = row['occupancy'].lower()
count = row['count']
if data['status'] == statusval8:
row['ratePerRoom'] = "-"
row['total'] = "-"
else:
search = occupancy + type1
query = "SELECT {} from responseAvg where responseId = %s && hotelId = %s".format(search)
cursor.execute(query, [responseId, hotelId])
sv = cursor.fetchall()
row['ratePerRoom'] = sv[0][search]
row['total'] = float(row['ratePerRoom']) * int(row['count'])
if data['foc'] != 0:
for key in secondresult:
row1 = {}
row2 = {}
row1['type'] = 'FOC'
row2['type'] = 'FOC'
if data['foc1'] != '0':
row1['count'] = data['foc1']
totalRooms += int(data['foc1'])
row1['occupancy'] = 'Single'
row1['ratePerRoom'] = "-"
row1['total'] = "-"
secondresult[key].append(row1)
if data['foc2'] != '0':
row2['count'] = data['foc2']
totalRooms += int(data['foc2'])
row2['occupancy'] = 'Double'
row2['ratePerRoom'] = "-"
row2['total'] = "-"
secondresult[key].append(row2)
data5 = []
if data2['status'] == statusval4:
cursor.execute('SELECT * from requestAccepted where requestId = %s && hotelId = %s', [id, hotelId])
data5 = cursor.fetchall()
data5 = data5[0]
temp1 = data5['time'].strftime('%y-%b-%d')
x = temp1.split('-')
data5['time'] = x[2] + " " + x[1] + ", " + x[0]
data6 = []
if (data2['status'] == statusval5 or data2['status'] == statusval8):
cursor.execute("SELECT * From DeclineRequest where requestId = %s && hotelId = %s", [id, hotelId])
data6 = cursor.fetchall()
data6 = data6[0]
temp1 = data6['time'].strftime('%y-%b-%d')
x = temp1.split('-')
data6['time'] = x[2] + " " + x[1] + ", " + x[0]
data9 = []
if (data2['status'] == statusval10):
cursor.execute('SELECT * From confirmRequest where requestId = %s && hotelId = %s', [id, hotelId])
data9 = cursor.fetchall()
data9 = data9[0]
temp1 = data9['submittedOn'].strftime('%y-%b-%d')
x = temp1.split('-')
data9['submittedOn'] = x[2] + " " + x[1] + ", " + x[0]
data10 = []
if (data2['status'] == statusval11):
cursor.execute('SELECT * From notConfirmRequest where requestId = %s && hotelId = %s', [id, hotelId])
data10 = cursor.fetchall()
data10 = data10[0]
temp1 = data10['submittedOn'].strftime('%y-%b-%d')
x = temp1.split('-')
data10['submittedOn'] = x[2] + " " + x[1] + ", " + x[0]
declined = False
declinedMsg = ""
endline = 0
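# same expiry handling as the quoted view: snapshot the latest response and mark the request expired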
if (data['status'] == statusval2):
endline = data2['expiryTime']
if endline is not None:
today = datetime.datetime.now()
if (today > endline):
cursor.execute(
'UPDATE request set status = %s where id = %s && hotelId = %s', [statusval9, data['id'], hotelId])
cursor.execute(
'SELECT * from response where requestId = %s && hotelId = %s order by submittedOn desc limit 1', [data['id'], hotelId])
email = session['email']
now = datetime.datetime.utcnow()
prevresponse = cursor.fetchall()
if len(prevresponse) != 0:
prevresponse = prevresponse[0]
cursor.execute('INSERT INTO response(requestId, responseId, groupCategory, totalFare, foc, commission, commissionValue, totalQuote, cutoffDays, formPayment, paymentTerms, paymentGtd, negotiable, checkIn, checkOut, submittedBy, submittedOn, status, paymentDays, nights, comments, averageRate, contract, expectedFare, negotiationReason, timesNegotiated, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
prevresponse['requestId'], prevresponse['responseId'], prevresponse['groupCategory'], prevresponse['totalFare'], prevresponse[
'foc'], prevresponse['commission'], prevresponse['commissionValue'], prevresponse['totalQuote'], prevresponse['cutoffDays'],
prevresponse['formPayment'], prevresponse['paymentTerms'], prevresponse['paymentGtd'], prevresponse[
'negotiable'], prevresponse['checkIn'], prevresponse['checkOut'], email, now,
statusval9, prevresponse['paymentDays'], prevresponse['nights'], prevresponse['comments'],
prevresponse['averageRate'], prevresponse['contract'], prevresponse['expectedFare'], prevresponse['negotiationReason'], prevresponse['timesNegotiated'], hotelId
])
cursor.execute(
'SELECT * From responseAvg where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [prevresponse['responseId'], hotelId])
prevAvg = cursor.fetchall()
if len(prevAvg) != 0:
prevAvg = prevAvg[0]
cursor.execute('INSERT INTO responseAvg(single1, single2, double1, double2, triple1, triple2, quad1, quad2, responseId, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
prevAvg['single1'], prevAvg['single2'], prevAvg['double1'], prevAvg['double2'], prevAvg[
'triple1'], prevAvg['triple2'], prevAvg['quad1'], prevAvg['quad2'], prevAvg['responseId'], now, hotelId
])
cursor.execute(
'SELECT submittedOn from responseDaywise where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [prevAvg['responseId'], hotelId])
submittedOn = cursor.fetchall()
cursor.execute('SELECT * From responseDaywise where responseId = %s and submittedOn = %s && hotelId = %s',
[prevAvg['responseId'], submittedOn[0]['submittedOn'], hotelId])
prevDaywise = cursor.fetchall()
if len(prevDaywise) != 0:
for p in prevDaywise:
cursor.execute('INSERT INTO responseDaywise(date, currentOcc, discountId, occupancy, type, count, ratePerRoom, responseId, forecast, leadTime, groups, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
p['date'], p['currentOcc'], p['discountId'], p['occupancy'], p['type'], p[
'count'], p['ratePerRoom'], prevAvg['responseId'], p['forecast'], p['leadTime'], p['groups'], now, hotelId
])
cursor.execute(
'UPDATE response set status = %s where requestId = %s && hotelId = %s order by submittedOn desc limit 1', [statusval9, data['id'], hotelId])
mysql.connection.commit()
declined = True
declinedMsg = "Time limit expired"
data['status'] = statusval9
data2['status'] = statusval9
temp1 = endline.strftime('%y-%b-%d, %H:%M:%S')
x = temp1.split('-')
endline = x[2].split(",")[0] + " " + x[1] + "," + x[0] + " " + x[2].split(",")[1]
cursor.execute('select count from settingsNegotiation where hotelId = %s', [hotelId])
count = cursor.fetchall()
if len(count) != 0:
count = count[0]['count']
else:
count = 100 # no hard limit configured, so assume 100
cursor.execute('SELECT * from response where responseId = %s and status = %s && hotelId = %s', [responseId, statusval3, hotelId])
negoTime = cursor.fetchall()
negoTimes = len(negoTime)
nego = False
negoInformation = {}
canNegotiate = False
if (int(negoTimes) <= int(count)):
canNegotiate = True
negoInformation['expectedFare'] = data2['expectedFare']
negoInformation['reason'] = data2['negotiationReason']
canNegotiate = canNegotiate and negcheck
cursor.execute('SELECT contract, id from contract where id = %s && hotelId = %s', [
data2['contract'], hotelId])
contract = cursor.fetchall()
if data2['cutoffDays'] is not None and data2['cutoffDays'] != '':
cutoff = data2['submittedOn'] + datetime.timedelta(days = int(data2['cutoffDays']))
temp1 = cutoff.strftime('%y-%b-%d, %H:%M:%S')
x = temp1.split('-')
cutoff = x[2].split(",")[0] + " " + x[1] + "," + x[0] + " " + x[2].split(",")[1]
data2['cutoffDays'] = cutoff
temp1 = data2['submittedOn'].strftime('%y-%b-%d, %H:%M:%S')
x = temp1.split('-')
data2['submittedOn'] = x[2].split(",")[0] + " " + x[1] + "," + x[0] + " " + x[2].split(",")[1]
temp1 = data['checkIn'].strftime('%y-%b-%d')
x = temp1.split('-')
data['checkIn'] = x[2] + " " + x[1] + ", " + x[0]
temp1 = data['checkOut'].strftime('%y-%b-%d')
x = temp1.split('-')
data['checkOut'] = x[2] + " " + x[1] + ", " + x[0]
for d in list(dateButtons):
y = d
temp1 = d.strftime('%Y-%b-%d-%A')
x = temp1.split('-')
d = x[3] + " : " + x[2] + " " + x[1] + "," + x[0]
result[d] = result[y]
del result[y]
dateButtons = result.keys()
avgRate = int(data2['totalQuote']) / int(roomCount)
avgRate = round(avgRate, 2)
return render_template('request/showQuote.html', data = data, data2 = data2, data3 = data3, dateButtons = dateButtons, result = result, secondresult = secondresult, data5 = data5, data6 = data6, contract = contract, declined = declined, declinedMsg = declinedMsg, canNegotiate = canNegotiate, negoInformation = negoInformation, data9 = data9, data10 = data10, endline = endline, totalRooms = totalRooms, customer = False, avgRate = avgRate)
@app.route('/showQuoteEmail/<id>', methods = ['GET', 'POST'])
def showQuoteEmail(id):
id = confirmToken(id)
hotelId = request.args.get('hotelId')
if (id == False):
flash('Unverified', 'danger')
return render_template('login.html', title = 'Login')
cursor = mysql.connection.cursor()
cursor.execute('SELECT * From request where id = %s && hotelId = %s', [id, hotelId])
data = cursor.fetchall()
data = data[0]
data['createdOn'] = data['createdOn'].strftime("%d %b, %y, %H:%M:%S")
v = data['paymentTerms']
if v is not None:
if v.count('pc') > 0:
data['paymentTerms'] = 'Post Checkout'
elif v.count('ac') > 0:
data['paymentTerms'] = 'At Checkout'
elif v.count('poa') > 0:
data['paymentTerms'] = 'Prior To Arrival'
data['formPayment'] = procArr2(data['formPayment'])
if data['comments'].isspace():
data['comments'] = ''
responseId = data['id'] + "R"
cursor.execute(
'SELECT * From response where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [responseId, hotelId])
data2 = cursor.fetchall()
data2 = data2[0]
negcheck = (data2['negotiable'] != 0)
data2['formPayment'] = procArr2(data2['formPayment'])
v = data2['paymentTerms']
if v is not None:
if v.count('pc') > 0:
data2['paymentTerms'] = 'Post Checkout'
elif v.count('ac') > 0:
data2['paymentTerms'] = 'At Checkout'
elif v.count('poa') > 0:
data2['paymentTerms'] = 'Prior To Arrival'
cursor.execute(
'SELECT * From responseAvg where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [responseId, hotelId])
data3 = cursor.fetchall()
data3 = data3[0]
result = {}
cursor.execute('SELECT * From request1Bed where id = %s && hotelId = %s', [id, hotelId])
temp1 = cursor.fetchall()
for t in temp1:
result[t['date']] = []
cursor.execute('SELECT * From request2Bed where id = %s && hotelId = %s', [id, hotelId])
temp2 = cursor.fetchall()
for t in temp2:
result[t['date']] = []
totalRooms = 0
roomCount = 0
for t in temp1:
tArr = {}
tArr['type'] = '1 Bed'
tArr['occupancy'] = t['occupancy']
tArr['count'] = t['count']
totalRooms += int(t['count'])
roomCount += int(t['count'])
result[t['date']].append(tArr)
for t in temp2:
tArr = {}
tArr['type'] = '2 Bed'
tArr['occupancy'] = t['occupancy']
tArr['count'] = t['count']
totalRooms += int(t['count'])
roomCount += int(t['count'])
result[t['date']].append(tArr)
dateButtons = result.keys()
secondresult = result
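# NOTE: as in showQuote, secondresult aliases result, so the FOC rows appended below land in both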
for r,v in secondresult.items():
for row in v:
type1 = row['type'].split(' ')[0]
occupancy = row['occupancy'].lower()
count = row['count']
if data['status'] == statusval8:
row['ratePerRoom'] = "-"
row['total'] = "-"
else:
search = occupancy + type1
query = "SELECT {} from responseAvg where responseId = %s && hotelId = %s".format(search)
cursor.execute(query, [responseId, hotelId])
sv = cursor.fetchall()
row['ratePerRoom'] = sv[0][search]
row['total'] = float(row['ratePerRoom']) * int(row['count'])
if data['foc'] != 0:
for key in secondresult:
row1 = {}
row2 = {}
row1['type'] = 'foc'
row2['type'] = 'foc'
if data['foc1'] != '0':
row1['count'] = data['foc1']
totalRooms += int(data['foc1'])
row1['occupancy'] = 'Single'
row1['ratePerRoom'] = "-"
row1['total'] = "-"
secondresult[key].append(row1)
if data['foc2'] != '0':
row2['count'] = data['foc2']
totalRooms += int(data['foc2'])
row2['occupancy'] = 'Double'
row2['ratePerRoom'] = "-"
row2['total'] = "-"
secondresult[key].append(row2)
data5 = []
if data2['status'] == statusval4:
cursor.execute('SELECT * from requestAccepted where requestId = %s && hotelId = %s', [id, hotelId])
data5 = cursor.fetchall()
data5 = data5[0]
temp1 = data5['time'].strftime('%y-%b-%d')
x = temp1.split('-')
data5['time'] = x[2] + " " + x[1] + ", " + x[0]
data6 = []
if (data2['status'] == statusval5 or data2['status'] == statusval8):
cursor.execute("SELECT * From DeclineRequest where requestId = %s && hotelId = %s", [id, hotelId])
data6 = cursor.fetchall()
data6 = data6[0]
temp1 = data6['time'].strftime('%y-%b-%d')
x = temp1.split('-')
data6['time'] = x[2] + " " + x[1] + ", " + x[0]
data9 = []
if (data2['status'] == statusval10):
cursor.execute('SELECT * From confirmRequest where requestId = %s && hotelId = %s', [id, hotelId])
data9 = cursor.fetchall()
data9 = data9[0]
temp1 = data9['submittedOn'].strftime('%y-%b-%d')
x = temp1.split('-')
data9['submittedOn'] = x[2] + " " + x[1] + ", " + x[0]
data10 = []
if (data2['status'] == statusval11):
cursor.execute('SELECT * From notConfirmRequest where requestId = %s && hotelId = %s', [id, hotelId])
data10 = cursor.fetchall()
data10 = data10[0]
temp1 = data10['submittedOn'].strftime('%y-%b-%d')
x = temp1.split('-')
data10['submittedOn'] = x[2] + " " + x[1] + ", " + x[0]
declined = False
declinedMsg = ""
endline = 0
if (data['status'] == statusval2):
endline = data2['expiryTime']
if endline is not None:
today = datetime.datetime.now()
if (today > endline):
cursor.execute(
'UPDATE request set status = %s where id = %s && hotelId = %s', [statusval9, data['id'], hotelId])
cursor.execute(
'SELECT * from response where requestId = %s && hotelId = %s order by submittedOn desc limit 1', [data['id'], hotelId])
email = session['email']
now = datetime.datetime.utcnow()
prevresponse = cursor.fetchall()
if len(prevresponse) != 0:
prevresponse = prevresponse[0]
cursor.execute('INSERT INTO response(requestId, responseId, groupCategory, totalFare, foc, commission, commissionValue, totalQuote, cutoffDays, formPayment, paymentTerms, paymentGtd, negotiable, checkIn, checkOut, submittedBy, submittedOn, status, paymentDays, nights, comments, averageRate, contract, expectedFare, negotiationReason, timesNegotiated, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
prevresponse['requestId'], prevresponse['responseId'], prevresponse['groupCategory'], prevresponse['totalFare'], prevresponse[
'foc'], prevresponse['commission'], prevresponse['commissionValue'], prevresponse['totalQuote'], prevresponse['cutoffDays'],
prevresponse['formPayment'], prevresponse['paymentTerms'], prevresponse['paymentGtd'], prevresponse[
'negotiable'], prevresponse['checkIn'], prevresponse['checkOut'], email, now,
statusval9, prevresponse['paymentDays'], prevresponse['nights'], prevresponse['comments'],
prevresponse['averageRate'], prevresponse['contract'], prevresponse['expectedFare'], prevresponse['negotiationReason'], prevresponse['timesNegotiated'], hotelId
])
cursor.execute(
'SELECT * From responseAvg where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [prevresponse['responseId'], hotelId])
prevAvg = cursor.fetchall()
if len(prevAvg) != 0:
prevAvg = prevAvg[0]
cursor.execute('INSERT INTO responseAvg(single1, single2, double1, double2, triple1, triple2, quad1, quad2, responseId, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
prevAvg['single1'], prevAvg['single2'], prevAvg['double1'], prevAvg['double2'], prevAvg[
'triple1'], prevAvg['triple2'], prevAvg['quad1'], prevAvg['quad2'], prevAvg['responseId'], now, hotelId
])
cursor.execute(
'SELECT submittedOn from responseDaywise where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [prevAvg['responseId'], hotelId])
submittedOn = cursor.fetchall()
cursor.execute('SELECT * From responseDaywise where responseId = %s and submittedOn = %s && hotelId = %s',
[prevAvg['responseId'], submittedOn[0]['submittedOn'], hotelId])
prevDaywise = cursor.fetchall()
if len(prevDaywise) != 0:
for p in prevDaywise:
cursor.execute('INSERT INTO responseDaywise(date, currentOcc, discountId, occupancy, type, count, ratePerRoom, responseId, forecast, leadTime, groups, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
p['date'], p['currentOcc'], p['discountId'], p['occupancy'], p['type'], p[
'count'], p['ratePerRoom'], prevAvg['responseId'], p['forecast'], p['leadTime'], p['groups'], now, hotelId
])
cursor.execute(
'UPDATE response set status = %s where requestId = %s && hotelId = %s order by submittedOn desc limit 1', [statusval9, data['id'], hotelId])
mysql.connection.commit()
declined = True
declinedMsg = "Time limit expired"
data['status'] = statusval9
data2['status'] = statusval9
temp1 = endline.strftime('%y-%b-%d, %H:%M:%S')
x = temp1.split('-')
endline = x[2].split(",")[0] + " " + x[1] + "," + x[0] + " " + x[2].split(",")[1]
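    # Negotiation is allowed only while the number of negotiated responses stays
    # within the per-hotel limit from settingsNegotiation (defaulting high when
    # no limit row exists).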
cursor.execute('select count from settingsNegotiation where hotelId = %s', [hotelId])
count = cursor.fetchall()
if len(count) != 0:
count = count[0]['count']
else:
        count = 100  # no negotiation limit configured, so fall back to a high default
cursor.execute('SELECT * from response where responseId = %s and status = %s && hotelId = %s', [responseId, statusval3, hotelId])
negoTime = cursor.fetchall()
negoTimes = len(negoTime)
nego = False
negoInformation = {}
canNegotiate = False
if (int(negoTimes) <= int(count)):
canNegotiate = True
negoInformation['expectedFare'] = data2['expectedFare']
negoInformation['reason'] = data2['negotiationReason']
canNegotiate = canNegotiate and negcheck
cursor.execute('SELECT contract, id from contract where id = %s && hotelId = %s', [
data2['contract'], hotelId])
contract = cursor.fetchall()
    if data2['cutoffDays'] not in (None, ''):
cutoff = data2['submittedOn'] + datetime.timedelta(days = int(data2['cutoffDays']))
temp1 = cutoff.strftime('%y-%b-%d, %H:%M:%S')
x = temp1.split('-')
cutoff = x[2].split(",")[0] + " " + x[1] + "," + x[0] + " " + x[2].split(",")[1]
data2['cutoffDays'] = cutoff
temp1 = data2['submittedOn'].strftime('%y-%b-%d, %H:%M:%S')
x = temp1.split('-')
data2['submittedOn'] = x[2].split(",")[0] + " " + x[1] + "," + x[0] + " " + x[2].split(",")[1]
temp1 = data['checkIn'].strftime('%y-%b-%d')
x = temp1.split('-')
data['checkIn'] = x[2] + " " + x[1] + ", " + x[0]
temp1 = data['checkOut'].strftime('%y-%b-%d')
x = temp1.split('-')
data['checkOut'] = x[2] + " " + x[1] + ", " + x[0]
for d in list(dateButtons):
y = d
temp1 = d.strftime('%Y-%b-%d-%A')
x = temp1.split('-')
d = x[3] + " : " + x[2] + " " + x[1] + "," + x[0]
result[d] = result[y]
del result[y]
dateButtons = result.keys()
avgRate = int(data2['totalQuote']) / int(roomCount)
avgRate = round(avgRate, 2)
return render_template('request/showQuote.html', data = data, data2 = data2, data3 = data3, dateButtons = dateButtons, result = result, secondresult = secondresult, data5 = data5, data6 = data6, contract = contract, declined = declined, declinedMsg = declinedMsg, canNegotiate = canNegotiate, negoInformation = negoInformation, data9 = data9, data10 = data10, endline = endline, totalRooms = totalRooms, customer = True, avgRate = avgRate, hotelId = hotelId)
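# Delete-request view. For requests that already have a response (quoted, negotiating,
# accepted, declined, reviewed, confirmed, etc.) it rebuilds the full quote view with
# deleteflag set; for freshly created requests it renders a simpler confirmation page.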
@app.route('/deleteRequest/<id>', methods = ['GET', 'POST'])
@is_logged_in
def deleteRequest(id):
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('SELECT status from request where id = %s && hotelId = %s', [id, hotelId])
status = cursor.fetchall()
    if status[0]['status'] in (statusval2, statusval3, statusval4, statusval5, statusval7, statusval8, statusval10, statusval11):
data5 = []
if (status[0]['status'] == statusval4):
cursor.execute(
'SELECT * From requestAccepted where requestId = %s && hotelId = %s', [id, hotelId])
data5 = cursor.fetchall()
data5 = data5[0]
temp1 = data5['time'].strftime('%y-%b-%d')
x = temp1.split('-')
data5['time'] = x[2] + " " + x[1] + ", " + x[0]
data6 = []
if (status[0]['status'] == statusval5):
cursor.execute(
"SELECT * From DeclineRequest where requestId = %s && hotelId = %s", [id, hotelId])
data6 = cursor.fetchall()
data6 = data6[0]
temp1 = data6['time'].strftime('%y-%b-%d')
x = temp1.split('-')
data6['time'] = x[2] + " " + x[1] + ", " + x[0]
data8 = []
if (status[0]['status'] == statusval7):
cursor.execute(
"SELECT * From review where requestId = %s && hotelId = %s", [id, hotelId])
data8 = cursor.fetchall()
data8 = data8[0]
temp1 = data8['time'].strftime('%y-%b-%d')
x = temp1.split('-')
data8['time'] = x[2] + " " + x[1] + ", " + x[0]
data9 = []
if (status[0]['status'] == statusval10):
cursor.execute('SELECT * From confirmRequest where requestId = %s && hotelId = %s', [id, hotelId])
data9 = cursor.fetchall()
data9 = data9[0]
temp1 = data9['submittedOn'].strftime('%y-%b-%d')
x = temp1.split('-')
data9['submittedOn'] = x[2] + " " + x[1] + ", " + x[0]
data10 = []
if (status[0]['status'] == statusval11):
cursor.execute('SELECT * From notConfirmRequest where requestId = %s && hotelId = %s', [id, hotelId])
data10 = cursor.fetchall()
data10 = data10[0]
temp1 = data10['submittedOn'].strftime('%y-%b-%d')
x = temp1.split('-')
data10['submittedOn'] = x[2] + " " + x[1] + ", " + x[0]
cursor.execute('SELECT * From request where id = %s && hotelId = %s', [id, hotelId])
data = cursor.fetchall()
data = data[0]
checkIn = data['checkIn']
checkOut = data['checkOut']
data['createdOn'] = data['createdOn'].strftime("%d %b ,%y, %H:%M:%S")
email = session['email']
now = datetime.datetime.utcnow()
cursor.execute(
'SELECT * From requestLastOpened where id = %s && hotelId = %s', [id, hotelId])
check = cursor.fetchall()
data['lastOpenedOn'] = check[0]['time']
data['lastOpenedBy'] = check[0]['openedBy']
temp1 = data['lastOpenedOn'].strftime('%y-%b-%d')
x = temp1.split('-')
data['lastOpenedOn'] = x[2] + " " + x[1] + ", " + x[0]
        v = data['paymentTerms']
        if v is not None:
            if 'pc' in v:
                data['paymentTerms'] = 'Post Checkout'
            elif 'ac' in v:
                data['paymentTerms'] = 'At Checkout'
            elif 'poa' in v:
                data['paymentTerms'] = 'Prior To Arrival'
data['formPayment'] = procArr2(data['formPayment'])
if data['comments'].isspace():
data['comments'] = ''
responseId = data['id'] + "R"
cursor.execute(
'SELECT * From response where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [responseId, hotelId])
data2 = cursor.fetchall()
tfoc = False
tcomm = False
data3 = []
lefttable = []
righttable = []
if len(data2) != 0:
data['groupCategory'] = data2[0]['groupCategory']
data2 = data2[0]
if (data2['foc'] != '0'):
tfoc = True
tcomm = True
if (data2['commission'] != '0'):
tcomm = True
            data2['formPayment'] = procArr2(data2['formPayment'])
            v = data2['paymentTerms']
            if v is not None:
                if 'pc' in v:
                    data2['paymentTerms'] = 'Post Checkout'
                elif 'ac' in v:
                    data2['paymentTerms'] = 'At Checkout'
                elif 'poa' in v:
                    data2['paymentTerms'] = 'Prior To Arrival'
cursor.execute('SELECT submittedOn from responseAvg where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [responseId, hotelId])
submittedOn = cursor.fetchall()
            if submittedOn[0]['submittedOn'] is None:
submittedOn = submittedOn[0]['submittedOn']
cursor.execute('SELECT * From responseAvg where responseId = %s', [responseId])
data3 = cursor.fetchall()
else:
cursor.execute('SELECT * From responseAvg where responseId = %s and submittedOn = %s && hotelId = %s', [responseId, submittedOn[0]['submittedOn'], hotelId])
data3 = cursor.fetchall()
data3 = data3[0]
cursor.execute(
'SELECT submittedOn from responseDaywise where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [responseId, hotelId])
submittedOn = cursor.fetchall()
            if submittedOn[0]['submittedOn'] is None:
submittedOn = submittedOn[0]['submittedOn']
cursor.execute(
'SELECT * From responseDaywise where responseId = %s && hotelId = %s', [responseId, hotelId])
data4 = cursor.fetchall()
else:
cursor.execute(
'SELECT * From responseDaywise where responseId = %s and submittedOn = %s && hotelId = %s', [responseId, submittedOn[0]['submittedOn'], hotelId])
data4 = cursor.fetchall()
lefttable = []
dataToCheck = []
righttable = {}
for d in data4:
righttable[d['date']] = []
for d in data4:
if d['date'] not in dataToCheck:
tempArr = {}
tempArr['date'] = d['date']
tempArr['currentOcc'] = d['currentOcc']
tempArr['discountId'] = d['discountId']
tempArr['forecast'] = d['forecast']
tempArr['groups'] = d['groups']
tempArr['leadTime'] = d['leadTime']
lefttable.append(tempArr)
dataToCheck.append(d['date'])
tArr = {}
tArr['occupancy'] = d['occupancy']
tArr['type'] = d['type']
tArr['count'] = d['count']
tArr['ratePerRoom'] = d['ratePerRoom']
righttable[d['date']].append(tArr)
for d in lefttable:
y = d['date']
temp1 = d['date'].strftime('%y-%b-%d')
x = temp1.split('-')
x = x[2] + " " + x[1] + "," + x[0]
d['date'] = x
for d in list(righttable):
y = d
temp1 = d.strftime('%y-%b-%d')
x = temp1.split('-')
d = x[2] + " " + x[1] + "," + x[0]
                righttable[d] = righttable[y]
                del righttable[y]
deleteflag = True
for key, value in righttable.items():
for r in value:
if (r['type'] == 'foc'):
r['type'] = "FOC"
cursor.execute('SELECT contract from contract where id = %s && hotelId = %s', [data2['contract'], hotelId])
contractv = cursor.fetchall()
if len(contractv) != 0:
contractv = contractv[0]['contract']
else:
contractv = ''
x = data2['submittedOn'].strftime('%y-%b-%d, %H:%M:%S')
x = x.split("-")
data2['submittedOn'] = x[2].split(",")[0] + " " + x[1] + "," + x[0] + " " + x[2].split(",")[1]
return render_template('request/requestQuotedView.html', data=data, data2=data2, tfoc=tfoc, tcomm=tcomm, data3=data3, lefttable=lefttable, righttable=righttable, data5=data5, data6=data6, deleteflag = deleteflag, data8 = data8, data9 = data9, data10 = data10, contractv = contractv)
elif (status[0]['status'] == statusval1):
cursor.execute('SELECT * From request where id = %s && hotelId = %s', [id, hotelId])
data = cursor.fetchall()
data = data[0]
checkIn = data['checkIn']
checkOut = data['checkOut']
data['createdOn'] = data['createdOn'].strftime("%d %b ,%y, %H:%M:%S")
email = session['email']
now = datetime.datetime.utcnow()
cursor.execute(
'SELECT * From requestLastOpened where id = %s && hotelId = %s', [id, hotelId])
check = cursor.fetchall()
if len(check) != 0:
data['lastOpenedOn'] = check[0]['time']
data['lastOpenedBy'] = check[0]['openedBy']
temp1 = data['lastOpenedOn'].strftime('%y-%b-%d')
x = temp1.split('-')
data['lastOpenedOn'] = x[2] + " " + x[1] + ", " + x[0]
else:
data['lastOpenedOn'] = ''
data['lastOpenedBy'] = ''
        v = data['paymentTerms']
        if v is not None:
            if 'pc' in v:
                data['paymentTerms'] = 'Post Checkout'
            elif 'ac' in v:
                data['paymentTerms'] = 'At Checkout'
            elif 'poa' in v:
                data['paymentTerms'] = 'Prior To Arrival'
data['formPayment'] = procArr2(data['formPayment'])
if data['comments'].isspace():
data['comments'] = ''
return render_template('request/deleteRequest.html', data=data)
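# AJAX endpoint that performs the deletion: marks the request deleted, snapshots the
# latest response (plus its responseAvg and responseDaywise rows) under the deleted
# status, and logs who deleted it and why in deletedRequest.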
@app.route('/DeleteRequest2', methods = ['GET', 'POST'])
@is_logged_in
def DeleteRequest2():
inp = request.json
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('UPDATE request set status = %s where id = %s && hotelId = %s', [statusval6, inp['id'], hotelId])
cursor.execute('SELECT * from response where requestId = %s && hotelId = %s order by submittedOn desc limit 1', [inp['id'], hotelId])
email = session['email']
now = datetime.datetime.utcnow()
prevresponse = cursor.fetchall()
if len(prevresponse) != 0:
prevresponse = prevresponse[0]
cursor.execute('INSERT INTO response(requestId, responseId, groupCategory, totalFare, foc, commission, commissionValue, totalQuote, cutoffDays, formPayment, paymentTerms, paymentGtd, negotiable, checkIn, checkOut, submittedBy, submittedOn, status, paymentDays, nights, comments, averageRate, contract, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
prevresponse['requestId'], prevresponse['responseId'], prevresponse['groupCategory'], prevresponse['totalFare'], prevresponse['foc'],prevresponse['commission'],prevresponse['commissionValue'], prevresponse['totalQuote'], prevresponse['cutoffDays'],
prevresponse['formPayment'], prevresponse['paymentTerms'], prevresponse['paymentGtd'], prevresponse['negotiable'], prevresponse['checkIn'], prevresponse['checkOut'], email, now,
statusval6, prevresponse['paymentDays'], prevresponse['nights'], prevresponse['comments'],
prevresponse['averageRate'], prevresponse['contract'], hotelId
])
        cursor.execute('SELECT * From responseAvg where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [prevresponse['responseId'], hotelId])
prevAvg = cursor.fetchall()
if len(prevAvg) != 0:
prevAvg = prevAvg[0]
cursor.execute('INSERT INTO responseAvg(single1, single2, double1, double2, triple1, triple2, quad1, quad2, responseId, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
prevAvg['single1'], prevAvg['single2'], prevAvg['double1'], prevAvg['double2'], prevAvg['triple1'], prevAvg['triple2'], prevAvg['quad1'], prevAvg['quad2'], prevAvg['responseId'], now, hotelId
])
            cursor.execute(
                'SELECT submittedOn from responseDaywise where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [prevAvg['responseId'], hotelId])
            submittedOn = cursor.fetchall()
            cursor.execute('SELECT * From responseDaywise where responseId = %s and submittedOn = %s && hotelId = %s', [prevAvg['responseId'], submittedOn[0]['submittedOn'], hotelId])
prevDaywise = cursor.fetchall()
if len(prevDaywise) != 0:
for p in prevDaywise:
cursor.execute('INSERT INTO responseDaywise(date, currentOcc, discountId, occupancy, type, count, ratePerRoom, responseId, forecast, leadTime, groups, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
p['date'], p['currentOcc'], p['discountId'], p['occupancy'], p['type'], p[
'count'], p['ratePerRoom'], prevAvg['responseId'], p['forecast'], p['leadTime'], p['groups'], now, hotelId
])
cursor.execute("INSERT INTO deletedRequest(requestId, time, reason, deletedBy, hotelId) VALUES(%s, %s, %s, %s, %s) ", [inp['id'], now, inp['reason'], email, hotelId])
mysql.connection.commit()
cursor.close()
flash('The request has been deleted', 'success')
return ('', 204)
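# Puts a request into negotiation: increments timesNegotiated and re-inserts the latest
# response snapshot with the customer's expected fare and reason. Note there is no
# @is_logged_in here; hotelId comes from the request body rather than the session.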
@app.route('/NegotiateRequest', methods = ['GET', 'POST'])
def NegotiateRequest():
inp = request.json
hotelId = inp['hotelId']
cursor = mysql.connection.cursor()
cursor.execute('SELECT timesNegotiated from response where requestId = %s && hotelId = %s order by submittedOn desc limit 1', [inp['id'], hotelId])
dd = cursor.fetchall()
dd = dd[0]
times = int(dd['timesNegotiated']) + 1
cursor.execute('UPDATE request set status = %s where id = %s && hotelId = %s', [statusval3, inp['id'], hotelId])
cursor.execute(
'SELECT * from response where requestId = %s && hotelId = %s order by submittedOn desc limit 1', [inp['id'], hotelId])
    # fall back to the request's creator when there is no logged-in session user
    email = session.get('email')
    if email is None:
        cursor.execute('SELECT createdFor from request where id = %s && hotelId = %s', [inp['requestId'], hotelId])
        createdFor = cursor.fetchall()
        email = createdFor[0]['createdFor']
now = datetime.datetime.utcnow()
prevresponse = cursor.fetchall()
if len(prevresponse) != 0:
prevresponse = prevresponse[0]
cursor.execute('INSERT INTO response(requestId, responseId, groupCategory, totalFare, foc, commission, commissionValue, totalQuote, cutoffDays, formPayment, paymentTerms, paymentGtd, negotiable, checkIn, checkOut, submittedBy, submittedOn, status, paymentDays, nights, comments, averageRate, contract, expectedFare, negotiationReason, timesNegotiated, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
prevresponse['requestId'], prevresponse['responseId'], prevresponse['groupCategory'], prevresponse['totalFare'], prevresponse[
'foc'], prevresponse['commission'], prevresponse['commissionValue'], prevresponse['totalQuote'], prevresponse['cutoffDays'],
prevresponse['formPayment'], prevresponse['paymentTerms'], prevresponse['paymentGtd'], prevresponse[
'negotiable'], prevresponse['checkIn'], prevresponse['checkOut'], email, now,
statusval3, prevresponse['paymentDays'], prevresponse['nights'], prevresponse['comments'],
prevresponse['averageRate'], prevresponse['contract'], inp['expectedFare'], inp['reason'], times, hotelId
])
cursor.execute(
'SELECT * From responseAvg where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [prevresponse['responseId'], hotelId])
prevAvg = cursor.fetchall()
if len(prevAvg) != 0:
prevAvg = prevAvg[0]
cursor.execute('INSERT INTO responseAvg(single1, single2, double1, double2, triple1, triple2, quad1, quad2, responseId, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
prevAvg['single1'], prevAvg['single2'], prevAvg['double1'], prevAvg['double2'], prevAvg[
'triple1'], prevAvg['triple2'], prevAvg['quad1'], prevAvg['quad2'], prevAvg['responseId'], now, hotelId
])
cursor.execute(
'SELECT submittedOn from responseDaywise where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [prevAvg['responseId'], hotelId])
submittedOn = cursor.fetchall()
cursor.execute('SELECT * From responseDaywise where responseId = %s and submittedOn = %s && hotelId = %s',
[prevAvg['responseId'], submittedOn[0]['submittedOn'], hotelId])
prevDaywise = cursor.fetchall()
if len(prevDaywise) != 0:
for p in prevDaywise:
cursor.execute('INSERT INTO responseDaywise(date, currentOcc, discountId, occupancy, type, count, ratePerRoom, responseId, forecast, leadTime, groups, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
p['date'], p['currentOcc'], p['discountId'], p['occupancy'], p['type'], p[
'count'], p['ratePerRoom'], prevAvg['responseId'], p['forecast'], p['leadTime'], p['groups'], now, hotelId
])
mysql.connection.commit()
flash('The request is sent for negotiation', 'success')
return ('', 204)
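# Accepts the latest quote: snapshots the response chain under the accepted status,
# records the acceptance in requestAccepted, and, when payment must be guaranteed,
# emails the customer the credit-card guarantee form as a PDF attachment.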
@app.route('/AcceptRequest', methods = ['GET', 'POST'])
def AcceptRequest():
inp = request.json
cursor = mysql.connection.cursor()
hotelId = inp['hotelId']
cursor.execute(
'SELECT * from response where requestId = %s && hotelId = %s order by submittedOn desc limit 1', [inp['id'], hotelId])
    # fall back to the request's creator when there is no logged-in session user
    email = session.get('email')
    if email is None:
        cursor.execute('SELECT createdFor from request where id = %s && hotelId = %s', [inp['requestId'], hotelId])
        createdFor = cursor.fetchall()
        email = createdFor[0]['createdFor']
now = datetime.datetime.utcnow()
prevresponse = cursor.fetchall()
if len(prevresponse) != 0:
prevresponse = prevresponse[0]
cursor.execute('INSERT INTO response(requestId, responseId, groupCategory, totalFare, foc, commission, commissionValue, totalQuote, cutoffDays, formPayment, paymentTerms, paymentGtd, negotiable, checkIn, checkOut, submittedBy, submittedOn, status, paymentDays, nights, comments, averageRate, contract, expectedFare, negotiationReason, timesNegotiated, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
prevresponse['requestId'], prevresponse['responseId'], prevresponse['groupCategory'], prevresponse['totalFare'], prevresponse[
'foc'], prevresponse['commission'], prevresponse['commissionValue'], prevresponse['totalQuote'], prevresponse['cutoffDays'],
prevresponse['formPayment'], prevresponse['paymentTerms'], prevresponse['paymentGtd'], prevresponse[
'negotiable'], prevresponse['checkIn'], prevresponse['checkOut'], email, now,
statusval4, prevresponse['paymentDays'], prevresponse['nights'], prevresponse['comments'],
prevresponse['averageRate'], prevresponse['contract'], prevresponse['expectedFare'], prevresponse['negotiationReason'], prevresponse['timesNegotiated'], hotelId
])
cursor.execute(
'SELECT * From responseAvg where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [prevresponse['responseId'], hotelId])
prevAvg = cursor.fetchall()
if len(prevAvg) != 0:
prevAvg = prevAvg[0]
cursor.execute('INSERT INTO responseAvg(single1, single2, double1, double2, triple1, triple2, quad1, quad2, responseId, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
prevAvg['single1'], prevAvg['single2'], prevAvg['double1'], prevAvg['double2'], prevAvg[
'triple1'], prevAvg['triple2'], prevAvg['quad1'], prevAvg['quad2'], prevAvg['responseId'], now, hotelId
])
cursor.execute(
'SELECT submittedOn from responseDaywise where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [prevAvg['responseId'], hotelId])
submittedOn = cursor.fetchall()
cursor.execute('SELECT * From responseDaywise where responseId = %s and submittedOn = %s && hotelId = %s',
[prevAvg['responseId'], submittedOn[0]['submittedOn'], hotelId])
prevDaywise = cursor.fetchall()
if len(prevDaywise) != 0:
for p in prevDaywise:
cursor.execute('INSERT INTO responseDaywise(date, currentOcc, discountId, occupancy, type, count, ratePerRoom, responseId, forecast, leadTime, groups, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
p['date'], p['currentOcc'], p['discountId'], p['occupancy'], p['type'], p[
'count'], p['ratePerRoom'], prevAvg['responseId'], p['forecast'], p['leadTime'], p['groups'], now, hotelId
])
cursor.execute('INSERT INTO requestAccepted(requestId, time, hotelId) VALUES(%s, %s, %s)', [inp['id'], now, hotelId])
cursor.execute('UPDATE request set status = %s where id = %s && hotelId = %s', [statusval4, inp['id'], hotelId])
cursor.execute('SELECT createdFor from request where id = %s && hotelId = %s', [inp['id'], hotelId])
createdFor = cursor.fetchall()
createdFor = createdFor[0]['createdFor']
if prevresponse['paymentGtd'] == 1:
with app.open_resource('static/docs/CC_Gurantee_Form.pdf') as fp:
msg = Message(
'Payment Guarantee',
sender = 'no-reply@trompar.com',
recipients= [createdFor],
)
msg.body = 'Kindly guarantee payment by filling this form'
msg.attach(
"PaymentGuarantee.pdf", "application/pdf", fp.read()
)
mail.send(msg)
mysql.connection.commit()
cursor.close()
flash('The request has been accepted', 'success')
return ('', 204)
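# Declines a request: snapshots the latest response under the declined status and
# records the reason and who declined it in DeclineRequest.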
@app.route('/DeclineRequest', methods = ['GET', 'POST'])
def DeclineRequest():
inp = request.json
cursor = mysql.connection.cursor()
hotelId = inp['hotelId']
cursor.execute('UPDATE request set status = %s where id = %s && hotelId = %s', [statusval5, inp['id'], hotelId])
cursor.execute(
'SELECT * from response where requestId = %s && hotelId = %s order by submittedOn desc limit 1', [inp['id'], hotelId])
    # fall back to the request's creator when there is no logged-in session user
    email = session.get('email')
    if email is None:
        cursor.execute('SELECT createdFor from request where id = %s && hotelId = %s', [inp['requestId'], hotelId])
        createdFor = cursor.fetchall()
        email = createdFor[0]['createdFor']
now = datetime.datetime.utcnow()
prevresponse = cursor.fetchall()
if len(prevresponse) != 0:
prevresponse = prevresponse[0]
cursor.execute('INSERT INTO response(requestId, responseId, groupCategory, totalFare, foc, commission, commissionValue, totalQuote, cutoffDays, formPayment, paymentTerms, paymentGtd, negotiable, checkIn, checkOut, submittedBy, submittedOn, status, paymentDays, nights, comments, averageRate, contract, expectedFare, negotiationReason, timesNegotiated, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
prevresponse['requestId'], prevresponse['responseId'], prevresponse['groupCategory'], prevresponse['totalFare'], prevresponse[
'foc'], prevresponse['commission'], prevresponse['commissionValue'], prevresponse['totalQuote'], prevresponse['cutoffDays'],
prevresponse['formPayment'], prevresponse['paymentTerms'], prevresponse['paymentGtd'], prevresponse[
'negotiable'], prevresponse['checkIn'], prevresponse['checkOut'], email, now,
statusval5, prevresponse['paymentDays'], prevresponse['nights'], prevresponse['comments'],
prevresponse['averageRate'], prevresponse['contract'], prevresponse[
'expectedFare'], prevresponse['negotiationReason'], prevresponse['timesNegotiated'], hotelId
])
cursor.execute(
'SELECT * From responseAvg where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [prevresponse['responseId'], hotelId])
prevAvg = cursor.fetchall()
if len(prevAvg) != 0:
prevAvg = prevAvg[0]
cursor.execute('INSERT INTO responseAvg(single1, single2, double1, double2, triple1, triple2, quad1, quad2, responseId, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
prevAvg['single1'], prevAvg['single2'], prevAvg['double1'], prevAvg['double2'], prevAvg[
'triple1'], prevAvg['triple2'], prevAvg['quad1'], prevAvg['quad2'], prevAvg['responseId'], now, hotelId
])
cursor.execute(
'SELECT submittedOn from responseDaywise where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [prevAvg['responseId'], hotelId])
submittedOn = cursor.fetchall()
cursor.execute('SELECT * From responseDaywise where responseId = %s and submittedOn = %s && hotelId = %s',
[prevAvg['responseId'], submittedOn[0]['submittedOn'], hotelId])
prevDaywise = cursor.fetchall()
if len(prevDaywise) != 0:
for p in prevDaywise:
cursor.execute('INSERT INTO responseDaywise(date, currentOcc, discountId, occupancy, type, count, ratePerRoom, responseId, forecast, leadTime, groups, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
p['date'], p['currentOcc'], p['discountId'], p['occupancy'], p['type'], p[
'count'], p['ratePerRoom'], prevAvg['responseId'], p['forecast'], p['leadTime'], p['groups'], now, hotelId
])
now = datetime.datetime.utcnow()
cursor.execute("INSERT INTO DeclineRequest(requestId, time, reason, declinedBy, hotelId) VALUES(%s, %s, %s, %s, %s) ", [inp['id'], now, inp['reason'], inp['declinedBy'], hotelId])
mysql.connection.commit()
cursor.close()
flash('The request has been declined', 'success')
return ('', 204)
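# Saves a drafted quote and routes it for internal review instead of sending it to the
# customer; any rate that trips checkOverride marks the response with overrideFlag.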
@app.route('/requestProcessReview', methods = ['GET', 'POST'])
@is_logged_in
def requestProcessReview():
inp = request.json
cursor = mysql.connection.cursor()
responseId = inp['requestId'] + "R"
email = session['email']
now = datetime.datetime.utcnow()
status = statusval7
hotelId = session.get('hotelId')
table = inp['table_result']
check_final = False
for t in table:
cursor.execute('INSERT INTO responseDaywise(date, currentOcc, discountId, occupancy, type, count, ratePerRoom, responseId, forecast, leadTime, groups, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
t['date'], t['currentOcc'], t['discountId'], t['occupancy'], t['type'], t['count'], t['ratePerRoom'], responseId, t['forecast'], t['leadTime'], t['groups'], now, hotelId
])
        check = checkOverride(t['ratePerRoom'])
        if check:
            check_final = True
    # the response row stores the override flag as a 0/1 int
    check_final = 1 if check_final else 0
cursor.execute('INSERT INTO response(requestId, responseId, groupCategory, totalFare, foc, commission, commissionValue, totalQuote, cutoffDays, formPayment, paymentTerms, paymentGtd, negotiable, checkIn, checkOut, submittedBy, submittedOn, status, paymentDays, nights, comments, averageRate, contract, overrideReason, overrideFlag, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)' , [
inp['requestId'], responseId, inp['groupCategory'], inp['totalFare'], inp['foc'], str(inp['commission']), str(inp['commissionValue']), inp['totalQuote'], inp['cutoffDays'], procArr(inp['formPayment']), inp['paymentTerms'], inp['paymentGtd'], inp['negotiable'], inp['checkIn'], inp['checkOut'], email, now,
status, inp['paymentDays'], inp['nights'], inp['comments'],
inp['averageRate'], inp['contract'], inp['overres'], check_final, hotelId
])
cursor.execute("UPDATE request SET status = %s WHERE id = %s && hotelId = %s", [statusval7, inp['requestId'], hotelId])
    cursor.execute('UPDATE response set status = %s where requestId = %s && hotelId = %s order by submittedOn desc limit 1', [statusval7, inp['requestId'], hotelId])
cursor.execute('INSERT INTO responseAvg(single1, single2, double1, double2, triple1, triple2, quad1, quad2, responseId, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)' , [
inp['single1'], inp['single2'], inp['double1'], inp['double2'], inp['triple1'], inp['triple2'], inp['quad1'], inp['quad2'], responseId, now, hotelId
])
cursor.execute('INSERT INTO review(requestId, sentBy, time, hotelId) VALUES(%s, %s, %s, %s)', [inp['requestId'], email, now, hotelId])
mysql.connection.commit()
flash("The request has been sent for review", 'success')
return ('', 204)
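# Read-only audit view: gathers every response snapshot for a request, groups the
# day-wise rows by submission time and date, and renders the negotiation history.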
@app.route('/requestHistory/<id>', methods = ['GET', 'POST'])
@is_logged_in
def requestHistory(id):
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('SELECT * From request where id = %s && hotelId = %s', [id, hotelId])
requestData = cursor.fetchall()
requestData = requestData[0]
    requestData['formPayment'] = procArr2(requestData['formPayment'])
if requestData['comments'].isspace():
requestData['comments'] = ''
cursor.execute('SELECT * From response where requestId = %s && hotelId = %s', [id, hotelId])
responseData = cursor.fetchall()
data6 = []
for r in responseData:
        v = r['paymentTerms']
        if v is not None:
            if 'pc' in v:
                r['paymentTerms'] = 'Post Checkout'
            elif 'ac' in v:
                r['paymentTerms'] = 'At Checkout'
            elif 'poa' in v:
                r['paymentTerms'] = 'Prior To Arrival'
r['formPayment'] = procArr2(r['formPayment'])
if r['comments'].isspace():
r['comments'] = ''
if (r['status'] == statusval5 or r['status'] == statusval8):
cursor.execute("SELECT * From DeclineRequest where requestId = %s && hotelId = %s", [id, hotelId])
data6 = cursor.fetchall()
data6 = data6[0]
r['msg'] = data6['reason']
r['by'] = data6['declinedBy']
r['time'] = data6['time']
temp1 = data6['time'].strftime('%y-%b-%d')
x = temp1.split('-')
data6['time'] = x[2] + " " + x[1] + ", " + x[0]
responseId = id + "R"
cursor.execute('SELECT * From responseAvg where responseId = %s && hotelId = %s', [responseId, hotelId])
responseAvgData = cursor.fetchall()
cursor.execute('SELECT * From responseDaywise where responseId = %s && hotelId = %s ', [responseId, hotelId])
responseDaywiseData = cursor.fetchall()
tempdict = {}
for row in responseDaywiseData:
tempdict[row['submittedOn']] = []
for r in responseDaywiseData:
tempdict[r['submittedOn']].append(r)
responseOverReason = []
responseDaywiseData = tempdict
finalresult = []
for key, value in responseDaywiseData.items():
tdict = {}
for r in value:
            try:
                r['total'] = int(r['count']) * float(r['ratePerRoom'].split('(')[0])
            except (ValueError, TypeError, AttributeError):
                # non-numeric rate (e.g. a placeholder), so no line total
                r['total'] = "-"
            if r['date'] in tdict:
                tdict[r['date']].append(r)
            else:
                tdict[r['date']] = [r]
finalresult.append(tdict)
for d in finalresult:
for m in list(d):
y = m
temp1 = m.strftime('%y-%b-%d')
x = temp1.split('-')
z = x[2] + " " + x[1] + "," + x[0]
d[z] = d[m]
del d[m]
responseDaywiseData = finalresult
return render_template('request/showHistory.html', requestData = requestData, responseData = responseData, responseAvgData = responseAvgData, responseDaywiseData = responseDaywiseData, data6 = data6, responseOverReason = responseOverReason)
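# Confirmation page for an accepted request, showing when it was accepted and the
# latest total quote.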
@app.route('/confirmRequest/<token>', methods = ['GET', 'POST'])
@is_logged_in
def confirmRequest(token):
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('SELECT * From request where id = %s && hotelId = %s', [token, hotelId])
requestData = cursor.fetchall()
requestData = requestData[0]
cursor.execute('SELECT * From requestAccepted where requestId = %s && hotelId = %s', [token, hotelId])
acceptedOn = cursor.fetchall()
acceptedOn = acceptedOn[0]['time']
cursor.execute('SELECT totalQuote from response where requestId = %s && hotelId = %s order by submittedOn desc limit 1', [token, hotelId])
totalQuote = cursor.fetchall()
totalQuote = totalQuote[0]['totalQuote']
    temp1 = requestData['checkIn'].strftime('%y-%b-%d')
    x = temp1.split('-')
    requestData['checkIn'] = x[2] + " " + x[1] + ", " + x[0]
    temp1 = requestData['checkOut'].strftime('%y-%b-%d')
    x = temp1.split('-')
    requestData['checkOut'] = x[2] + " " + x[1] + ", " + x[0]
return render_template('request/confirmRequest.html', requestData = requestData, acceptedOn = acceptedOn, totalQuote = totalQuote)
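# Finalises a booking: snapshots the latest response under the confirmed status,
# stores the confirmation code, and emails the requester the confirmed total quote.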
@app.route('/confirmRequestSubmit', methods = ['GET', 'POST'])
@is_logged_in
def confirmRequestSubmit():
inp = request.json
cursor = mysql.connection.cursor()
time = datetime.datetime.utcnow()
hotelId = session.get('hotelId')
cursor.execute(
'SELECT * from response where requestId = %s && hotelId = %s order by submittedOn desc limit 1', [inp['id'], hotelId])
email = session['email']
now = datetime.datetime.utcnow()
prevresponse = cursor.fetchall()
if len(prevresponse) != 0:
prevresponse = prevresponse[0]
cursor.execute('INSERT INTO response(requestId, responseId, groupCategory, totalFare, foc, commission, commissionValue, totalQuote, cutoffDays, formPayment, paymentTerms, paymentGtd, negotiable, checkIn, checkOut, submittedBy, submittedOn, status, paymentDays, nights, comments, averageRate, contract, expectedFare, negotiationReason, timesNegotiated, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
prevresponse['requestId'], prevresponse['responseId'], prevresponse['groupCategory'], prevresponse['totalFare'], prevresponse[
'foc'], prevresponse['commission'], prevresponse['commissionValue'], prevresponse['totalQuote'], prevresponse['cutoffDays'],
prevresponse['formPayment'], prevresponse['paymentTerms'], prevresponse['paymentGtd'], prevresponse[
'negotiable'], prevresponse['checkIn'], prevresponse['checkOut'], email, now,
statusval10, prevresponse['paymentDays'], prevresponse['nights'], prevresponse['comments'],
prevresponse['averageRate'], prevresponse['contract'], prevresponse['expectedFare'], prevresponse['negotiationReason'], prevresponse['timesNegotiated'], hotelId
])
cursor.execute(
'SELECT * From responseAvg where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [prevresponse['responseId'], hotelId])
prevAvg = cursor.fetchall()
if len(prevAvg) != 0:
prevAvg = prevAvg[0]
cursor.execute('INSERT INTO responseAvg(single1, single2, double1, double2, triple1, triple2, quad1, quad2, responseId, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
prevAvg['single1'], prevAvg['single2'], prevAvg['double1'], prevAvg['double2'], prevAvg[
'triple1'], prevAvg['triple2'], prevAvg['quad1'], prevAvg['quad2'], prevAvg['responseId'], now, hotelId
])
cursor.execute(
'SELECT submittedOn from responseDaywise where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [prevAvg['responseId'], hotelId])
submittedOn = cursor.fetchall()
cursor.execute('SELECT * From responseDaywise where responseId = %s and submittedOn = %s && hotelId = %s',
[prevAvg['responseId'], submittedOn[0]['submittedOn'], hotelId])
prevDaywise = cursor.fetchall()
if len(prevDaywise) != 0:
for p in prevDaywise:
cursor.execute('INSERT INTO responseDaywise(date, currentOcc, discountId, occupancy, type, count, ratePerRoom, responseId, forecast, leadTime, groups, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
p['date'], p['currentOcc'], p['discountId'], p['occupancy'], p['type'], p[
'count'], p['ratePerRoom'], prevAvg['responseId'], p['forecast'], p['leadTime'], p['groups'], now, hotelId
])
cursor.execute('INSERT INTO confirmRequest(requestId, confirmationCode, comments, submittedBy, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s)', [inp['id'], inp['confirmationCode'], inp['comments'], email, time, hotelId])
cursor.execute('UPDATE request set status = %s where id = %s && hotelId = %s', [
statusval10, inp['id'], hotelId
])
cursor.execute('UPDATE response set status = %s where requestId = %s && hotelId = %s order by submittedOn desc limit 1', [statusval10, inp['id'], hotelId])
cursor.execute('SELECT createdFor from request where id = %s && hotelId = %s', [inp['id'], hotelId])
createdFor = cursor.fetchall()
createdFor = createdFor[0]['createdFor']
cursor.execute('SELECT totalQuote from response where requestId = %s && hotelId = %s order by submittedOn desc limit 1', [inp['id'], hotelId])
totalQuote = cursor.fetchall()
totalQuote = totalQuote[0]['totalQuote']
mysql.connection.commit()
    msg = 'Your request with confirmation code {} has been confirmed for a total quote of ${}'.format(inp['confirmationCode'], totalQuote)
sendMail2(
subjectv = 'Confirmation Email',
recipientsv = createdFor,
bodyv = msg,
)
flash('The request has been confirmed', 'success')
return ('', 204)
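# Records that an accepted request was not confirmed: snapshots the latest response
# under the not-confirmed status, stores the reason, and notifies the requester by email.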
@app.route('/notConfirmRequest', methods=['GET', 'POST'])
@is_logged_in
def notConfirmRequest():
inp = request.json
cursor = mysql.connection.cursor()
email = session['email']
time = datetime.datetime.utcnow()
hotelId = session.get('hotelId')
cursor.execute(
'SELECT * from response where requestId = %s && hotelId = %s order by submittedOn desc limit 1', [inp['id'], hotelId])
now = datetime.datetime.utcnow()
prevresponse = cursor.fetchall()
if len(prevresponse) != 0:
prevresponse = prevresponse[0]
cursor.execute('INSERT INTO response(requestId, responseId, groupCategory, totalFare, foc, commission, commissionValue, totalQuote, cutoffDays, formPayment, paymentTerms, paymentGtd, negotiable, checkIn, checkOut, submittedBy, submittedOn, status, paymentDays, nights, comments, averageRate, contract, expectedFare, negotiationReason, timesNegotiated, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
prevresponse['requestId'], prevresponse['responseId'], prevresponse['groupCategory'], prevresponse['totalFare'], prevresponse[
'foc'], prevresponse['commission'], prevresponse['commissionValue'], prevresponse['totalQuote'], prevresponse['cutoffDays'],
prevresponse['formPayment'], prevresponse['paymentTerms'], prevresponse['paymentGtd'], prevresponse[
'negotiable'], prevresponse['checkIn'], prevresponse['checkOut'], email, now,
statusval11, prevresponse['paymentDays'], prevresponse['nights'], prevresponse['comments'],
prevresponse['averageRate'], prevresponse['contract'], prevresponse[
'expectedFare'], prevresponse['negotiationReason'], prevresponse['timesNegotiated'], hotelId
])
cursor.execute(
'SELECT * From responseAvg where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [prevresponse['responseId'], hotelId])
prevAvg = cursor.fetchall()
if len(prevAvg) != 0:
prevAvg = prevAvg[0]
cursor.execute('INSERT INTO responseAvg(single1, single2, double1, double2, triple1, triple2, quad1, quad2, responseId, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
prevAvg['single1'], prevAvg['single2'], prevAvg['double1'], prevAvg['double2'], prevAvg[
'triple1'], prevAvg['triple2'], prevAvg['quad1'], prevAvg['quad2'], prevAvg['responseId'], now, hotelId
])
cursor.execute(
'SELECT submittedOn from responseDaywise where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [prevAvg['responseId'], hotelId])
submittedOn = cursor.fetchall()
cursor.execute('SELECT * From responseDaywise where responseId = %s and submittedOn = %s && hotelId = %s ',
[prevAvg['responseId'], submittedOn[0]['submittedOn'], hotelId])
prevDaywise = cursor.fetchall()
if len(prevDaywise) != 0:
for p in prevDaywise:
cursor.execute('INSERT INTO responseDaywise(date, currentOcc, discountId, occupancy, type, count, ratePerRoom, responseId, forecast, leadTime, groups, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
p['date'], p['currentOcc'], p['discountId'], p['occupancy'], p['type'], p[
'count'], p['ratePerRoom'], prevAvg['responseId'], p['forecast'], p['leadTime'], p['groups'], now, hotelId
])
cursor.execute('INSERT INTO notConfirmRequest(requestId, confirmationCode, comments, submittedBy, submittedOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s)', [
inp['id'], inp['confirmationCode'], inp['comments'], email, time, hotelId])
cursor.execute('UPDATE request set status = %s where id = %s && hotelId = %s ', [
statusval11, inp['id'], hotelId
])
cursor.execute('UPDATE response set status = %s where requestId = %s && hotelId = %s order by submittedOn desc limit 1', [
statusval11, inp['id'], hotelId])
mysql.connection.commit()
cursor.execute('SELECT createdFor from request where id = %s && hotelId = %s ', [inp['id'], hotelId])
createdFor = cursor.fetchall()
createdFor = createdFor[0]['createdFor']
mysql.connection.commit()
    msg = 'Your request with confirmation code {} has been declined by you for the following reason: "{}"'.format(
        inp['confirmationCode'], inp['comments'])
sendMail2(
subjectv='Confirmation Email',
recipientsv=createdFor,
bodyv=msg,
)
flash('The request is now declined', 'danger')
return ('', 204)
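# Returns the occupancy-edit widget pre-filled with the current occupancy values from
# the most recent day-wise response rows for this request.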
@app.route('/changeOcc/<id>', methods = ['GET', 'POST'])
@is_logged_in
def changeOcc(id):
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
responseId = id + "R"
cursor.execute('SELECT submittedOn from responseDaywise where responseId = %s && hotelId = %s order by submittedOn desc limit 1', [responseId, hotelId])
submittedOn = cursor.fetchall()
submittedOn = submittedOn[0]['submittedOn']
cursor.execute('SELECT date, currentOcc from responseDaywise where responseId = %s and submittedOn = %s && hotelId = %s ', [responseId, submittedOn, hotelId])
occ = cursor.fetchall()
tempdict = {}
for row in occ:
tempdict[row['date']] = row['currentOcc'].split(" (")[0]
flag = False
for key, value in tempdict.copy().items():
if (value != '' and value != '-'):
flag = True
else:
tempdict.pop(key)
for d in list(tempdict):
y = d
d = d.strftime('%y-%b-%d')
x = d.split('-')
d = x[2] + " " + x[1] + ", " + x[0]
tempdict[d] = tempdict[y]
del tempdict[y]
return render_template('request/getOccEdit.html', occ = tempdict, flag = flag, token = id)
# Request Actions Done
@app.route('/analyticsbehavior', methods = ['GET', 'POST'])
@is_logged_in
def analyticsbehavior():
return render_template('analytics/behavior.html', url = url)
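# Aggregates request counts and average nights per booking, bucketed by lead time,
# category, customer type and status; each filter either narrows to the selected value
# or reports every bucket.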
@app.route('/analyticsbehaviorGet', methods = ['GET'])
@is_logged_in
def analyticsbehaviorGet():
cursor = mysql.connection.cursor()
startDate = request.args.get('startDate')
endDate = request.args.get('endDate')
leadtime = request.args.get('leadtime')
category = request.args.get('category')
customerType = request.args.get('customerType')
status = request.args.get('status')
hotelId = session.get('hotelId')
result = {}
result['leadres'] = []
result['category'] = []
result['customerType'] = []
result['statusres'] = []
if leadtime != 'Booking Lead Time':
leadres = []
tempres = {}
if leadtime == "180 +":
lead1 = 180
cursor.execute('SELECT * From request where createdOn >= %s && createdOn <= %s && leadTime >= %s && hotelId = %s', [startDate, endDate, lead1, hotelId])
leadres1 = cursor.fetchall()
else:
t1 = leadtime.split(' - ')
lead1 = t1[0]
lead2 = t1[1]
cursor.execute('SELECT * From request where createdOn >= %s && createdOn <= %s && leadTime >= %s && leadTime <= %s && hotelId = %s', [startDate, endDate, int(lead1), int(lead2), hotelId])
leadres1 = cursor.fetchall()
tempres['0'] = leadtime
tempres['1'] = len(leadres1)
if len(leadres1) != 0:
nights = 0
for r in leadres1:
nights = nights + int(r['nights'])
nights = nights / len(leadres1)
nights = round(nights, 2)
tempres['2'] = nights
else:
tempres['2'] = 0
leadres.append(tempres)
else:
leadres = []
tempres1 = {}
tempres1['0'] = "0 - 14"
tempres2 = {}
tempres2['0'] = "14 - 45"
tempres3 = {}
tempres3['0'] = "45 - 120"
tempres4 = {}
tempres4['0'] = "120 - 180"
tempres5 = {}
tempres5['0'] = "180 +"
cursor.execute('SELECT * from request where createdOn >= %s && createdOn <= %s && leadTime >= %s && leadTime <= %s && hotelId = %s', [startDate, endDate, 0, 14, hotelId])
leadres1 = cursor.fetchall()
tempres1['1'] = len(leadres1)
if len(leadres1) != 0:
nights = 0
for r in leadres1:
nights = nights + int(r['nights'])
nights = nights / len(leadres1)
nights = round(nights, 2)
tempres1['2'] = nights
else:
tempres1['2'] = 0
leadres.append(tempres1)
cursor.execute('SELECT * from request where createdOn >= %s && createdOn <= %s && leadTime > %s && leadTime <= %s && hotelId = %s', [startDate, endDate, 14, 45, hotelId])
leadres2 = cursor.fetchall()
tempres2['1'] = len(leadres2)
if len(leadres2) != 0:
nights = 0
for r in leadres2:
nights = nights + int(r['nights'])
nights = nights / len(leadres2)
nights = round(nights, 2)
tempres2['2'] = nights
else:
tempres2['2'] = 0
leadres.append(tempres2)
cursor.execute('SELECT * from request where createdOn >= %s && createdOn <= %s && leadTime > %s && leadTime <= %s && hotelId = %s', [startDate, endDate, 45, 120, hotelId])
leadres3 = cursor.fetchall()
tempres3['1'] = len(leadres3)
if len(leadres3) != 0:
nights = 0
for r in leadres3:
nights = nights + int(r['nights'])
nights = nights / len(leadres3)
nights = round(nights, 2)
tempres3['2'] = nights
else:
tempres3['2'] = 0
leadres.append(tempres3)
cursor.execute('SELECT * from request where createdOn >= %s && createdOn <= %s && leadTime > %s && leadTime <= %s && hotelId = %s', [startDate, endDate, 120, 180, hotelId])
leadres4 = cursor.fetchall()
tempres4['1'] = len(leadres4)
if len(leadres4) != 0:
nights = 0
for r in leadres4:
nights = nights + int(r['nights'])
nights = nights / len(leadres4)
nights = round(nights, 2)
tempres4['2'] = nights
else:
tempres4['2'] = 0
leadres.append(tempres4)
cursor.execute('SELECT * from request where createdOn >= %s && createdOn <= %s && leadTime > %s && hotelId = %s', [startDate, endDate, 180, hotelId])
leadres5 = cursor.fetchall()
tempres5['1'] = len(leadres5)
if len(leadres5) != 0:
nights = 0
for r in leadres5:
nights = nights + int(r['nights'])
nights = nights / len(leadres5)
nights = round(nights, 2)
tempres5['2'] = nights
else:
tempres5['2'] = 0
leadres.append(tempres5)
result['leadres'] = leadres
if category != 'Category':
catres = []
tempres = {}
cursor.execute('SELECT * From request where createdOn >= %s && createdOn <= %s && category = %s && hotelId = %s', [startDate, endDate, category, hotelId])
tempres1 = cursor.fetchall()
tempres['0'] = category
tempres['1'] = len(tempres1)
if len(tempres1) != 0:
nights = 0
for r in tempres1:
nights = nights + int(r['nights'])
nights = nights / len(tempres1)
nights = round(nights, 2)
tempres['2'] = nights
else:
tempres['2'] = 0
catres.append(tempres)
else:
catres = []
cursor.execute('show columns from requestCategory')
categories = cursor.fetchall()
for c in categories:
cat = c['Field']
tempres = {}
cursor.execute('SELECT * From request where createdOn >= %s && createdOn <= %s && category = %s && hotelId = %s', [startDate, endDate, cat, hotelId])
tempres1 = cursor.fetchall()
tempres['0'] = cat
tempres['1'] = len(tempres1)
if len(tempres1) != 0:
nights = 0
for r in tempres1:
nights = nights + int(r['nights'])
nights = nights / len(tempres1)
nights = round(nights, 2)
tempres['2'] = nights
else:
tempres['2'] = 0
catres.append(tempres)
result['catres'] = catres
if customerType != 'Customer Type':
custres = []
tempres = {}
if (customerType == 'IATA'):
cursor.execute('SELECT * From request where createdOn >= %s && createdOn <= %s && userType = %s && hotelId = %s', [startDate, endDate, customerType, hotelId])
tempres1 = cursor.fetchall()
tempres['0'] = customerType
tempres['1'] = len(tempres1)
if len(tempres1) != 0:
nights = 0
for r in tempres1:
nights = nights + int(r['nights'])
nights = nights / len(tempres1)
nights = round(nights, 2)
tempres['2'] = nights
else:
tempres['2'] = 0
            custres.append(tempres)
else:
cursor.execute('SELECT * From request where createdOn >= %s && createdOn <= %s && userType = %s && hotelId = %s',
[startDate, endDate, "customer", hotelId])
tempres1 = cursor.fetchall()
count = 0
nights = 0
for r in tempres1:
cursor.execute('SELECT userSubType from users where email = %s && hotelId = %s', [r['createdFor'], hotelId])
dd = cursor.fetchall()
if (dd[0]['userSubType'] == customerType):
count = count + 1
nights = nights + int(r['nights'])
if (count != 0):
nights = nights / count
nights = round(nights, 2)
else:
nights = 0
tempres['0'] = customerType
tempres['1'] = count
tempres['2'] = nights
custres.append(tempres)
else:
custres = []
tempres = {}
cursor.execute('SELECT * From request where createdOn >= %s && createdOn <= %s && userType = %s && hotelId = %s', [startDate, endDate, "IATA", hotelId])
tempres1 = cursor.fetchall()
tempres['0'] = "IATA"
tempres['1'] = len(tempres1)
if len(tempres1) != 0:
nights = 0
for r in tempres1:
nights = nights + int(r['nights'])
nights = nights / len(tempres1)
nights = round(nights, 2)
tempres['2'] = nights
else:
tempres['2'] = 0
custres.append(tempres)
cursor.execute('SELECT * From request where createdOn >= %s && createdOn <= %s && userType = %s && hotelId = %s', [startDate, endDate, "customer", hotelId])
count1 = 0
count2 = 0
count3 = 0
night1 = 0
night2 = 0
night3 = 0
tempres1 = cursor.fetchall()
for r in tempres1:
cursor.execute('SELECT userSubType from users where email = %s && hotelId = %s', [r['createdFor'], hotelId])
dd = cursor.fetchall()
if (dd[0]['userSubType'] == 'retail'):
count1 = count1 + 1
night1 = night1 + int(r['nights'])
elif (dd[0]['userSubType'] == 'corporate'):
count2 = count2 + 1
night2 = night2 + int(r['nights'])
elif (dd[0]['userSubType'] == 'tour'):
count3 = count3 + 1
night3 = night3 + int(r['nights'])
if (count1 != 0):
night1 = night1 / count1
night1 = round(night1, 2)
else:
night1 = 0
if (count2 != 0):
night2 = night2 / count2
night2 = round(night2, 2)
else:
night2 = 0
if (count3 != 0):
night3 = night3 / count3
night3 = round(night3, 2)
else:
night3 = 0
tempres1 = {}
tempres2 = {}
tempres3 = {}
tempres1['0'] = "retail"
tempres1['1'] = count1
tempres1['2'] = night1
custres.append(tempres1)
tempres2['0'] = "corporate"
tempres2['1'] = count2
tempres2['2'] = night2
custres.append(tempres2)
tempres3['0'] = "tour"
tempres3['1'] = count3
tempres3['2'] = night3
custres.append(tempres3)
result['custres'] = custres
if (status != 'Status'):
statusres = []
tempres = {}
cursor.execute('SELECT * From request where createdOn >= %s && createdOn <= %s && status = %s && hotelId = %s', [startDate, endDate, status, hotelId])
tempres1 = cursor.fetchall()
tempres['0'] = status
tempres['1'] = len(tempres1)
if len(tempres1) != 0:
nights = 0
for r in tempres1:
nights = nights + int(r['nights'])
nights = nights / len(tempres1)
nights = round(nights, 2)
tempres['2'] = nights
else:
tempres['2'] = 0
statusres.append(tempres)
else:
statusres = []
        statuses = [statusval1, statusval2, statusval3, statusval4, statusval5, statusval6, statusval7, statusval8, statusval9, statusval10, statusval11]
for s in statuses:
tempres = {}
cursor.execute('SELECT * From request where createdOn >= %s && createdOn <= %s && status = %s && hotelId = %s', [startDate, endDate, s, hotelId])
tempres1 = cursor.fetchall()
tempres['0'] = s
tempres['1'] = len(tempres1)
if len(tempres1) != 0:
nights = 0
for r in tempres1:
nights = nights + int(r['nights'])
nights = nights / len(tempres1)
nights = round(nights, 2)
tempres['2'] = nights
else:
tempres['2'] = 0
statusres.append(tempres)
result['statusres'] = statusres
return jsonify(result), 200
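# 31-day dashboard: lead-time distribution, hotel and customer response-time buckets,
# quoted vs. confirmed revenue, and upcoming check-ins over the next five days.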
@app.route('/analyticsdashboard', methods = ['GET', 'POST'])
@is_logged_in
def analyticsdashboard():
cursor = mysql.connection.cursor()
endDate = datetime.datetime.today()
startDate = endDate - datetime.timedelta(days = 31)
hotelId = session.get('hotelId')
startDatePass = startDate.strftime('%y-%b-%d')
x = startDatePass.split('-')
startDatePass = x[2] + " " + x[1] + ", " + x[0]
endDatePass = endDate.strftime('%y-%b-%d')
x = endDatePass.split('-')
endDatePass = x[2] + " " + x[1] + ", " + x[0]
result = {}
result['leadres'] = []
leadres = []
tempres1 = {}
tempres1['0'] = "0 - 14"
tempres2 = {}
tempres2['0'] = "14 - 45"
tempres3 = {}
tempres3['0'] = "45 - 120"
tempres4 = {}
tempres4['0'] = "120 - 180"
tempres5 = {}
tempres5['0'] = "180 +"
cursor.execute('SELECT * from request where createdOn >= %s && createdOn <= %s && leadTime >= %s && leadTime <= %s && hotelId = %s', [startDate, endDate, 0, 14, hotelId])
leadres1 = cursor.fetchall()
tempres1['1'] = len(leadres1)
leadres.append(tempres1)
cursor.execute('SELECT * from request where createdOn >= %s && createdOn <= %s && leadTime > %s && leadTime <= %s && hotelId = %s', [startDate, endDate, 14, 45, hotelId])
leadres2 = cursor.fetchall()
tempres2['1'] = len(leadres2)
leadres.append(tempres2)
cursor.execute('SELECT * from request where createdOn >= %s && createdOn <= %s && leadTime > %s && leadTime <= %s && hotelId = %s', [startDate, endDate, 45, 120, hotelId])
leadres3 = cursor.fetchall()
tempres3['1'] = len(leadres3)
leadres.append(tempres3)
cursor.execute('SELECT * from request where createdOn >= %s && createdOn <= %s && leadTime > %s && leadTime <= %s && hotelId = %s', [startDate, endDate, 120, 180, hotelId])
leadres4 = cursor.fetchall()
tempres4['1'] = len(leadres4)
leadres.append(tempres4)
cursor.execute('SELECT * from request where createdOn >= %s && createdOn <= %s && leadTime > %s && hotelId = %s', [startDate, endDate, 180, hotelId])
leadres5 = cursor.fetchall()
tempres5['1'] = len(leadres5)
leadres.append(tempres5)
cursor.execute('SELECT * From request where createdOn >= %s && createdOn <= %s && hotelId = %s', [startDate, endDate, hotelId])
requests = cursor.fetchall()
    table = {
        "0 - 2": 0,
        "2 - 8": 0,
        "8 - 24": 0,
        "24 +": 0,
    }
notSubmitted = 0
resHours = []
for r in requests:
        cursor.execute('SELECT submittedOn from response where requestId = %s && (status = %s or status = %s) && hotelId = %s order by submittedOn asc limit 1', [r['id'], statusval2, statusval8, hotelId])
res = cursor.fetchall()
if len(res) == 0:
notSubmitted = notSubmitted + 1
else:
difference = abs(res[0]['submittedOn'] - r['createdOn'])
difference = difference.total_seconds()
hours = divmod(difference, 3600)[0]
resHours.append(hours)
            if 0 <= hours <= 2:
                table["0 - 2"] += 1
            elif 2 < hours <= 8:
                table["2 - 8"] += 1
            elif 8 < hours <= 24:
                table["8 - 24"] += 1
            elif hours > 24:
                table["24 +"] += 1
hotelres = {}
hotelres['notSubmitted'] = notSubmitted
hotelres['table'] = table
cursor.execute('SELECT DISTINCT responseId From response where submittedOn >= %s && submittedOn <= %s && status = %s && hotelId = %s', [startDate, endDate, statusval2, hotelId])
notSubmitted = 0
table = {
"0 - 2": 0,
"2 - 8": 0,
"8 - 24": 0,
"24 +": 0,
}
responseData = cursor.fetchall()
for r in responseData:
cursor.execute('SELECT submittedOn From response where submittedOn >= %s && submittedOn <= %s && status = %s && responseId = %s && hotelId = %s order by submittedOn asc limit 1', [startDate, endDate, statusval2, r['responseId'], hotelId])
tempres = cursor.fetchall()
if len(tempres) == 0:
notSubmitted = notSubmitted + 1
else:
cursor.execute('SELECT submittedOn From response where submittedOn >= %s && submittedOn <= %s && (status = %s or status = %s) && responseId = %s && hotelId = %s order by submittedOn asc limit 1', [startDate, endDate, statusval4, statusval5, r['responseId'], hotelId])
customerres = cursor.fetchall()
if len(customerres) == 0:
notSubmitted = notSubmitted + 1
else:
difference = customerres[0]['submittedOn'] - tempres[0]['submittedOn']
difference = difference.total_seconds()
hours = divmod(difference, 3600)[0]
                    if 0 <= hours <= 2:
                        table["0 - 2"] += 1
                    elif 2 < hours <= 8:
                        table["2 - 8"] += 1
                    elif 8 < hours <= 24:
                        table["8 - 24"] += 1
                    elif hours > 24:
                        table["24 +"] += 1
customeres = {}
customeres['notSubmitted'] = notSubmitted
customeres['table'] = table
revenueres = {}
cursor.execute('SELECT * From request where createdOn >= %s && createdOn <= %s && hotelId = %s', [startDate, endDate, hotelId])
tempres1 = cursor.fetchall()
if len(tempres1) != 0:
total1 = 0
total2 = 0
for r in tempres1:
if (r['status'] == statusval10):
cursor.execute('SELECT * from response where requestId = %s && status = %s && hotelId = %s order by submittedOn desc limit 1', [r['id'], r['status'], hotelId])
res = cursor.fetchall()
if len(res) == 0:
total2 = total2 + 0
else:
total2 = total2 + float(res[0]['totalQuote'])
elif (r['status'] == statusval2 or r['status'] == statusval4 or r['status'] == statusval8 or r['status'] == statusval11):
cursor.execute('SELECT * From response where requestId = %s && status = %s && hotelId = %s order by submittedOn desc limit 1', [r['id'], r['status'], hotelId])
res = cursor.fetchall()
if len(res) == 0:
total1 = total1 + 0
else:
total1 = total1 + float(res[0]['totalQuote'])
revenueres['1'] = total1
revenueres['2'] = total2
else:
revenueres['1'] = 0
revenueres['2'] = 0
startDate = datetime.datetime.today()
endDate = startDate + datetime.timedelta(days = 5)
cursor.execute('SELECT * From request where checkIn >= %s && checkOut <= %s && hotelId = %s order by checkIn', [startDate, endDate, hotelId])
upcoming = cursor.fetchall()
return render_template('analytics/dashboard.html', leadres = leadres, hotelres = hotelres, revenueres = [revenueres], customeres = [customeres], upcoming = upcoming, url = url, startDatePass = startDatePass, endDatePass = endDatePass)
@app.route('/analyticsperformance', methods = ['GET', 'POST'])
@is_logged_in
def analyticsperformance():
return render_template('analytics/performance.html', url = url)
@app.route('/analyticsperformanceGet', methods = ['GET'])
@is_logged_in
def analyticsperformanceGet():
cursor = mysql.connection.cursor()
startDate = request.args.get('startDate')
endDate = request.args.get('endDate')
hotelId = session.get('hotelId')
cursor.execute('SELECT * From request where createdOn >= %s && createdOn <= %s && hotelId = %s', [startDate, endDate, hotelId])
requests = cursor.fetchall()
result = {}
result['requestsNo'] = len(requests)
notSubmitted = 0
resHours = []
table = {
"0 - 2": 0,
"2 - 8": 0,
"8 - 24": 0,
"24 +": 0,
}
for r in requests:
cursor.execute('SELECT submittedOn from response where requestId = %s && (status = %s or status = %s) && hotelId = %s order by submittedOn asc limit 1', [r['id'], statusval2, statusval8, hotelId])
res = cursor.fetchall()
if len(res) == 0:
notSubmitted = notSubmitted + 1
else:
difference = abs(res[0]['submittedOn'] - r['createdOn'])
difference = difference.total_seconds()
hours = divmod(difference, 3600)[0]
resHours.append(hours)
if hours >= 0 and hours <= 2:
table["0 - 2"] = table["0 - 2"] + 1
elif hours > 2 and hours <= 8:
table["2 - 8"] = table["2 - 8"] + 1
elif hours > 8 and hours <= 24:
table["8 - 24"] = table["8 - 24"] + 1
elif hours > 24:
table["24 +"] = table["24 +"] + 1
temp = sum(resHours)
if len(resHours) != 0:
temp = temp / len(resHours)
temp = round(temp, 2)
result['time'] = temp
result['notSubmitted'] = notSubmitted
result['table'] = table
count = 0
cursor.execute('SELECT DISTINCT responseId From response where submittedOn >= %s && submittedOn <= %s && status = %s && hotelId = %s', [startDate, endDate, statusval2, hotelId])
notSubmitted = 0
resHours = []
table = {
"0 - 2": 0,
"2 - 8": 0,
"8 - 24": 0,
"24 +": 0,
}
responseData = cursor.fetchall()
for r in responseData:
cursor.execute('SELECT submittedOn From response where submittedOn >= %s && submittedOn <= %s && status = %s && responseId = %s && hotelId = %s order by submittedOn asc limit 1', [startDate, endDate, statusval2, r['responseId'], hotelId])
tempres = cursor.fetchall()
if len(tempres) == 0:
notSubmitted = notSubmitted + 1
else:
count = count + 1
cursor.execute('SELECT submittedOn From response where submittedOn >= %s && submittedOn <= %s && (status = %s or status = %s) && responseId = %s && hotelId = %s order by submittedOn asc limit 1', [startDate, endDate, statusval4, statusval5, r['responseId'], hotelId])
customerres = cursor.fetchall()
if len(customerres) == 0:
notSubmitted = notSubmitted + 1
else:
difference = customerres[0]['submittedOn'] - tempres[0]['submittedOn']
difference = difference.total_seconds()
hours = divmod(difference, 3600)[0]
resHours.append(hours)
if hours >= 0 and hours <= 2:
table["0 - 2"] = table["0 - 2"] + 1
elif hours > 2 and hours <= 8:
table["2 - 8"] = table["2 - 8"] + 1
elif hours > 8 and hours <= 24:
table["8 - 24"] = table["8 - 24"] + 1
elif hours > 24:
table["24 +"] = table["24 +"] + 1
temp = sum(resHours)
if len(resHours) != 0:
temp = temp / len(resHours)
temp = round(temp, 2)
result['2time'] = temp
result['2notSubmitted'] = notSubmitted
result['2table'] = table
result['responsesNo'] = count
return jsonify(result), 200
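# Note: the averaged 'time'/'2time' figures above are in whole hours, because
# divmod(seconds, 3600)[0] truncates before the mean is taken; the mean itself
# is round(sum(resHours) / len(resHours), 2) when resHours is non-empty.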
@app.route('/analyticsrevenue', methods = ['GET', 'POST'])
@is_logged_in
def analyticsrevenue():
return render_template('analytics/revenue.html', url = url)
@app.route('/analyticsrevenueGet', methods = ['GET', 'POST'])
@is_logged_in
def analyticsrevenueGet():
cursor = mysql.connection.cursor()
startDate = request.args.get('startDate')
endDate = request.args.get('endDate')
category = request.args.get('category')
customerType = request.args.get('customerType')
hotelId = session.get('hotelId')
result = {}
result['category'] = []
result['customerType'] = []
if category != 'Category':
catres = []
tempres = {}
cursor.execute('SELECT * From request where createdOn >= %s && createdOn <= %s && category = %s && hotelId = %s', [startDate, endDate, category, hotelId])
tempres1 = cursor.fetchall()
tempres['0'] = category
if len(tempres1) != 0:
total1 = 0
total2 = 0
for r in tempres1:
if (r['status'] == statusval10):
cursor.execute('SELECT * from response where requestId = %s && status = %s && hotelId = %s order by submittedOn desc limit 1', [r['id'], r['status'], hotelId])
res = cursor.fetchall()
if len(res) == 0:
total2 = total2 + 0
else:
total2 = total2 + float(res[0]['totalQuote'])
elif (r['status'] == statusval2 or r['status'] == statusval4 or r['status'] == statusval8 or r['status'] == statusval11):
cursor.execute('SELECT * From response where requestId = %s && status = %s && hotelId = %s order by submittedOn desc limit 1', [r['id'], r['status'], hotelId])
res = cursor.fetchall()
if len(res) == 0:
total1 = total1 + 0
else:
total1 = total1 + float(res[0]['totalQuote'])
tempres['1'] = total1
tempres['2'] = total2
else:
tempres['1'] = 0
tempres['2'] = 0
catres.append(tempres)
else:
catres = []
cursor.execute('show columns from requestCategory')
categories = cursor.fetchall()
for c in categories:
cat = c['Field']
tempres = {}
cursor.execute('SELECT * From request where createdOn >= %s && createdOn <= %s && category = %s && hotelId = %s', [startDate, endDate, cat, hotelId])
tempres1 = cursor.fetchall()
tempres['0'] = cat
if len(tempres1) != 0:
total1 = 0
total2 = 0
for r in tempres1:
if (r['status'] == statusval10):
cursor.execute('SELECT * from response where requestId = %s && status = %s && hotelId = %s order by submittedOn desc limit 1', [r['id'], r['status'], hotelId])
res = cursor.fetchall()
if len(res) == 0:
total2 = total2 + 0
else:
total2 = total2 + float(res[0]['totalQuote'])
elif (r['status'] == statusval2 or r['status'] == statusval4 or r['status'] == statusval8 or r['status'] == statusval11):
cursor.execute('SELECT * From response where requestId = %s && status = %s && hotelId = %s order by submittedOn desc limit 1', [r['id'], r['status'], hotelId])
res = cursor.fetchall()
if len(res) == 0:
total1 = total1 + 0
else:
total1 = total1 + float(res[0]['totalQuote'])
tempres['1'] = total1
tempres['2'] = total2
else:
tempres['1'] = 0
tempres['2'] = 0
catres.append(tempres)
if customerType != 'Customer Type':
custres = []
tempres = {}
if (customerType == 'IATA'):
cursor.execute('SELECT * From request where createdOn >= %s && createdOn <= %s && userType = %s && hotelId = %s', [startDate, endDate, customerType, hotelId])
tempres1 = cursor.fetchall()
tempres['0'] = customerType
if len(tempres1) != 0:
total1 = 0
total2 = 0
for r in tempres1:
if (r['status'] == statusval10):
cursor.execute('SELECT * from response where requestId = %s && status = %s && hotelId = %s order by submittedOn desc limit 1', [r['id'], r['status'], hotelId])
res = cursor.fetchall()
if len(res) == 0:
total2 = total2 + 0
else:
total2 = total2 + float(res[0]['totalQuote'])
elif (r['status'] == statusval2 or r['status'] == statusval4 or r['status'] == statusval8 or r['status'] == statusval11):
cursor.execute('SELECT * From response where requestId = %s && status = %s && hotelId = %s order by submittedOn desc limit 1', [r['id'], r['status'], hotelId])
res = cursor.fetchall()
if len(res) == 0:
total1 = total1 + 0
else:
total1 = total1 + float(res[0]['totalQuote'])
tempres['1'] = total1
tempres['2'] = total2
else:
tempres['1'] = 0
tempres['2'] = 0
custres.append(tempres)
else:
cursor.execute('SELECT * From request where createdOn >= %s && createdOn <= %s && userType = %s && hotelId = %s',
[startDate, endDate, "customer", hotelId])
tempres1 = cursor.fetchall()
tempres['0'] = customerType
total1 = 0
total2 = 0
for r in tempres1:
cursor.execute('SELECT userSubType from users where email = %s && hotelId = %s', [r['createdFor'], hotelId])
dd = cursor.fetchall()
if (dd[0]['userSubType'] == customerType):
if (r['status'] == statusval10):
cursor.execute('SELECT * from response where requestId = %s && status = %s && hotelId = %s order by submittedOn desc limit 1', [r['id'], r['status'], hotelId])
res = cursor.fetchall()
if len(res) == 0:
total2 = total2 + 0
else:
total2 = total2 + float(res[0]['totalQuote'])
elif (r['status'] == statusval2 or r['status'] == statusval4 or r['status'] == statusval8 or r['status'] == statusval11):
cursor.execute('SELECT * From response where requestId = %s && status = %s && hotelId = %s order by submittedOn desc limit 1', [r['id'], r['status'], hotelId])
res = cursor.fetchall()
if len(res) == 0:
total1 = total1 + 0
else:
total1 = total1 + float(res[0]['totalQuote'])
tempres['1'] = total1
tempres['2'] = total2
custres.append(tempres)
else:
custres = []
tempres = {}
cursor.execute('SELECT * From request where createdOn >= %s && createdOn <= %s && userType = %s && hotelId = %s', [startDate, endDate, "IATA", hotelId])
tempres1 = cursor.fetchall()
tempres['0'] = "IATA"
if len(tempres1) != 0:
total1 = 0
total2 = 0
for r in tempres1:
if (r['status'] == statusval10):
cursor.execute('SELECT * from response where requestId = %s && status = %s && hotelId = %s order by submittedOn desc limit 1', [r['id'], r['status'], hotelId])
res = cursor.fetchall()
if len(res) == 0:
total2 = total2 + 0
else:
total2 = total2 + float(res[0]['totalQuote'])
elif (r['status'] == statusval2 or r['status'] == statusval4 or r['status'] == statusval8 or r['status'] == statusval11):
cursor.execute('SELECT * From response where requestId = %s && status = %s && hotelId = %s order by submittedOn desc limit 1', [r['id'], r['status'], hotelId])
res = cursor.fetchall()
if len(res) == 0:
total1 = total1 + 0
else:
total1 = total1 + float(res[0]['totalQuote'])
tempres['1'] = total1
tempres['2'] = total2
else:
tempres['1'] = 0
tempres['2'] = 0
custres.append(tempres)
cursor.execute('SELECT * From request where createdOn >= %s && createdOn <= %s && userType = %s && hotelId = %s', [startDate, endDate, "customer", hotelId])
total1 = 0
total2 = 0
total3 = 0
total4 = 0
total5 = 0
total6 = 0
tempres1 = cursor.fetchall()
for r in tempres1:
cursor.execute('SELECT userSubType from users where email = %s && hotelId = %s', [r['createdFor'], hotelId])
dd = cursor.fetchall()
if (dd[0]['userSubType'] == 'retail'):
if (r['status'] == statusval10):
cursor.execute('SELECT * from response where requestId = %s && status = %s && hotelId = %s order by submittedOn desc limit 1', [r['id'], r['status'], hotelId])
res = cursor.fetchall()
if len(res) == 0:
total2 = total2 + 0
else:
total2 = total2 + float(res[0]['totalQuote'])
elif (r['status'] == statusval2 or r['status'] == statusval4 or r['status'] == statusval8 or r['status'] == statusval11):
cursor.execute('SELECT * From response where requestId = %s && status = %s && hotelId = %s order by submittedOn desc limit 1', [r['id'], r['status'], hotelId])
res = cursor.fetchall()
if len(res) == 0:
total1 = total1 + 0
else:
total1 = total1 + float(res[0]['totalQuote'])
elif (dd[0]['userSubType'] == 'corporate'):
if (r['status'] == statusval10):
cursor.execute('SELECT * from response where requestId = %s && status = %s && hotelId = %s order by submittedOn desc limit 1', [r['id'], r['status'], hotelId])
res = cursor.fetchall()
if len(res) == 0:
total4 = total4 + 0
else:
total4 = total4 + float(res[0]['totalQuote'])
elif (r['status'] == statusval2 or r['status'] == statusval4 or r['status'] == statusval8 or r['status'] == statusval11):
cursor.execute('SELECT * From response where requestId = %s && status = %s && hotelId = %s order by submittedOn desc limit 1', [r['id'], r['status'], hotelId])
res = cursor.fetchall()
if len(res) == 0:
total3 = total3 + 0
else:
total3 = total3 + float(res[0]['totalQuote'])
elif (dd[0]['userSubType'] == 'tour'):
if (r['status'] == statusval10):
cursor.execute('SELECT * from response where requestId = %s && status = %s && hotelId = %s order by submittedOn desc limit 1', [r['id'], r['status'], hotelId])
res = cursor.fetchall()
if len(res) == 0:
total6 = total6 + 0
else:
total6 = total6 + float(res[0]['totalQuote'])
elif (r['status'] == statusval2 or r['status'] == statusval4 or r['status'] == statusval8 or r['status'] == statusval11):
cursor.execute('SELECT * From response where requestId = %s && status = %s && hotelId = %s order by submittedOn desc limit 1', [r['id'], r['status'], hotelId])
res = cursor.fetchall()
if len(res) == 0:
total5 = total5 + 0
else:
total5 = total5 + float(res[0]['totalQuote'])
tempres1 = {}
tempres2 = {}
tempres3 = {}
tempres1['0'] = "retail"
tempres1['1'] = total1
tempres1['2'] = total2
custres.append(tempres1)
tempres2['0'] = "corporate"
tempres2['1'] = total3
tempres2['2'] = total4
custres.append(tempres2)
tempres3['0'] = "tour"
tempres3['1'] = total5
tempres3['2'] = total6
custres.append(tempres3)
result['category'] = catres
result['customerType'] = custres
return jsonify(result), 200
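# --- Editor's sketch (assumption, illustrative names) ---
# Every revenue branch above repeats the same latest-response lookup, feeding
# total2 for statusval10 and total1 for the other closing statuses (these
# statusval* constants already exist in this module). A single helper:
def quote_totals(cursor, rows, hotelId):
    total1, total2 = 0.0, 0.0
    for r in rows:
        cursor.execute('SELECT totalQuote From response where requestId = %s && status = %s && hotelId = %s order by submittedOn desc limit 1', [r['id'], r['status'], hotelId])
        res = cursor.fetchall()
        if len(res) == 0:
            continue
        if r['status'] == statusval10:
            total2 += float(res[0]['totalQuote'])
        elif r['status'] in (statusval2, statusval4, statusval8, statusval11):
            total1 += float(res[0]['totalQuote'])
    return total1, total2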
@app.route('/analyticstracking', methods = ['GET', 'POST'])
@is_logged_in
def analyticstracking():
cursor = mysql.connection.cursor()
date = datetime.date.today()
enddate = date + datetime.timedelta(days = 31)
enddate = datetime.datetime.combine(enddate, datetime.datetime.min.time())
hotelId = session.get('hotelId')
cursor.execute('SELECT * from settingsTimelimit where hotelId = %s order by submittedOn desc limit 1', [hotelId])
expiry = cursor.fetchall()
if len(expiry) != 0:
expiry = expiry[0]['value']
else:
expiry = 0  # assumed fallback: avoids float(expiry) failing when no time limit is configured
cursor.execute('SELECT * From request where status = %s && hotelId = %s', [statusval2, hotelId])
requests = cursor.fetchall()
result = []
for r in requests:
tempresult = {}
cursor.execute('SELECT * From response where requestId = %s && status = %s && hotelId = %s order by submittedOn desc limit 1', [r['id'], statusval2, hotelId])
response = cursor.fetchall()
if len(response) == 0:
continue
submittedOn = response[0]['submittedOn']
expiration = submittedOn + datetime.timedelta(hours = float(expiry))
if expiration < enddate:
tempresult['id'] = r['id']
tempresult['expiry'] = expiration
result.append(tempresult)
cursor.execute('SELECT * From request where status = %s && hotelId = %s', [statusval4, hotelId])
requests = cursor.fetchall()
result2 = []
for r in requests:
tempresult = {}
cursor.execute('SELECT paymentGtd from response where requestId = %s && status = %s && hotelId = %s order by submittedOn desc limit 1', [r['id'], statusval4, hotelId])
response = cursor.fetchall()
if len(response) != 0:
if response[0]['paymentGtd'] == 1:
tempresult['id'] = r['id']
tempresult['groupName'] = r['groupName']
tempresult['checkIn'] = r['checkIn']
tempresult['checkOut'] = r['checkOut']
result2.append(tempresult)
cursor.execute('SELECT * from request where checkIn >= %s && checkOut <= %s && hotelId = %s', [date, enddate, hotelId])
requests = cursor.fetchall()
result3 = []
for r in requests:
tempresult = {}
tempresult['checkIn'] = r['checkIn']
tempresult['checkOut'] = r['checkOut']
tempresult['status'] = r['status']
tempresult['id'] = r['id']
tempresult['category'] = r['category']
tempresult['groupName'] = r['groupName']
result3.append(tempresult)
return render_template('analytics/tracking.html', result = result, result2 = result2, result3 = result3)
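# Note: settingsTimelimit.value is interpreted as a number of hours, so a
# quote's expiration is its latest submittedOn plus timedelta(hours=float(expiry)).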
@app.route('/stdreport', methods = ['GET', 'POST'])
@is_logged_in
def stdreport():
return render_template('analytics/stdreport.html', url = url)
@app.route('/analyticsstdreportGet', methods = ['GET', 'POST'])
@is_logged_in
def analyticsstdreportGet():
cursor = mysql.connection.cursor()
startDate = request.args.get('startDate')
endDate = request.args.get('endDate')
hotelId = session.get('hotelId')
cursor.execute('SELECT * From request where createdOn >= %s && createdOn <= %s && hotelId = %s', [startDate, endDate, hotelId])
requestData = cursor.fetchall()
for r in requestData:
cursor.execute('SELECT * From response where requestId = %s && status = %s && hotelId = %s order by submittedOn desc limit 1', [r['id'], statusval2, hotelId])
totalQuote = cursor.fetchall()
if len(totalQuote) == 0:
r['totalQuote'] = 0
r['evaluatedFare'] = 0
else:
r['totalQuote'] = totalQuote[0]['totalQuote']
r['expiryTime'] = totalQuote[0]['expiryTime']
r['negotiationReason'] = totalQuote[0]['negotiationReason']
r['expectedFare'] = totalQuote[0]['expectedFare']
r['overrideReason'] = totalQuote[0]['overrideReason']
r['overrideFlag'] = totalQuote[0]['overrideFlag']
r['timesNegotiated'] = totalQuote[0]['timesNegotiated']
responseId = r['id'] + "R"
submittedOn = totalQuote[0]['submittedOn']
cursor.execute('SELECT * from responseDaywise where responseId = %s and submittedOn = %s && hotelId = %s', [responseId, submittedOn, hotelId])
prev = cursor.fetchall()
total = 0
for p in prev:
rate = p['ratePerRoom'].split('(')
if len(rate) == 1:
total = total + int(p['count']) * float(rate[0])
else:
rate = rate[1].split(' : ')[1].split('[')[0]
total = total + int(p['count']) * float(rate)
r['evaluatedFare'] = total
return {'response' : requestData}, 200
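# --- Editor's sketch (the decorated format is inferred from the split logic
# above, so treat it as an assumption) ---
# ratePerRoom is either a bare number, or a string whose parenthesised part
# carries "<label> : <rate>[...]"; the parsing reduces to:
def parse_rate(rate_per_room):
    parts = rate_per_room.split('(')
    if len(parts) == 1:
        return float(parts[0])
    return float(parts[1].split(' : ')[1].split('[')[0])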
@app.route('/analyticsDashboardGet', methods = ['GET', 'POST'])
@is_logged_in
def analyticsDashboardGet():
startDate = request.args.get('startDate')
endDate = request.args.get('endDate')
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
result = {}
result['leadres'] = []
leadres = []
tempres1 = {}
tempres1['0'] = "0 - 14"
tempres2 = {}
tempres2['0'] = "14 - 45"
tempres3 = {}
tempres3['0'] = "45 - 120"
tempres4 = {}
tempres4['0'] = "120 - 180"
tempres5 = {}
tempres5['0'] = "180 +"
cursor.execute('SELECT * from request where createdOn >= %s && createdOn <= %s && leadTime >= %s && leadTime <= %s && hotelId = %s', [startDate, endDate, 0, 14, hotelId])
leadres1 = cursor.fetchall()
tempres1['1'] = len(leadres1)
leadres.append(tempres1)
cursor.execute('SELECT * from request where createdOn >= %s && createdOn <= %s && leadTime > %s && leadTime <= %s && hotelId = %s', [startDate, endDate, 14, 45, hotelId])
leadres2 = cursor.fetchall()
tempres2['1'] = len(leadres2)
leadres.append(tempres2)
cursor.execute('SELECT * from request where createdOn >= %s && createdOn <= %s && leadTime > %s && leadTime <= %s && hotelId = %s', [startDate, endDate, 45, 120, hotelId])
leadres3 = cursor.fetchall()
tempres3['1'] = len(leadres3)
leadres.append(tempres3)
cursor.execute('SELECT * from request where createdOn >= %s && createdOn <= %s && leadTime > %s && leadTime <= %s && hotelId = %s', [startDate, endDate, 120, 180, hotelId])
leadres4 = cursor.fetchall()
tempres4['1'] = len(leadres4)
leadres.append(tempres4)
cursor.execute('SELECT * from request where createdOn >= %s && createdOn <= %s && leadTime > %s && hotelId = %s', [startDate, endDate, 180, hotelId])
leadres5 = cursor.fetchall()
tempres5['1'] = len(leadres5)
leadres.append(tempres5)
result['leadres'] = leadres
cursor.execute('SELECT * From request where createdOn >= %s && createdOn <= %s && hotelId = %s', [startDate, endDate, hotelId])
requests = cursor.fetchall()
table = {
"0 - 2": 0,
"2 - 8": 0,
"8 - 24": 0,
"24 +": 0,
}
notSubmitted = 0
resHours = []
for r in requests:
cursor.execute('SELECT submittedOn from response where requestId = %s && (status = %s or status = %s) && hotelId = %s order by submittedOn asc limit 1', [r['id'], statusval2, statusval8, hotelId])
res = cursor.fetchall()
if len(res) == 0:
notSubmitted = notSubmitted + 1
else:
difference = abs(res[0]['submittedOn'] - r['createdOn'])
difference = difference.total_seconds()
hours = divmod(difference, 3600)[0]
resHours.append(hours)
if hours >= 0 and hours <= 2:
table["0 - 2"] = table["0 - 2"] + 1
elif hours > 2 and hours <= 8:
table["2 - 8"] = table["2 - 8"] + 1
elif hours > 8 and hours <= 24:
table["8 - 24"] = table["8 - 24"] + 1
elif hours > 24:
table["24 +"] = table["24 +"] + 1
hotelres = {}
hotelres['notSubmitted'] = notSubmitted
hotelres['table'] = table
result['hotelres'] = hotelres
cursor.execute('SELECT DISTINCT responseId From response where submittedOn >= %s && submittedOn <= %s && status = %s && hotelId = %s', [startDate, endDate, statusval2, hotelId])
notSubmitted = 0
table = {
"0 - 2": 0,
"2 - 8": 0,
"8 - 24": 0,
"24 +": 0,
}
responseData = cursor.fetchall()
for r in responseData:
cursor.execute('SELECT submittedOn From response where submittedOn >= %s && submittedOn <= %s && status = %s && responseId = %s && hotelId = %s order by submittedOn asc limit 1', [startDate, endDate, statusval2, r['responseId'], hotelId])
tempres = cursor.fetchall()
if len(tempres) == 0:
notSubmitted = notSubmitted + 1
else:
cursor.execute('SELECT submittedOn From response where submittedOn >= %s && submittedOn <= %s && (status = %s or status = %s) && responseId = %s && hotelId = %s order by submittedOn asc limit 1', [startDate, endDate, statusval4, statusval5, r['responseId'], hotelId])
customerres = cursor.fetchall()
if len(customerres) == 0:
notSubmitted = notSubmitted + 1
else:
difference = customerres[0]['submittedOn'] - tempres[0]['submittedOn']
difference = difference.total_seconds()
hours = divmod(difference, 3600)[0]
if hours >= 0 and hours <= 2:
table["0 - 2"] = table["0 - 2"] + 1
elif hours > 2 and hours <= 8:
table["2 - 8"] = table["2 - 8"] + 1
elif hours > 8 and hours <= 24:
table["8 - 24"] = table["8 - 24"] + 1
elif hours > 24:
table["24 +"] = table["24 +"] + 1
customeres = {}
customeres['notSubmitted'] = notSubmitted
customeres['table'] = table
result['customeres'] = customeres
revenueres = {}
cursor.execute('SELECT * From request where createdOn >= %s && createdOn <= %s && hotelId = %s', [startDate, endDate, hotelId])
tempres1 = cursor.fetchall()
if len(tempres1) != 0:
total1 = 0
total2 = 0
for r in tempres1:
if (r['status'] == statusval10):
cursor.execute('SELECT * from response where requestId = %s && status = %s && hotelId = %s order by submittedOn desc limit 1', [r['id'], r['status'], hotelId])
res = cursor.fetchall()
if len(res) == 0:
total2 = total2 + 0
else:
total2 = total2 + float(res[0]['totalQuote'])
elif (r['status'] == statusval2 or r['status'] == statusval4 or r['status'] == statusval8 or r['status'] == statusval11):
cursor.execute('SELECT * From response where requestId = %s && status = %s && hotelId = %s order by submittedOn desc limit 1', [r['id'], r['status'], hotelId])
res = cursor.fetchall()
if len(res) == 0:
total1 = total1 + 0
else:
total1 = total1 + float(res[0]['totalQuote'])
revenueres['1'] = total1
revenueres['2'] = total2
else:
revenueres['1'] = 0
revenueres['2'] = 0
result['revenueres'] = revenueres
return jsonify(result), 200
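# --- Editor's sketch (illustrative) ---
# The five lead-time queries above differ only in their bounds (the first
# bucket is inclusive at 0, the last is open-ended), so a table could drive
# them in a loop:
LEAD_BUCKETS = [("0 - 14", 0, 14), ("14 - 45", 14, 45), ("45 - 120", 45, 120),
                ("120 - 180", 120, 180), ("180 +", 180, None)]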
@app.route('/resubmitRequest', methods = ['GET', 'POST'])
@is_logged_in
def resubmitRequest():
inp = request.json
username = session['email']
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
cursor.execute('SELECT * From request where id = %s && hotelId = %s', [inp['id'], hotelId])
prevRequest = cursor.fetchall()
cursor.execute('SELECT Count(*) from request where hotelId = %s', [hotelId])
count = cursor.fetchall()
count = count[0]['Count(*)'] + 1
if (count < 10):
id = "TR" + "00" + str(count)
elif (count < 100):
id = "TR" + "0" + str(count)
else:
id = "TR" + str(count)
today = datetime.date.today()
d1 = prevRequest[0]['checkIn']
lead = d1 - today
lead = lead.days
today = datetime.datetime.today()
prevRequest = prevRequest[0]
cursor.execute('INSERT INTO request(category, groupName, checkIn, checkOut, nights, commissionable, groupBlock, foc, foc1, foc2, budget, formPayment, paymentTerms, paymentDays, comments, id, createdBy, createdFor, leadTime, status, userType, createdOn, hotelId) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [
prevRequest['category'], prevRequest['groupName'], prevRequest['checkIn'], prevRequest['checkOut'], prevRequest['nights'], prevRequest['commissionable'], prevRequest['groupBlock'], prevRequest['foc'], prevRequest['foc1'], prevRequest['foc2'], prevRequest['budget'], prevRequest['formPayment'], prevRequest['paymentTerms'], prevRequest['paymentDays'], prevRequest['comments'], id, username, prevRequest['createdFor'], lead, statusval1, prevRequest['userType'], today, hotelId
])
cursor.execute('SELECT * from request1Bed where id = %s && hotelId = %s', [inp['id'], hotelId])
table = cursor.fetchall()
for t in table:
cursor.execute('INSERT INTO request1Bed(date, occupancy, count, id, hotelId) VALUES(%s, %s, %s, %s, %s)', [t['date'], t['occupancy'], t['count'], id, hotelId])
cursor.execute('SELECT * from request2Bed where id = %s && hotelId = %s', [inp['id'], hotelId])
table = cursor.fetchall()
for t in table:
cursor.execute('INSERT INTO request2Bed(date, occupancy, count, id, hotelId) VALUES(%s, %s, %s, %s, %s)', [t['date'], t['occupancy'], t['count'], id, hotelId])
mysql.connection.commit()
flash('Your Request has been entered', 'success')
return ('', 204)
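# Editor's note: with the else branch added above, the "TR" id construction is
# equivalent to zero-padding the running count to three digits:
#     id = "TR" + str(count).zfill(3)   # TR001 ... TR099, TR100, ...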
# Req module ended
@app.route('/strategyForecast', methods = ['GET', 'POST'])
@is_logged_in
def strategyForecast():
return render_template('strategy/forecast.html')
@app.route('/strategyEvaluation', methods = ['GET', 'POST'])
@is_logged_in
def strategyEvaluation():
return render_template('strategy/evaluation.html')
@app.route('/strategyAncillary', methods = ['GET', 'POST'])
@is_logged_in
def strategyAncillary():
return render_template('strategy/Ancillary.html')
@app.route('/settingBusinessReward', methods = ['GET', 'POST'])
@is_logged_in
def settingBusinessReward():
return render_template('settings/BusinessReward.html')
@app.route('/addHotel', methods = ['GET', 'POST'])
@is_logged_in
def addHotel():
return render_template('developer/addHotel.html')
@app.route('/addHotelSubmit', methods = ['GET', 'POST'])
@is_logged_in
def addHotelSubmit():
if request.method == 'POST':
hotelName = request.form['hotelName']
email = request.form['email']
address = request.form.get('address')
contactName = request.form['contactName']
city = request.form['city']
state = request.form['state']
country = request.form['country']
phone = request.form['phone']
zipv = request.form['zip']
default_email = request.form['default_email']
cursor = mysql.connection.cursor()
cursor.execute('SELECT * From mapHotelId where email = %s', [email])
data = cursor.fetchall()
if len(data) == 0:
cursor.execute('INSERT INTO mapHotelId(hotelName, email, address, contactName, city, state, country, phone, zip, default_email) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)', [hotelName, email, address, contactName, city, state, country, phone, zipv, default_email])
cursor.execute('SELECT hotelId from mapHotelId where hotelName = %s && email = %s', [hotelName, email])
hotelId = cursor.fetchall()
hotelId = hotelId[0]['hotelId']
password = sha256_crypt.hash('trompar2020')
firstName = contactName.split(' ')[0]
cursor.execute('INSERT INTO users(firstName, email, password, userType, userSubType, hotelId) VALUES(%s, %s, %s, %s, %s, %s)', [firstName, email, password, "hoteluser", "hotelAdmin", hotelId])
cursor.execute('INSERT INTO hotelUsers(fullName, email, password, userType, hotelId, email_verified, active) VALUES(%s, %s, %s, %s, %s, %s, %s)', (contactName, email, password, "hotelAdmin", hotelId, 1 ,1))
mysql.connection.commit()
sendMailAddHotel('User Credentials', email)
else:
flash('Email already registered', 'danger')
return render_template('developer/addHotel.html')
flash('New Hotel has been registered', 'success')
return redirect(url_for('home2'))
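# Note: passlib's sha256_crypt.hash() generates a fresh salt on every call, so
# stored hashes must be checked with sha256_crypt.verify(candidate, stored_hash)
# rather than by re-hashing and comparing.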
@app.route('/addCustomer', methods = ['GET', 'POST'])
@is_logged_in
def addCustomer():
return render_template('users/addCustomer.html')
@app.route('/addCustomerSubmit', methods = ['GET', 'POST'])
@is_logged_in
def addCustomerSubmit():
customerType = request.form['customerType']
if customerType == 'iata':
return redirect(url_for('iatar'))
elif customerType == 'retail':
return redirect(url_for('customerr'))
elif customerType == 'corporate':
return redirect(url_for('customerC'))
elif customerType == 'tour':
return redirect(url_for('customerT'))
@app.route('/editHotel', methods = ['GET', 'POST'])
@is_logged_in
def editHotel():
cursor = mysql.connection.cursor()
cursor.execute("SELECT hotelName From mapHotelId")
hotels = cursor.fetchall()
data = []
for hotel in hotels:
data.append(hotel['hotelName'])
hotels = data
return render_template('developer/editHotel.html', hotels = hotels)
@app.route('/eHotel', methods = ['GET', 'POST'])
@is_logged_in
def eHotel():
hotel = request.form.get('hotelName')
cursor = mysql.connection.cursor()
cursor.execute('SELECT * From mapHotelId where hotelName = %s', [hotel])
data = cursor.fetchall()
if len(data) != 0:
data = data[0]
return render_template('developer/eHotel.html', data = data)
@app.route('/editHotelSubmit', methods = ['GET', 'POST'])
@is_logged_in
def editHotelSubmit():
hotelName = request.form['hotelName']
email = request.form['email']
address = request.form.get('address')
contactName = request.form['contactName']
city = request.form['city']
state = request.form['state']
country = request.form['country']
phone = request.form['phone']
zipv = request.form['zip']
default_email = request.form['default_email']
cursor = mysql.connection.cursor()
cursor.execute('UPDATE mapHotelId set address = %s, contactName = %s, city = %s, state = %s, country = %s, phone = %s, zip = %s, default_email = %s, hotelName = %s where email = %s', [address, contactName, city, state, country, phone, zipv, default_email, hotelName, email])
mysql.connection.commit()
flash('Hotel has been edited', 'success')
return redirect(url_for('home2'))
@app.route('/submitCreateNewUser', methods = ['GET', 'POST'])
@is_logged_in
def submitCreateNewUser():
cursor = mysql.connection.cursor()
hotelId = session.get('hotelId')
inp = request.json
customerType = inp['customerType']
if (customerType == 'retail'):
fullName = inp['retailfullName']
firstName = fullName.split(' ')[0]
email = inp['retailemail']
password = inp['password']
phone = inp['retailphone']
country = inp['retailcountry']
password = sha256_crypt.hash(password)
cursor.execute('SELECT * From users where email = %s', [email])
data = cursor.fetchall()
if len(data) == 0:
token = generateConfirmationToken(email)
""" sendMail(
subjectv='Confirm Email',
recipientsv=email,
linkv='confirm_email',
tokenv=token,
bodyv='Confirm your email by clicking this link ',
) """
cursor.execute('INSERT INTO users(firstName, email, password, userType, userSubType, hotelId) Values(%s, %s, %s, %s, %s, %s)',
(firstName, email, password, 'customer', 'retail', hotelId))
cursor.execute('INSERT INTO customers(fullName, email, country, phone, password, userType, hotelId) Values(%s, %s, %s, %s, %s, %s, %s)', (fullName, email, country, phone, password, 'retail', hotelId))
mysql.connection.commit()
cursor.close()
else:
flash('Email Already Registered', 'danger')
return ('', 204)
elif (customerType == 'iata'):
fullName = inp['iatafullName']
firstName = fullName.split(' ')[0]
email = inp['iataemail']
password = inp['password']
phone = inp['iataphone']
country = inp['iatacountry']
agencyName = inp['iataagencyName']
iataCode = inp['iataCode']
password = sha256_crypt.hash(password)
cursor.execute('SELECT * From users where email = %s', [email])
data = cursor.fetchall()
if len(data) == 0:
token = generateConfirmationToken(email)
""" sendMail(
subjectv='Confirm Email',
recipientsv=email,
linkv='confirm_email',
tokenv=token,
bodyv='Confirm your email by clicking this link ',
) """
cursor.execute('INSERT INTO users(firstName, email, password, userType, userSubType, hotelId) Values(%s, %s, %s, %s, %s, %s)',
(firstName, email, password, 'IATA', '', hotelId))
cursor.execute('INSERT INTO iataUsers(fullName, email, country, phone, password, iataCode, agencyName, hotelId) Values(%s, %s, %s, %s, %s, %s, %s, %s)',
(fullName, email, country, phone, password, iataCode, agencyName, hotelId))
mysql.connection.commit()
cursor.close()
else:
flash('Email Already Registered', 'danger')
return ('', 204)
elif (customerType == 'corporate'):
fullName = inp['corpfullName']
firstName = fullName.split(' ')[0]
email = inp['corpemail']
password = inp['password']
phone = inp['corpphone']
country = inp['corpcountry']
organizationName = inp['corporganizationName']
password = sha256_crypt.hash(password)
cursor.execute('SELECT * From users where email = %s', [email])
data = cursor.fetchall()
if len(data) == 0:
token = generateConfirmationToken(email)
""" sendMail(
subjectv='Confirm Email',
recipientsv=email,
linkv='confirm_email',
tokenv=token,
bodyv='Confirm your email by clicking this link ',
) """
cursor.execute('INSERT INTO users(firstName, email, password, userType, userSubType, hotelId) Values(%s, %s, %s, %s, %s, %s)',
(firstName, email, password, 'customer', 'corporate', hotelId))
cursor.execute('INSERT INTO customers(fullName, email, country, phone, password, userType, organizationName, hotelId) Values(%s, %s, %s, %s, %s, %s, %s, %s)',
(fullName, email, country, phone, password, 'corporate', organizationName, hotelId))
mysql.connection.commit()
cursor.close()
else:
flash('Email Already Registered', 'danger')
return ('', 204)
elif (customerType == 'tour'):
fullName = inp['tourfullName']
firstName = fullName.split(' ')[0]
email = inp['touremail']
password = inp['password']
phone = inp['tourphone']
country = inp['tourcountry']
agencyName = inp['touragencyName']
password = sha256_crypt.hash(password)
cursor.execute('SELECT * From users where email = %s', [email])
data = cursor.fetchall()
if len(data) == 0:
token = generateConfirmationToken(email)
""" sendMail(
subjectv='Confirm Email',
recipientsv=email,
linkv='confirm_email',
tokenv=token,
bodyv='Confirm your email by clicking this link ',
) """
cursor.execute('INSERT INTO users(firstName, email, password, userType, userSubType, hotelId) Values(%s, %s, %s, %s, %s, %s)',
(firstName, email, password, 'customer', 'tour', hotelId))
cursor.execute('INSERT INTO customers(fullName, email, country, phone, password, userType, agencyName, hotelId) Values(%s, %s, %s, %s, %s, %s, %s, %s)',
(fullName, email, country, phone, password, 'tour', agencyName, hotelId))
mysql.connection.commit()
cursor.close()
else:
flash('Email Already Registered', 'danger')
return ('', 204)
flash('New user successfully added', 'success')
return ('', 204)
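# --- Editor's sketch (assumption, illustrative names) ---
# The four customerType branches above differ mainly in their form-field
# prefixes; a prefix map could collapse the shared extraction:
FIELD_PREFIXES = {'retail': 'retail', 'iata': 'iata', 'corporate': 'corp', 'tour': 'tour'}
def common_fields(inp, customer_type):
    p = FIELD_PREFIXES[customer_type]
    return (inp[p + 'fullName'], inp[p + 'email'],
            inp[p + 'phone'], inp[p + 'country'], inp['password'])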
@app.route('/postMail', methods = ['GET', 'POST'])
def postMail():
return render_template('/mails/postMail.html')
if __name__ == "__main__":
app.run(debug = True, threaded = True)
| 43.563467
| 1,054
| 0.554504
| 29,783
| 295,491
| 5.483397
| 0.033677
| 0.011536
| 0.015669
| 0.018737
| 0.810602
| 0.793169
| 0.773336
| 0.743105
| 0.713469
| 0.692613
| 0
| 0.018964
| 0.295268
| 295,491
| 6,782
| 1,055
| 43.569891
| 0.765278
| 0.001354
| 0
| 0.722575
| 0
| 0.033157
| 0.296439
| 0.014414
| 0.000705
| 0
| 0
| 0
| 0
| 1
| 0.025397
| false
| 0.017637
| 0.002469
| 0.004056
| 0.061199
| 0.000529
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1eedd24586d4635c0406030453873dcd6034e7e6
| 3,710
|
py
|
Python
|
src/news/migrations/0002_auto_20170103_1325.py
|
Busaka/esl
|
66ef2216b6bb14be23a59a1b1038d1e897874939
|
[
"MIT"
] | null | null | null |
src/news/migrations/0002_auto_20170103_1325.py
|
Busaka/esl
|
66ef2216b6bb14be23a59a1b1038d1e897874939
|
[
"MIT"
] | null | null | null |
src/news/migrations/0002_auto_20170103_1325.py
|
Busaka/esl
|
66ef2216b6bb14be23a59a1b1038d1e897874939
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-01-03 13:25
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('news', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='new',
name='file_five',
),
migrations.RemoveField(
model_name='new',
name='file_four',
),
migrations.RemoveField(
model_name='new',
name='file_six',
),
migrations.RemoveField(
model_name='new',
name='file_three',
),
migrations.RemoveField(
model_name='new',
name='file_two',
),
migrations.RemoveField(
model_name='new',
name='h2_paragraph1',
),
migrations.RemoveField(
model_name='new',
name='h2_paragraph2',
),
migrations.RemoveField(
model_name='new',
name='h2_paragraph3',
),
migrations.RemoveField(
model_name='new',
name='h3_paragraph1',
),
migrations.RemoveField(
model_name='new',
name='h3_paragraph2',
),
migrations.RemoveField(
model_name='new',
name='h3_paragraph3',
),
migrations.RemoveField(
model_name='new',
name='h4_paragraph1',
),
migrations.RemoveField(
model_name='new',
name='h4_paragraph2',
),
migrations.RemoveField(
model_name='new',
name='h4_paragraph3',
),
migrations.RemoveField(
model_name='new',
name='h5_paragraph1',
),
migrations.RemoveField(
model_name='new',
name='h5_paragraph2',
),
migrations.RemoveField(
model_name='new',
name='h5_paragraph3',
),
migrations.RemoveField(
model_name='new',
name='h6_paragraph1',
),
migrations.RemoveField(
model_name='new',
name='h6_paragraph2',
),
migrations.RemoveField(
model_name='new',
name='h6_paragraph3',
),
migrations.RemoveField(
model_name='new',
name='heading_five',
),
migrations.RemoveField(
model_name='new',
name='heading_four',
),
migrations.RemoveField(
model_name='new',
name='heading_six',
),
migrations.RemoveField(
model_name='new',
name='heading_three',
),
migrations.RemoveField(
model_name='new',
name='heading_two',
),
migrations.RemoveField(
model_name='new',
name='image_five',
),
migrations.RemoveField(
model_name='new',
name='image_four',
),
migrations.RemoveField(
model_name='new',
name='image_six',
),
migrations.RemoveField(
model_name='new',
name='image_three',
),
migrations.AlterField(
model_name='new',
name='image_one',
field=models.ImageField(blank=True, upload_to='news/news_photos'),
),
migrations.AlterField(
model_name='new',
name='image_two',
field=models.ImageField(blank=True, upload_to='news/news_photos'),
),
]
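# Note: RemoveField is reversible at the schema level (Django re-adds each
# column from the recorded model state), but data held in the dropped columns
# is lost once this migration is applied.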
| 26.126761
| 78
| 0.492183
| 303
| 3,710
| 5.788779
| 0.194719
| 0.159065
| 0.212087
| 0.282782
| 0.891106
| 0.891106
| 0.891106
| 0.059293
| 0.059293
| 0.059293
| 0
| 0.022616
| 0.392183
| 3,710
| 141
| 79
| 26.312057
| 0.755211
| 0.018329
| 0
| 0.708955
| 1
| 0
| 0.136576
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.014925
| 0
| 0.037313
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4802b467ac238ff399731cc5148c9ebdb3ba982b
| 3,606
|
py
|
Python
|
unit_tests/test_manage_wounds.py
|
RozanovN/PythonTextBasedAdventureGame
|
175f154c2c1e697bf9bf9ffcf82c820e2b17ecbd
|
[
"MIT",
"Unlicense"
] | null | null | null |
unit_tests/test_manage_wounds.py
|
RozanovN/PythonTextBasedAdventureGame
|
175f154c2c1e697bf9bf9ffcf82c820e2b17ecbd
|
[
"MIT",
"Unlicense"
] | null | null | null |
unit_tests/test_manage_wounds.py
|
RozanovN/PythonTextBasedAdventureGame
|
175f154c2c1e697bf9bf9ffcf82c820e2b17ecbd
|
[
"MIT",
"Unlicense"
] | null | null | null |
from unittest import TestCase
from game import manage_wounds
from unittest.mock import patch
import io
class Test(TestCase):
@patch('game.has_evaded', return_value=True)
@patch('builtins.input', return_value="1")
@patch('sys.stdout', new_callable=io.StringIO)
def test_manage_wounds_has_evaded_print_statement(self, mock_stdout, _, __):
character = {"Name": "abc", "Current wounds": 5}
damage = 5
expected = "\x1b[1;32mHowever, abc evades it.\x1b[0;20m\n"\
"\x1b[1;32m\n"\
"Enter anything to continue:\x1b[0;20m"
manage_wounds(damage, character)
actual = mock_stdout.getvalue()
self.assertEqual(expected + "\n", actual)
@patch('game.has_evaded', return_value=False)
@patch('game.has_sustained', return_value=True)
@patch('builtins.input', return_value="1")
@patch('sys.stdout', new_callable=io.StringIO)
def test_manage_wounds_has_sustained_print_statement(self, mock_stdout, _, __, ___):
character = {"Name": "abc", "Current wounds": 5}
damage = 5
expected = "\x1b[1;31mAbc was not able to evade.\x1b[0;20m\n" \
"\x1b[1;32mHowever, abc sustains it and only receives 2 damage.\x1b[0;20m\n"\
"\x1b[1;32m\n" \
"Enter anything to continue:\x1b[0;20m"
manage_wounds(damage, character)
actual = mock_stdout.getvalue()
self.assertEqual(expected + "\n", actual)
@patch('game.has_evaded', return_value=False)
@patch('game.has_sustained', return_value=False)
@patch('builtins.input', return_value="1")
@patch('sys.stdout', new_callable=io.StringIO)
def test_manage_wounds_full_damage_statement(self, mock_stdout, _, __, ___):
character = {"Name": "abc", "Current wounds": 5}
damage = 5
expected = "\x1b[1;31mAbc was not able to evade.\x1b[0;20m\n" \
"\x1b[1;31mAbc was not able to sustain.\x1b[0;20m\n"\
"\x1b[1;32m\n" \
"Enter anything to continue:\x1b[0;20m"
manage_wounds(damage, character)
actual = mock_stdout.getvalue()
self.assertEqual(expected + "\n", actual)
@patch('game.has_evaded', return_value=True)
@patch('game.has_sustained', return_value=False)
@patch('builtins.input', return_value="1")
def test_manage_wounds_has_evaded_wounds_are_not_changed(self, _, __, ___):
character = {"Name": "abc", "Current wounds": 5}
damage = 5
expected = {"Name": "abc", "Current wounds": 5}
manage_wounds(damage, character)
actual = character
self.assertEqual(expected, actual)
@patch('game.has_evaded', return_value=False)
@patch('game.has_sustained', return_value=True)
@patch('builtins.input', return_value="1")
def test_manage_wounds_has_sustain_receives_half_of_damage(self, _, __, ___):
character = {"Name": "abc", "Current wounds": 5}
damage = 5
expected = {"Name": "abc", "Current wounds": 3}
manage_wounds(damage, character)
actual = character
self.assertEqual(expected, actual)
@patch('game.has_evaded', return_value=False)
@patch('game.has_sustained', return_value=False)
@patch('builtins.input', return_value="1")
def test_manage_wounds_full_damage_receives_full_damage(self, _, __, ___):
character = {"Name": "abc", "Current wounds": 5}
damage = 5
expected = {"Name": "abc", "Current wounds": 0}
manage_wounds(damage, character)
actual = character
self.assertEqual(expected, actual)
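# Note: the expected strings embed ANSI colour sequences ("\x1b[1;32m" bright
# green, "\x1b[1;31m" red, "\x1b[0;20m" resetting the style); patching
# sys.stdout with io.StringIO lets each test compare the raw escapes verbatim.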
| 42.928571
| 90
| 0.63228
| 452
| 3,606
| 4.818584
| 0.15708
| 0.085859
| 0.060606
| 0.082645
| 0.883379
| 0.88292
| 0.868228
| 0.859963
| 0.849862
| 0.849862
| 0
| 0.025189
| 0.22934
| 3,606
| 83
| 91
| 43.445783
| 0.752789
| 0
| 0
| 0.773333
| 0
| 0
| 0.238214
| 0
| 0
| 0
| 0
| 0
| 0.08
| 1
| 0.08
| false
| 0
| 0.053333
| 0
| 0.146667
| 0.026667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4864337d19c3fe235787df36f490760e262f39db
| 98
|
py
|
Python
|
api/views/index.py
|
cderwin/maps
|
0146260935a749679396022b6d2b1d90b6df2539
|
[
"MIT"
] | null | null | null |
api/views/index.py
|
cderwin/maps
|
0146260935a749679396022b6d2b1d90b6df2539
|
[
"MIT"
] | 7
|
2016-02-09T07:18:48.000Z
|
2016-02-09T07:25:40.000Z
|
api/views/index.py
|
cderwin/maps
|
0146260935a749679396022b6d2b1d90b6df2539
|
[
"MIT"
] | null | null | null |
from flask import render_template
def handle_request():
return render_template('index.html')
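# Usage sketch (assumed wiring, not shown in this file): a Flask app would
# bind this view with app.add_url_rule('/', 'index', handle_request).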
| 19.6
| 40
| 0.785714
| 13
| 98
| 5.692308
| 0.846154
| 0.378378
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.132653
| 98
| 4
| 41
| 24.5
| 0.870588
| 0
| 0
| 0
| 0
| 0
| 0.102041
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 8
|
48770de197518cc8b5177383eff2b85e1919dfee
| 118
|
py
|
Python
|
locale/pot/api/plotting/_autosummary/pyvista-themes-ParaViewTheme-render_points_as_spheres-1.py
|
tkoyama010/pyvista-doc-translations
|
23bb813387b7f8bfe17e86c2244d5dd2243990db
|
[
"MIT"
] | 4
|
2020-08-07T08:19:19.000Z
|
2020-12-04T09:51:11.000Z
|
locale/pot/api/plotting/_autosummary/pyvista-themes-DocumentTheme-render_points_as_spheres-1.py
|
tkoyama010/pyvista-doc-translations
|
23bb813387b7f8bfe17e86c2244d5dd2243990db
|
[
"MIT"
] | 19
|
2020-08-06T00:24:30.000Z
|
2022-03-30T19:22:24.000Z
|
locale/pot/api/plotting/_autosummary/pyvista-themes-ParaViewTheme-render_points_as_spheres-1.py
|
tkoyama010/pyvista-doc-translations
|
23bb813387b7f8bfe17e86c2244d5dd2243990db
|
[
"MIT"
] | 1
|
2021-03-09T07:50:40.000Z
|
2021-03-09T07:50:40.000Z
|
# Render points as spheres by default globally.
#
import pyvista
pyvista.global_theme.render_points_as_spheres = True
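# The same effect can be scoped to a single mesh rather than set globally:
# pyvista's add_mesh() accepts render_points_as_spheres=True per call.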
| 23.6
| 52
| 0.830508
| 17
| 118
| 5.529412
| 0.705882
| 0.255319
| 0.297872
| 0.446809
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.118644
| 118
| 4
| 53
| 29.5
| 0.903846
| 0.381356
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
488239e6138e4104f3cba7115560987f7a7fb3b2
| 135
|
py
|
Python
|
xmlrpc2/requests.py
|
dstufft/xmlrpc2
|
f997562bdb699481a5baf95483a940dfd13ffe76
|
[
"BSD-2-Clause"
] | 2
|
2017-01-16T07:03:34.000Z
|
2020-02-08T11:03:58.000Z
|
xmlrpc2/requests.py
|
dstufft/xmlrpc2
|
f997562bdb699481a5baf95483a940dfd13ffe76
|
[
"BSD-2-Clause"
] | 3
|
2015-05-12T05:36:42.000Z
|
2018-03-18T15:48:56.000Z
|
xmlrpc2/requests.py
|
dstufft/xmlrpc2
|
f997562bdb699481a5baf95483a940dfd13ffe76
|
[
"BSD-2-Clause"
] | 4
|
2015-05-11T17:14:03.000Z
|
2019-08-11T21:00:04.000Z
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from requests import *
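# This shim simply re-exports the third-party 'requests' package under the
# xmlrpc2 namespace, so internal imports stay stable if the HTTP backend moves.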
| 22.5
| 39
| 0.866667
| 17
| 135
| 6.058824
| 0.470588
| 0.291262
| 0.466019
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125926
| 135
| 5
| 40
| 27
| 0.872881
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
48885a9df1b4a946e542e5eeb28695ce7f5a806d
| 3,405
|
py
|
Python
|
azurebotservice.py
|
z1pti3/jimiPlugin-azurebotservice
|
23f6250377a5870508f58ac824c7f8e840163fae
|
[
"Apache-2.0"
] | 1
|
2021-07-26T15:02:49.000Z
|
2021-07-26T15:02:49.000Z
|
azurebotservice.py
|
z1pti3/jimiPlugin-azurebotservice
|
23f6250377a5870508f58ac824c7f8e840163fae
|
[
"Apache-2.0"
] | null | null | null |
azurebotservice.py
|
z1pti3/jimiPlugin-azurebotservice
|
23f6250377a5870508f58ac824c7f8e840163fae
|
[
"Apache-2.0"
] | null | null | null |
from core import plugin, model
class _azurebotservice(plugin._plugin):
version = 0.141
def install(self):
# Register models
model.registerModel("azurebotserviceReply","_azurebotserviceReply","_action","plugins.azurebotservice.models.action")
model.registerModel("azurebotserviceWait","_azurebotserviceWait","_action","plugins.azurebotservice.models.action")
model.registerModel("azurebotserviceEnd","_azurebotserviceEnd","_action","plugins.azurebotservice.models.action")
model.registerModel("azurebotserviceSend","_azurebotserviceSend","_action","plugins.azurebotservice.models.action")
model.registerModel("azurebotserviceUpdateActivity","_azurebotserviceUpdateActivity","_action","plugins.azurebotservice.models.action")
model.registerModel("azurebotserviceGetActivityMembers","_azurebotserviceGetActivityMembers","_action","plugins.azurebotservice.models.action")
model.registerModel("azurebotserviceconversation","_azurebotserviceconversation","_document","plugins.azurebotservice.models.conversation")
model.registerModel("azurebotserviceIncomingMessage","_azurebotserviceIncomingMessage","_trigger","plugins.azurebotservice.models.trigger")
return True
def uninstall(self):
# deregister models
model.deregisterModel("azurebotserviceReply","_azurebotserviceReply","_action","plugins.azurebotservice.models.action")
model.deregisterModel("azurebotserviceWait","_azurebotserviceWait","_action","plugins.azurebotservice.models.action")
model.deregisterModel("azurebotserviceEnd","_azurebotserviceEnd","_action","plugins.azurebotservice.models.action")
model.deregisterModel("azurebotserviceSend","_azurebotserviceSend","_action","plugins.azurebotservice.models.action")
model.deregisterModel("azurebotserviceUpdateActivity","_azurebotserviceUpdateActivity","_action","plugins.azurebotservice.models.action")
model.deregisterModel("azurebotserviceGetActivityMembers","_azurebotserviceGetActivityMembers","_action","plugins.azurebotservice.models.action")
model.deregisterModel("azurebotserviceconversation","_azurebotserviceconversation","_document","plugins.azurebotservice.models.conversation")
model.deregisterModel("azurebotserviceIncomingMessage","_azurebotserviceIncomingMessage","_trigger","plugins.azurebotservice.models.trigger")
return True
def upgrade(self,LatestPluginVersion):
if self.version < 0.11:
model.registerModel("azurebotserviceWait","_azurebotserviceWait","_action","plugins.azurebotservice.models.action")
model.registerModel("azurebotserviceconversation","_azurebotserviceconversation","_document","plugins.azurebotservice.models.conversation")
if self.version < 0.13:
model.registerModel("azurebotserviceEnd","_azurebotserviceEnd","_action","plugins.azurebotservice.models.action")
if self.version < 0.14:
model.registerModel("azurebotserviceGetActivityMembers","_azurebotserviceGetActivityMembers","_action","plugins.azurebotservice.models.action")
model.registerModel("azurebotserviceSend","_azurebotserviceSend","_action","plugins.azurebotservice.models.action")
model.registerModel("azurebotserviceUpdateActivity","_azurebotserviceUpdateActivity","_action","plugins.azurebotservice.models.action")
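# --- Editor's sketch (illustrative) ---
# The (name, class, kind, path) tuples repeated across install/uninstall/
# upgrade could be table-driven; two entries shown, mirroring install():
MODEL_TABLE = [
    ("azurebotserviceReply", "_azurebotserviceReply", "_action", "plugins.azurebotservice.models.action"),
    ("azurebotserviceIncomingMessage", "_azurebotserviceIncomingMessage", "_trigger", "plugins.azurebotservice.models.trigger"),
]
def register_all(register, table):
    for entry in table:
        register(*entry)   # e.g. register_all(model.registerModel, MODEL_TABLE)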
| 85.125
| 155
| 0.782379
| 243
| 3,405
| 10.773663
| 0.164609
| 0.184874
| 0.235294
| 0.233766
| 0.846448
| 0.846448
| 0.846448
| 0.775401
| 0.493506
| 0.44385
| 0
| 0.004236
| 0.098678
| 3,405
| 39
| 156
| 87.307692
| 0.848811
| 0.009692
| 0
| 0.424242
| 0
| 0
| 0.619472
| 0.457406
| 0
| 0
| 0
| 0
| 0
| 1
| 0.090909
| false
| 0
| 0.030303
| 0
| 0.242424
| 0
| 0
| 0
| 1
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
48891aa5b15420225bf0c353062228b974a38b35
| 383,971
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_infra_sla_oper.py
|
Maikor/ydk-py
|
b86c4a7c570ae3b2c5557d098420446df5de4929
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_infra_sla_oper.py
|
Maikor/ydk-py
|
b86c4a7c570ae3b2c5557d098420446df5de4929
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_infra_sla_oper.py
|
Maikor/ydk-py
|
b86c4a7c570ae3b2c5557d098420446df5de4929
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
""" Cisco_IOS_XR_infra_sla_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR infra\-sla package operational data.
This module contains definitions
for the following management objects\:
sla\: SLA oper commands
sla\-nodes\: sla nodes
Copyright (c) 2013\-2018 by Cisco Systems, Inc.
All rights reserved.
"""
from collections import OrderedDict
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class Sla(Entity):
"""
SLA oper commands
.. attribute:: protocols
Table of all SLA protocols
**type**\: :py:class:`Protocols <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols>`
"""
_prefix = 'infra-sla-oper'
_revision = '2015-11-09'
def __init__(self):
super(Sla, self).__init__()
self._top_entity = None
self.yang_name = "sla"
self.yang_parent_name = "Cisco-IOS-XR-infra-sla-oper"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("protocols", ("protocols", Sla.Protocols))])
self._leafs = OrderedDict()
self.protocols = Sla.Protocols()
self.protocols.parent = self
self._children_name_map["protocols"] = "protocols"
self._segment_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla, [], name, value)
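# Note: this file is ydk-gen output for the Cisco-IOS-XR-infra-sla-oper YANG
# model; the _segment_path/_absolute_path lambdas supply the path segments YDK
# uses when building data paths for the entity.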
class Protocols(Entity):
"""
Table of all SLA protocols
.. attribute:: ethernet
The Ethernet SLA protocol
**type**\: :py:class:`Ethernet <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet>`
"""
_prefix = 'infra-sla-oper'
_revision = '2015-11-09'
def __init__(self):
super(Sla.Protocols, self).__init__()
self.yang_name = "protocols"
self.yang_parent_name = "sla"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("Cisco-IOS-XR-ethernet-cfm-oper:ethernet", ("ethernet", Sla.Protocols.Ethernet))])
self._leafs = OrderedDict()
self.ethernet = Sla.Protocols.Ethernet()
self.ethernet.parent = self
self._children_name_map["ethernet"] = "Cisco-IOS-XR-ethernet-cfm-oper:ethernet"
self._segment_path = lambda: "protocols"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols, [], name, value)
class Ethernet(Entity):
"""
The Ethernet SLA protocol
.. attribute:: statistics_on_demand_currents
Table of current statistics for SLA on\-demand operations
**type**\: :py:class:`StatisticsOnDemandCurrents <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandCurrents>`
.. attribute:: operations
Table of SLA operations
**type**\: :py:class:`Operations <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.Operations>`
.. attribute:: statistics_historicals
Table of historical statistics for SLA operations
**type**\: :py:class:`StatisticsHistoricals <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsHistoricals>`
.. attribute:: statistics_on_demand_historicals
Table of historical statistics for SLA on\-demand operations
**type**\: :py:class:`StatisticsOnDemandHistoricals <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals>`
.. attribute:: config_errors
Table of SLA configuration errors on configured operations
**type**\: :py:class:`ConfigErrors <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.ConfigErrors>`
.. attribute:: on_demand_operations
Table of SLA on\-demand operations
**type**\: :py:class:`OnDemandOperations <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.OnDemandOperations>`
.. attribute:: statistics_currents
Table of current statistics for SLA operations
**type**\: :py:class:`StatisticsCurrents <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsCurrents>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet, self).__init__()
self.yang_name = "ethernet"
self.yang_parent_name = "protocols"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("statistics-on-demand-currents", ("statistics_on_demand_currents", Sla.Protocols.Ethernet.StatisticsOnDemandCurrents)), ("operations", ("operations", Sla.Protocols.Ethernet.Operations)), ("statistics-historicals", ("statistics_historicals", Sla.Protocols.Ethernet.StatisticsHistoricals)), ("statistics-on-demand-historicals", ("statistics_on_demand_historicals", Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals)), ("config-errors", ("config_errors", Sla.Protocols.Ethernet.ConfigErrors)), ("on-demand-operations", ("on_demand_operations", Sla.Protocols.Ethernet.OnDemandOperations)), ("statistics-currents", ("statistics_currents", Sla.Protocols.Ethernet.StatisticsCurrents))])
self._leafs = OrderedDict()
self.statistics_on_demand_currents = Sla.Protocols.Ethernet.StatisticsOnDemandCurrents()
self.statistics_on_demand_currents.parent = self
self._children_name_map["statistics_on_demand_currents"] = "statistics-on-demand-currents"
self.operations = Sla.Protocols.Ethernet.Operations()
self.operations.parent = self
self._children_name_map["operations"] = "operations"
self.statistics_historicals = Sla.Protocols.Ethernet.StatisticsHistoricals()
self.statistics_historicals.parent = self
self._children_name_map["statistics_historicals"] = "statistics-historicals"
self.statistics_on_demand_historicals = Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals()
self.statistics_on_demand_historicals.parent = self
self._children_name_map["statistics_on_demand_historicals"] = "statistics-on-demand-historicals"
self.config_errors = Sla.Protocols.Ethernet.ConfigErrors()
self.config_errors.parent = self
self._children_name_map["config_errors"] = "config-errors"
self.on_demand_operations = Sla.Protocols.Ethernet.OnDemandOperations()
self.on_demand_operations.parent = self
self._children_name_map["on_demand_operations"] = "on-demand-operations"
self.statistics_currents = Sla.Protocols.Ethernet.StatisticsCurrents()
self.statistics_currents.parent = self
self._children_name_map["statistics_currents"] = "statistics-currents"
self._segment_path = lambda: "Cisco-IOS-XR-ethernet-cfm-oper:ethernet"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet, [], name, value)
class StatisticsOnDemandCurrents(Entity):
"""
Table of current statistics for SLA on\-demand
operations
.. attribute:: statistics_on_demand_current
Current statistics data for an SLA on\-demand operation
**type**\: list of :py:class:`StatisticsOnDemandCurrent <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents, self).__init__()
self.yang_name = "statistics-on-demand-currents"
self.yang_parent_name = "ethernet"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("statistics-on-demand-current", ("statistics_on_demand_current", Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent))])
self._leafs = OrderedDict()
self.statistics_on_demand_current = YList(self)
self._segment_path = lambda: "statistics-on-demand-currents"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents, [], name, value)
class StatisticsOnDemandCurrent(Entity):
"""
Current statistics data for an SLA on\-demand
operation
.. attribute:: operation_id
Operation ID
**type**\: int
**range:** 1..4294967295
.. attribute:: domain_name
Domain name
**type**\: str
.. attribute:: interface_name
Interface name
**type**\: str
**pattern:** [a\-zA\-Z0\-9.\_/\-]+
.. attribute:: mep_id
MEP ID in the range 1 to 8191. Either MEP ID or MAC address must be specified
**type**\: int
**range:** 1..8191
.. attribute:: mac_address
Unicast MAC Address in xxxx.xxxx.xxxx format. Either MEP ID or MAC address must be specified
**type**\: str
**pattern:** [0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2}){5}
.. attribute:: specific_options
Options specific to the type of operation
**type**\: :py:class:`SpecificOptions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions>`
.. attribute:: operation_schedule
Operation schedule
**type**\: :py:class:`OperationSchedule <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationSchedule>`
.. attribute:: probe_type
Type of probe used by the operation
**type**\: str
.. attribute:: display_short
Short display name used by the operation
**type**\: str
.. attribute:: display_long
Long display name used by the operation
**type**\: str
.. attribute:: flr_calculation_interval
Interval between FLR calculations for SLM, in milliseconds
**type**\: int
**range:** 0..4294967295
**units**\: millisecond
.. attribute:: operation_metric
Metrics gathered for the operation
**type**\: list of :py:class:`OperationMetric <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent, self).__init__()
self.yang_name = "statistics-on-demand-current"
self.yang_parent_name = "statistics-on-demand-currents"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("specific-options", ("specific_options", Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions)), ("operation-schedule", ("operation_schedule", Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationSchedule)), ("operation-metric", ("operation_metric", Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric))])
self._leafs = OrderedDict([
('operation_id', (YLeaf(YType.uint32, 'operation-id'), ['int'])),
('domain_name', (YLeaf(YType.str, 'domain-name'), ['str'])),
('interface_name', (YLeaf(YType.str, 'interface-name'), ['str'])),
('mep_id', (YLeaf(YType.uint32, 'mep-id'), ['int'])),
('mac_address', (YLeaf(YType.str, 'mac-address'), ['str'])),
('probe_type', (YLeaf(YType.str, 'probe-type'), ['str'])),
('display_short', (YLeaf(YType.str, 'display-short'), ['str'])),
('display_long', (YLeaf(YType.str, 'display-long'), ['str'])),
('flr_calculation_interval', (YLeaf(YType.uint32, 'flr-calculation-interval'), ['int'])),
])
self.operation_id = None
self.domain_name = None
self.interface_name = None
self.mep_id = None
self.mac_address = None
self.probe_type = None
self.display_short = None
self.display_long = None
self.flr_calculation_interval = None
self.specific_options = Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions()
self.specific_options.parent = self
self._children_name_map["specific_options"] = "specific-options"
self.operation_schedule = Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationSchedule()
self.operation_schedule.parent = self
self._children_name_map["operation_schedule"] = "operation-schedule"
self.operation_metric = YList(self)
self._segment_path = lambda: "statistics-on-demand-current"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-on-demand-currents/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent, ['operation_id', 'domain_name', 'interface_name', 'mep_id', 'mac_address', 'probe_type', 'display_short', 'display_long', 'flr_calculation_interval'], name, value)
class SpecificOptions(Entity):
"""
Options specific to the type of operation
.. attribute:: configured_operation_options
Parameters for a configured operation
**type**\: :py:class:`ConfiguredOperationOptions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions.ConfiguredOperationOptions>`
.. attribute:: ondemand_operation_options
Parameters for an ondemand operation
**type**\: :py:class:`OndemandOperationOptions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions.OndemandOperationOptions>`
.. attribute:: oper_type
OperType
**type**\: :py:class:`SlaOperOperation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper.SlaOperOperation>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions, self).__init__()
self.yang_name = "specific-options"
self.yang_parent_name = "statistics-on-demand-current"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("configured-operation-options", ("configured_operation_options", Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions.ConfiguredOperationOptions)), ("ondemand-operation-options", ("ondemand_operation_options", Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions.OndemandOperationOptions))])
self._leafs = OrderedDict([
('oper_type', (YLeaf(YType.enumeration, 'oper-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperOperation', '')])),
])
self.oper_type = None
self.configured_operation_options = Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions.ConfiguredOperationOptions()
self.configured_operation_options.parent = self
self._children_name_map["configured_operation_options"] = "configured-operation-options"
self.ondemand_operation_options = Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions.OndemandOperationOptions()
self.ondemand_operation_options.parent = self
self._children_name_map["ondemand_operation_options"] = "ondemand-operation-options"
self._segment_path = lambda: "specific-options"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-on-demand-currents/statistics-on-demand-current/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions, ['oper_type'], name, value)
class ConfiguredOperationOptions(Entity):
"""
Parameters for a configured operation
.. attribute:: profile_name
Name of the profile used by the operation
**type**\: str
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions.ConfiguredOperationOptions, self).__init__()
self.yang_name = "configured-operation-options"
self.yang_parent_name = "specific-options"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('profile_name', (YLeaf(YType.str, 'profile-name'), ['str'])),
])
self.profile_name = None
self._segment_path = lambda: "configured-operation-options"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-on-demand-currents/statistics-on-demand-current/specific-options/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions.ConfiguredOperationOptions, ['profile_name'], name, value)
class OndemandOperationOptions(Entity):
"""
Parameters for an ondemand operation
.. attribute:: ondemand_operation_id
ID of the ondemand operation
**type**\: int
**range:** 0..4294967295
.. attribute:: probe_count
Total number of probes sent during the operation
**type**\: int
**range:** 0..255
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions.OndemandOperationOptions, self).__init__()
self.yang_name = "ondemand-operation-options"
self.yang_parent_name = "specific-options"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('ondemand_operation_id', (YLeaf(YType.uint32, 'ondemand-operation-id'), ['int'])),
('probe_count', (YLeaf(YType.uint8, 'probe-count'), ['int'])),
])
self.ondemand_operation_id = None
self.probe_count = None
self._segment_path = lambda: "ondemand-operation-options"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-on-demand-currents/statistics-on-demand-current/specific-options/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.SpecificOptions.OndemandOperationOptions, ['ondemand_operation_id', 'probe_count'], name, value)
class OperationSchedule(Entity):
"""
Operation schedule
.. attribute:: start_time
Start time of the first probe, in seconds since the Unix Epoch
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: start_time_configured
Whether or not the operation start time was explicitly configured
**type**\: bool
.. attribute:: schedule_duration
Duration of a probe for the operation in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: schedule_interval
Interval between the start times of consecutive probes, in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationSchedule, self).__init__()
self.yang_name = "operation-schedule"
self.yang_parent_name = "statistics-on-demand-current"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('start_time', (YLeaf(YType.uint32, 'start-time'), ['int'])),
('start_time_configured', (YLeaf(YType.boolean, 'start-time-configured'), ['bool'])),
('schedule_duration', (YLeaf(YType.uint32, 'schedule-duration'), ['int'])),
('schedule_interval', (YLeaf(YType.uint32, 'schedule-interval'), ['int'])),
])
self.start_time = None
self.start_time_configured = None
self.schedule_duration = None
self.schedule_interval = None
self._segment_path = lambda: "operation-schedule"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-on-demand-currents/statistics-on-demand-current/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationSchedule, ['start_time', 'start_time_configured', 'schedule_duration', 'schedule_interval'], name, value)
class OperationMetric(Entity):
"""
Metrics gathered for the operation
.. attribute:: config
Configuration of the metric
**type**\: :py:class:`Config <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Config>`
.. attribute:: bucket
Buckets stored for the metric
**type**\: list of :py:class:`Bucket <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric, self).__init__()
self.yang_name = "operation-metric"
self.yang_parent_name = "statistics-on-demand-current"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("config", ("config", Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Config)), ("bucket", ("bucket", Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket))])
self._leafs = OrderedDict()
self.config = Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Config()
self.config.parent = self
self._children_name_map["config"] = "config"
self.bucket = YList(self)
self._segment_path = lambda: "operation-metric"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-on-demand-currents/statistics-on-demand-current/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric, [], name, value)
class Config(Entity):
"""
Configuration of the metric
.. attribute:: metric_type
Type of metric to which this configuration applies
**type**\: :py:class:`SlaRecordableMetric <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper.SlaRecordableMetric>`
.. attribute:: bins_count
Total number of bins into which to aggregate. 0 if no aggregation
**type**\: int
**range:** 0..65535
.. attribute:: bins_width
Width of each bin into which to aggregate. 0 if no aggregation. For SLM, the units of this value are in single units of percent; for LMM they are in tenths of percent; for other measurements they are in milliseconds
**type**\: int
**range:** 0..65535
.. attribute:: bucket_size
Size of buckets into which measurements are collected
**type**\: int
**range:** 0..255
.. attribute:: bucket_size_unit
Whether bucket size is 'per\-probe' or 'probes'
**type**\: :py:class:`SlaBucketSize <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper.SlaBucketSize>`
.. attribute:: buckets_archive
Maximum number of buckets to store in memory
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Config, self).__init__()
self.yang_name = "config"
self.yang_parent_name = "operation-metric"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('metric_type', (YLeaf(YType.enumeration, 'metric-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaRecordableMetric', '')])),
('bins_count', (YLeaf(YType.uint16, 'bins-count'), ['int'])),
('bins_width', (YLeaf(YType.uint16, 'bins-width'), ['int'])),
('bucket_size', (YLeaf(YType.uint8, 'bucket-size'), ['int'])),
('bucket_size_unit', (YLeaf(YType.enumeration, 'bucket-size-unit'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaBucketSize', '')])),
('buckets_archive', (YLeaf(YType.uint32, 'buckets-archive'), ['int'])),
])
self.metric_type = None
self.bins_count = None
self.bins_width = None
self.bucket_size = None
self.bucket_size_unit = None
self.buckets_archive = None
self._segment_path = lambda: "config"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-on-demand-currents/statistics-on-demand-current/operation-metric/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Config, ['metric_type', 'bins_count', 'bins_width', 'bucket_size', 'bucket_size_unit', 'buckets_archive'], name, value)
class Bucket(Entity):
"""
Buckets stored for the metric
.. attribute:: contents
The contents of the bucket; bins or samples
**type**\: :py:class:`Contents <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents>`
.. attribute:: start_at
Absolute time at which the bucket started being filled
**type**\: int
**range:** 0..4294967295
.. attribute:: duration
Length of time for which the bucket is being filled in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: sent
Number of packets sent in the probe
**type**\: int
**range:** 0..4294967295
.. attribute:: lost
Number of lost packets in the probe
**type**\: int
**range:** 0..4294967295
.. attribute:: corrupt
Number of corrupt packets in the probe
**type**\: int
**range:** 0..4294967295
.. attribute:: out_of_order
Number of packets received out\-of\-order in the probe
**type**\: int
**range:** 0..4294967295
.. attribute:: duplicates
Number of duplicate packets received in the probe
**type**\: int
**range:** 0..4294967295
.. attribute:: minimum
Overall minimum result in the probe, in microseconds or millionths of a percent
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: maximum
Overall maximum result in the probe, in microseconds or millionths of a percent
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: time_of_minimum
Absolute time that the minimum value was recorded
**type**\: int
**range:** 0..4294967295
.. attribute:: time_of_maximum
Absolute time that the maximum value was recorded
**type**\: int
**range:** 0..4294967295
.. attribute:: average
Mean of the results in the probe, in microseconds or millionths of a percent
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: standard_deviation
Standard deviation of the results in the probe, in microseconds or millionths of a percent
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: result_count
The count of samples collected in the bucket
**type**\: int
**range:** 0..4294967295
.. attribute:: data_sent_count
The number of data packets sent across the bucket, used in the calculation of overall FLR
**type**\: int
**range:** 0..4294967295
.. attribute:: data_lost_count
The number of data packets lost across the bucket, used in the calculation of overall FLR
**type**\: int
**range:** 0..4294967295
.. attribute:: overall_flr
Frame Loss Ratio across the whole bucket, in millionths of a percent
**type**\: int
**range:** \-2147483648..2147483647
**units**\: percentage
.. attribute:: suspect_start_mid_bucket
Results suspect due to a probe starting mid\-way through a bucket
**type**\: bool
.. attribute:: suspect_schedule_latency
Results suspect due to scheduling latency causing one or more packets to not be sent
**type**\: bool
.. attribute:: suspect_send_fail
Results suspect due to failure to send one or more packets
**type**\: bool
.. attribute:: suspect_premature_end
Results suspect due to a probe ending prematurely
**type**\: bool
.. attribute:: suspect_clock_drift
Results suspect as more than 10 seconds of time drift was detected
**type**\: bool
.. attribute:: suspect_memory_allocation_failed
Results suspect due to a memory allocation failure
**type**\: bool
.. attribute:: suspect_cleared_mid_bucket
Results suspect as bucket was cleared mid\-way through being filled
**type**\: bool
.. attribute:: suspect_probe_restarted
Results suspect as probe restarted mid\-way through the bucket
**type**\: bool
.. attribute:: suspect_management_latency
Results suspect as processing of results has been delayed
**type**\: bool
.. attribute:: suspect_multiple_buckets
Results suspect as the probe has been configured across multiple buckets
**type**\: bool
.. attribute:: suspect_misordering
Results suspect as misordering has been detected, affecting results
**type**\: bool
.. attribute:: suspect_flr_low_packet_count
Results suspect as FLR calculated based on a low packet count
**type**\: bool
.. attribute:: premature_reason
If the probe ended prematurely, the error that caused a probe to end
**type**\: int
**range:** 0..4294967295
.. attribute:: premature_reason_string
Description of the error code that caused the probe to end prematurely. For informational purposes only
**type**\: str
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket, self).__init__()
self.yang_name = "bucket"
self.yang_parent_name = "operation-metric"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("contents", ("contents", Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents))])
self._leafs = OrderedDict([
('start_at', (YLeaf(YType.uint32, 'start-at'), ['int'])),
('duration', (YLeaf(YType.uint32, 'duration'), ['int'])),
('sent', (YLeaf(YType.uint32, 'sent'), ['int'])),
('lost', (YLeaf(YType.uint32, 'lost'), ['int'])),
('corrupt', (YLeaf(YType.uint32, 'corrupt'), ['int'])),
('out_of_order', (YLeaf(YType.uint32, 'out-of-order'), ['int'])),
('duplicates', (YLeaf(YType.uint32, 'duplicates'), ['int'])),
('minimum', (YLeaf(YType.int32, 'minimum'), ['int'])),
('maximum', (YLeaf(YType.int32, 'maximum'), ['int'])),
('time_of_minimum', (YLeaf(YType.uint32, 'time-of-minimum'), ['int'])),
('time_of_maximum', (YLeaf(YType.uint32, 'time-of-maximum'), ['int'])),
('average', (YLeaf(YType.int32, 'average'), ['int'])),
('standard_deviation', (YLeaf(YType.int32, 'standard-deviation'), ['int'])),
('result_count', (YLeaf(YType.uint32, 'result-count'), ['int'])),
('data_sent_count', (YLeaf(YType.uint32, 'data-sent-count'), ['int'])),
('data_lost_count', (YLeaf(YType.uint32, 'data-lost-count'), ['int'])),
('overall_flr', (YLeaf(YType.int32, 'overall-flr'), ['int'])),
('suspect_start_mid_bucket', (YLeaf(YType.boolean, 'suspect-start-mid-bucket'), ['bool'])),
('suspect_schedule_latency', (YLeaf(YType.boolean, 'suspect-schedule-latency'), ['bool'])),
('suspect_send_fail', (YLeaf(YType.boolean, 'suspect-send-fail'), ['bool'])),
('suspect_premature_end', (YLeaf(YType.boolean, 'suspect-premature-end'), ['bool'])),
('suspect_clock_drift', (YLeaf(YType.boolean, 'suspect-clock-drift'), ['bool'])),
('suspect_memory_allocation_failed', (YLeaf(YType.boolean, 'suspect-memory-allocation-failed'), ['bool'])),
('suspect_cleared_mid_bucket', (YLeaf(YType.boolean, 'suspect-cleared-mid-bucket'), ['bool'])),
('suspect_probe_restarted', (YLeaf(YType.boolean, 'suspect-probe-restarted'), ['bool'])),
('suspect_management_latency', (YLeaf(YType.boolean, 'suspect-management-latency'), ['bool'])),
('suspect_multiple_buckets', (YLeaf(YType.boolean, 'suspect-multiple-buckets'), ['bool'])),
('suspect_misordering', (YLeaf(YType.boolean, 'suspect-misordering'), ['bool'])),
('suspect_flr_low_packet_count', (YLeaf(YType.boolean, 'suspect-flr-low-packet-count'), ['bool'])),
('premature_reason', (YLeaf(YType.uint32, 'premature-reason'), ['int'])),
('premature_reason_string', (YLeaf(YType.str, 'premature-reason-string'), ['str'])),
])
self.start_at = None
self.duration = None
self.sent = None
self.lost = None
self.corrupt = None
self.out_of_order = None
self.duplicates = None
self.minimum = None
self.maximum = None
self.time_of_minimum = None
self.time_of_maximum = None
self.average = None
self.standard_deviation = None
self.result_count = None
self.data_sent_count = None
self.data_lost_count = None
self.overall_flr = None
self.suspect_start_mid_bucket = None
self.suspect_schedule_latency = None
self.suspect_send_fail = None
self.suspect_premature_end = None
self.suspect_clock_drift = None
self.suspect_memory_allocation_failed = None
self.suspect_cleared_mid_bucket = None
self.suspect_probe_restarted = None
self.suspect_management_latency = None
self.suspect_multiple_buckets = None
self.suspect_misordering = None
self.suspect_flr_low_packet_count = None
self.premature_reason = None
self.premature_reason_string = None
self.contents = Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents()
self.contents.parent = self
self._children_name_map["contents"] = "contents"
self._segment_path = lambda: "bucket"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-on-demand-currents/statistics-on-demand-current/operation-metric/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket, ['start_at', 'duration', 'sent', 'lost', 'corrupt', 'out_of_order', 'duplicates', 'minimum', 'maximum', 'time_of_minimum', 'time_of_maximum', 'average', 'standard_deviation', 'result_count', 'data_sent_count', 'data_lost_count', 'overall_flr', 'suspect_start_mid_bucket', 'suspect_schedule_latency', 'suspect_send_fail', 'suspect_premature_end', 'suspect_clock_drift', 'suspect_memory_allocation_failed', 'suspect_cleared_mid_bucket', 'suspect_probe_restarted', 'suspect_management_latency', 'suspect_multiple_buckets', 'suspect_misordering', 'suspect_flr_low_packet_count', 'premature_reason', 'premature_reason_string'], name, value)
class Contents(Entity):
"""
The contents of the bucket; bins or samples
.. attribute:: aggregated
Result bins in an SLA metric bucket
**type**\: :py:class:`Aggregated <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Aggregated>`
.. attribute:: unaggregated
Result samples in an SLA metric bucket
**type**\: :py:class:`Unaggregated <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Unaggregated>`
.. attribute:: bucket_type
BucketType
**type**\: :py:class:`SlaOperBucket <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper.SlaOperBucket>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents, self).__init__()
self.yang_name = "contents"
self.yang_parent_name = "bucket"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("aggregated", ("aggregated", Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Aggregated)), ("unaggregated", ("unaggregated", Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Unaggregated))])
self._leafs = OrderedDict([
('bucket_type', (YLeaf(YType.enumeration, 'bucket-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperBucket', '')])),
])
self.bucket_type = None
self.aggregated = Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Aggregated()
self.aggregated.parent = self
self._children_name_map["aggregated"] = "aggregated"
self.unaggregated = Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Unaggregated()
self.unaggregated.parent = self
self._children_name_map["unaggregated"] = "unaggregated"
self._segment_path = lambda: "contents"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-on-demand-currents/statistics-on-demand-current/operation-metric/bucket/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents, ['bucket_type'], name, value)
class Aggregated(Entity):
"""
Result bins in an SLA metric bucket
.. attribute:: bins
The bins of an SLA metric bucket
**type**\: list of :py:class:`Bins <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Aggregated.Bins>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Aggregated, self).__init__()
self.yang_name = "aggregated"
self.yang_parent_name = "contents"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("bins", ("bins", Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Aggregated.Bins))])
self._leafs = OrderedDict()
self.bins = YList(self)
self._segment_path = lambda: "aggregated"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-on-demand-currents/statistics-on-demand-current/operation-metric/bucket/contents/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Aggregated, [], name, value)
class Bins(Entity):
"""
The bins of an SLA metric bucket
.. attribute:: lower_bound
Lower bound (inclusive) of the bin, in milliseconds or single units of percent. This field is not used for LMM measurements
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: upper_bound
Upper bound (exclusive) of the bin, in milliseconds or single units of percent. This field is not used for LMM measurements
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: lower_bound_tenths
Lower bound (inclusive) of the bin, in tenths of percent. This field is only used for LMM measurements
**type**\: int
**range:** \-2147483648..2147483647
**units**\: percentage
.. attribute:: upper_bound_tenths
Upper bound (exclusive) of the bin, in tenths of percent. This field is only used for LMM measurements
**type**\: int
**range:** \-2147483648..2147483647
**units**\: percentage
.. attribute:: sum
The sum of the results in the bin, in microseconds or millionths of a percent
**type**\: int
**range:** \-9223372036854775808..9223372036854775807
.. attribute:: count
The total number of results in the bin
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Aggregated.Bins, self).__init__()
self.yang_name = "bins"
self.yang_parent_name = "aggregated"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('lower_bound', (YLeaf(YType.int32, 'lower-bound'), ['int'])),
('upper_bound', (YLeaf(YType.int32, 'upper-bound'), ['int'])),
('lower_bound_tenths', (YLeaf(YType.int32, 'lower-bound-tenths'), ['int'])),
('upper_bound_tenths', (YLeaf(YType.int32, 'upper-bound-tenths'), ['int'])),
('sum', (YLeaf(YType.int64, 'sum'), ['int'])),
('count', (YLeaf(YType.uint32, 'count'), ['int'])),
])
self.lower_bound = None
self.upper_bound = None
self.lower_bound_tenths = None
self.upper_bound_tenths = None
self.sum = None
self.count = None
self._segment_path = lambda: "bins"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-on-demand-currents/statistics-on-demand-current/operation-metric/bucket/contents/aggregated/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Aggregated.Bins, ['lower_bound', 'upper_bound', 'lower_bound_tenths', 'upper_bound_tenths', 'sum', 'count'], name, value)
class Unaggregated(Entity):
"""
Result samples in an SLA metric bucket
.. attribute:: sample
The samples of an SLA metric bucket
**type**\: list of :py:class:`Sample <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Unaggregated.Sample>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Unaggregated, self).__init__()
self.yang_name = "unaggregated"
self.yang_parent_name = "contents"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("sample", ("sample", Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Unaggregated.Sample))])
self._leafs = OrderedDict()
self.sample = YList(self)
self._segment_path = lambda: "unaggregated"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-on-demand-currents/statistics-on-demand-current/operation-metric/bucket/contents/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Unaggregated, [], name, value)
class Sample(Entity):
"""
The samples of an SLA metric bucket
.. attribute:: sent_at
The time (in milliseconds relative to the start time of the bucket) at which the sample was sent
**type**\: int
**range:** 0..4294967295
**units**\: millisecond
.. attribute:: sent
Whether the sample packet was successfully sent
**type**\: bool
.. attribute:: timed_out
Whether the sample packet timed out
**type**\: bool
.. attribute:: corrupt
Whether the sample packet was corrupt
**type**\: bool
.. attribute:: out_of_order
Whether the sample packet was received out\-of\-order
**type**\: bool
.. attribute:: no_data_packets
Whether a measurement could not be made because no data packets were sent in the sample period. Only applicable for LMM measurements
**type**\: bool
.. attribute:: result
The result (in microseconds or millionths of a percent) of the sample, if available
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: frames_sent
For FLR measurements, the number of frames sent, if available
**type**\: int
**range:** 0..4294967295
.. attribute:: frames_lost
For FLR measurements, the number of frames lost, if available
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Unaggregated.Sample, self).__init__()
self.yang_name = "sample"
self.yang_parent_name = "unaggregated"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('sent_at', (YLeaf(YType.uint32, 'sent-at'), ['int'])),
('sent', (YLeaf(YType.boolean, 'sent'), ['bool'])),
('timed_out', (YLeaf(YType.boolean, 'timed-out'), ['bool'])),
('corrupt', (YLeaf(YType.boolean, 'corrupt'), ['bool'])),
('out_of_order', (YLeaf(YType.boolean, 'out-of-order'), ['bool'])),
('no_data_packets', (YLeaf(YType.boolean, 'no-data-packets'), ['bool'])),
('result', (YLeaf(YType.int32, 'result'), ['int'])),
('frames_sent', (YLeaf(YType.uint32, 'frames-sent'), ['int'])),
('frames_lost', (YLeaf(YType.uint32, 'frames-lost'), ['int'])),
])
self.sent_at = None
self.sent = None
self.timed_out = None
self.corrupt = None
self.out_of_order = None
self.no_data_packets = None
self.result = None
self.frames_sent = None
self.frames_lost = None
self._segment_path = lambda: "sample"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-on-demand-currents/statistics-on-demand-current/operation-metric/bucket/contents/unaggregated/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandCurrents.StatisticsOnDemandCurrent.OperationMetric.Bucket.Contents.Unaggregated.Sample, ['sent_at', 'sent', 'timed_out', 'corrupt', 'out_of_order', 'no_data_packets', 'result', 'frames_sent', 'frames_lost'], name, value)
class Operations(Entity):
"""
Table of SLA operations
.. attribute:: operation_
SLA operation to get operation data for
**type**\: list of :py:class:`Operation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.Operations.Operation>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.Operations, self).__init__()
self.yang_name = "operations"
self.yang_parent_name = "ethernet"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("operation", ("operation_", Sla.Protocols.Ethernet.Operations.Operation))])
self._leafs = OrderedDict()
self.operation_ = YList(self)
self._segment_path = lambda: "operations"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.Operations, [], name, value)
class Operation(Entity):
"""
SLA operation to get operation data for
.. attribute:: profile_name
Profile Name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: domain_name
Domain name
**type**\: str
.. attribute:: interface_name
Interface name
**type**\: str
**pattern:** [a\-zA\-Z0\-9.\_/\-]+
.. attribute:: mep_id
MEP ID in the range 1 to 8191. Either MEP ID or MAC address must be specified
**type**\: int
**range:** 1..8191
.. attribute:: mac_address
Unicast MAC Address in xxxx.xxxx.xxxx format. Either MEP ID or MAC address must be specified
**type**\: str
**pattern:** [0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2}){5}
.. attribute:: profile_options
Options that are only valid if the operation has a profile
**type**\: :py:class:`ProfileOptions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions>`
.. attribute:: specific_options
Options specific to the type of operation
**type**\: :py:class:`SpecificOptions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions>`
.. attribute:: display_short
Short display name used by the operation
**type**\: str
.. attribute:: display_long
Long display name used by the operation
**type**\: str
.. attribute:: last_run
Time at which the last probe for the operation was run, or NULL if it has never run
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.Operations.Operation, self).__init__()
self.yang_name = "operation"
self.yang_parent_name = "operations"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("profile-options", ("profile_options", Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions)), ("specific-options", ("specific_options", Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions))])
self._leafs = OrderedDict([
('profile_name', (YLeaf(YType.str, 'profile-name'), ['str'])),
('domain_name', (YLeaf(YType.str, 'domain-name'), ['str'])),
('interface_name', (YLeaf(YType.str, 'interface-name'), ['str'])),
('mep_id', (YLeaf(YType.uint32, 'mep-id'), ['int'])),
('mac_address', (YLeaf(YType.str, 'mac-address'), ['str'])),
('display_short', (YLeaf(YType.str, 'display-short'), ['str'])),
('display_long', (YLeaf(YType.str, 'display-long'), ['str'])),
('last_run', (YLeaf(YType.uint32, 'last-run'), ['int'])),
])
self.profile_name = None
self.domain_name = None
self.interface_name = None
self.mep_id = None
self.mac_address = None
self.display_short = None
self.display_long = None
self.last_run = None
self.profile_options = Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions()
self.profile_options.parent = self
self._children_name_map["profile_options"] = "profile-options"
self.specific_options = Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions()
self.specific_options.parent = self
self._children_name_map["specific_options"] = "specific-options"
self._segment_path = lambda: "operation"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/operations/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.Operations.Operation, ['profile_name', 'domain_name', 'interface_name', 'mep_id', 'mac_address', 'display_short', 'display_long', 'last_run'], name, value)
class ProfileOptions(Entity):
"""
Options that are only valid if the operation has
a profile
.. attribute:: packet_padding
Configuration of the packet padding
**type**\: :py:class:`PacketPadding <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.PacketPadding>`
.. attribute:: priority
Priority at which to send the packet, if configured
**type**\: :py:class:`Priority <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.Priority>`
.. attribute:: operation_schedule
Operation schedule
**type**\: :py:class:`OperationSchedule <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationSchedule>`
.. attribute:: probe_type
Type of probe used by the operation
**type**\: str
.. attribute:: packets_per_burst
Number of packets sent per burst
**type**\: int
**range:** 0..65535
.. attribute:: inter_packet_interval
Interval between packets within a burst in milliseconds
**type**\: int
**range:** 0..65535
**units**\: millisecond
.. attribute:: bursts_per_probe
Number of bursts sent per probe
**type**\: int
**range:** 0..4294967295
.. attribute:: inter_burst_interval
Interval between bursts within a probe in milliseconds
**type**\: int
**range:** 0..4294967295
**units**\: millisecond
.. attribute:: flr_calculation_interval
Interval between FLR calculations for SLM, in milliseconds
**type**\: int
**range:** 0..4294967295
**units**\: millisecond
.. attribute:: operation_metric
Array of the metrics that are measured by the operation
**type**\: list of :py:class:`OperationMetric <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationMetric>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions, self).__init__()
self.yang_name = "profile-options"
self.yang_parent_name = "operation"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("packet-padding", ("packet_padding", Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.PacketPadding)), ("priority", ("priority", Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.Priority)), ("operation-schedule", ("operation_schedule", Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationSchedule)), ("operation-metric", ("operation_metric", Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationMetric))])
self._leafs = OrderedDict([
('probe_type', (YLeaf(YType.str, 'probe-type'), ['str'])),
('packets_per_burst', (YLeaf(YType.uint16, 'packets-per-burst'), ['int'])),
('inter_packet_interval', (YLeaf(YType.uint16, 'inter-packet-interval'), ['int'])),
('bursts_per_probe', (YLeaf(YType.uint32, 'bursts-per-probe'), ['int'])),
('inter_burst_interval', (YLeaf(YType.uint32, 'inter-burst-interval'), ['int'])),
('flr_calculation_interval', (YLeaf(YType.uint32, 'flr-calculation-interval'), ['int'])),
])
self.probe_type = None
self.packets_per_burst = None
self.inter_packet_interval = None
self.bursts_per_probe = None
self.inter_burst_interval = None
self.flr_calculation_interval = None
self.packet_padding = Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.PacketPadding()
self.packet_padding.parent = self
self._children_name_map["packet_padding"] = "packet-padding"
self.priority = Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.Priority()
self.priority.parent = self
self._children_name_map["priority"] = "priority"
self.operation_schedule = Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationSchedule()
self.operation_schedule.parent = self
self._children_name_map["operation_schedule"] = "operation-schedule"
self.operation_metric = YList(self)
self._segment_path = lambda: "profile-options"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/operations/operation/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions, ['probe_type', 'packets_per_burst', 'inter_packet_interval', 'bursts_per_probe', 'inter_burst_interval', 'flr_calculation_interval'], name, value)
class PacketPadding(Entity):
"""
Configuration of the packet padding
.. attribute:: packet_pad_size
Size that packets are being padded to
**type**\: int
**range:** 0..65535
.. attribute:: test_pattern_pad_scheme
Test pattern scheme that is used in the packet padding
**type**\: :py:class:`SlaOperTestPatternScheme <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper.SlaOperTestPatternScheme>`
.. attribute:: test_pattern_pad_hex_string
Hex string that is used in the packet padding
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.PacketPadding, self).__init__()
self.yang_name = "packet-padding"
self.yang_parent_name = "profile-options"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('packet_pad_size', (YLeaf(YType.uint16, 'packet-pad-size'), ['int'])),
('test_pattern_pad_scheme', (YLeaf(YType.enumeration, 'test-pattern-pad-scheme'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperTestPatternScheme', '')])),
('test_pattern_pad_hex_string', (YLeaf(YType.uint32, 'test-pattern-pad-hex-string'), ['int'])),
])
self.packet_pad_size = None
self.test_pattern_pad_scheme = None
self.test_pattern_pad_hex_string = None
self._segment_path = lambda: "packet-padding"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/operations/operation/profile-options/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.PacketPadding, ['packet_pad_size', 'test_pattern_pad_scheme', 'test_pattern_pad_hex_string'], name, value)
class Priority(Entity):
"""
Priority at which to send the packet, if
configured
.. attribute:: priority_type
PriorityType
**type**\: :py:class:`SlaOperPacketPriority <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper.SlaOperPacketPriority>`
.. attribute:: cos
3\-bit COS priority value applied to packets
**type**\: int
**range:** 0..255
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.Priority, self).__init__()
self.yang_name = "priority"
self.yang_parent_name = "profile-options"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('priority_type', (YLeaf(YType.enumeration, 'priority-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperPacketPriority', '')])),
('cos', (YLeaf(YType.uint8, 'cos'), ['int'])),
])
self.priority_type = None
self.cos = None
self._segment_path = lambda: "priority"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/operations/operation/profile-options/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.Priority, ['priority_type', 'cos'], name, value)
class OperationSchedule(Entity):
"""
Operation schedule
.. attribute:: start_time
Start time of the first probe, in seconds since the Unix Epoch
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: start_time_configured
Whether or not the operation start time was explicitly configured
**type**\: bool
.. attribute:: schedule_duration
Duration of a probe for the operation in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: schedule_interval
Interval between the start times of consecutive probes, in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationSchedule, self).__init__()
self.yang_name = "operation-schedule"
self.yang_parent_name = "profile-options"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('start_time', (YLeaf(YType.uint32, 'start-time'), ['int'])),
('start_time_configured', (YLeaf(YType.boolean, 'start-time-configured'), ['bool'])),
('schedule_duration', (YLeaf(YType.uint32, 'schedule-duration'), ['int'])),
('schedule_interval', (YLeaf(YType.uint32, 'schedule-interval'), ['int'])),
])
self.start_time = None
self.start_time_configured = None
self.schedule_duration = None
self.schedule_interval = None
self._segment_path = lambda: "operation-schedule"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/operations/operation/profile-options/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationSchedule, ['start_time', 'start_time_configured', 'schedule_duration', 'schedule_interval'], name, value)
class OperationMetric(Entity):
"""
Array of the metrics that are measured by the
operation
.. attribute:: metric_config
Configuration of the metric
**type**\: :py:class:`MetricConfig <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationMetric.MetricConfig>`
.. attribute:: current_buckets_archive
Number of valid buckets currently in the buckets archive
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationMetric, self).__init__()
self.yang_name = "operation-metric"
self.yang_parent_name = "profile-options"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("metric-config", ("metric_config", Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationMetric.MetricConfig))])
self._leafs = OrderedDict([
('current_buckets_archive', (YLeaf(YType.uint32, 'current-buckets-archive'), ['int'])),
])
self.current_buckets_archive = None
self.metric_config = Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationMetric.MetricConfig()
self.metric_config.parent = self
self._children_name_map["metric_config"] = "metric-config"
self._segment_path = lambda: "operation-metric"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/operations/operation/profile-options/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationMetric, ['current_buckets_archive'], name, value)
class MetricConfig(Entity):
"""
Configuration of the metric
.. attribute:: metric_type
Type of metric to which this configuration applies
**type**\: :py:class:`SlaRecordableMetric <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper.SlaRecordableMetric>`
.. attribute:: bins_count
Total number of bins into which to aggregate. 0 if no aggregation
**type**\: int
**range:** 0..65535
.. attribute:: bins_width
Width of each bin into which to aggregate. 0 if no aggregation. For SLM, the units of this value are in single units of percent; for LMM they are in tenths of percent; for other measurements they are in milliseconds
**type**\: int
**range:** 0..65535
.. attribute:: bucket_size
Size of buckets into which measurements are collected
**type**\: int
**range:** 0..255
.. attribute:: bucket_size_unit
Whether bucket size is 'per\-probe' or 'probes'
**type**\: :py:class:`SlaBucketSize <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper.SlaBucketSize>`
.. attribute:: buckets_archive
Maximum number of buckets to store in memory
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationMetric.MetricConfig, self).__init__()
self.yang_name = "metric-config"
self.yang_parent_name = "operation-metric"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('metric_type', (YLeaf(YType.enumeration, 'metric-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaRecordableMetric', '')])),
('bins_count', (YLeaf(YType.uint16, 'bins-count'), ['int'])),
('bins_width', (YLeaf(YType.uint16, 'bins-width'), ['int'])),
('bucket_size', (YLeaf(YType.uint8, 'bucket-size'), ['int'])),
('bucket_size_unit', (YLeaf(YType.enumeration, 'bucket-size-unit'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaBucketSize', '')])),
('buckets_archive', (YLeaf(YType.uint32, 'buckets-archive'), ['int'])),
])
self.metric_type = None
self.bins_count = None
self.bins_width = None
self.bucket_size = None
self.bucket_size_unit = None
self.buckets_archive = None
self._segment_path = lambda: "metric-config"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/operations/operation/profile-options/operation-metric/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.Operations.Operation.ProfileOptions.OperationMetric.MetricConfig, ['metric_type', 'bins_count', 'bins_width', 'bucket_size', 'bucket_size_unit', 'buckets_archive'], name, value)
class SpecificOptions(Entity):
"""
Options specific to the type of operation
.. attribute:: configured_operation_options
Parameters for a configured operation
**type**\: :py:class:`ConfiguredOperationOptions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions.ConfiguredOperationOptions>`
.. attribute:: ondemand_operation_options
Parameters for an ondemand operation
**type**\: :py:class:`OndemandOperationOptions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions.OndemandOperationOptions>`
.. attribute:: oper_type
OperType
**type**\: :py:class:`SlaOperOperation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper.SlaOperOperation>`
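Example: a minimal sketch of labelling an operation from these options (the name ``opts`` is illustrative; it assumes the child container matching ``oper_type`` is the populated one)::

    # opts = <an already-read SpecificOptions instance>
    if opts.configured_operation_options.profile_name is not None:
        label = opts.configured_operation_options.profile_name
    else:
        label = ('ondemand-%s' %
                 opts.ondemand_operation_options.ondemand_operation_id)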
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions, self).__init__()
self.yang_name = "specific-options"
self.yang_parent_name = "operation"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("configured-operation-options", ("configured_operation_options", Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions.ConfiguredOperationOptions)), ("ondemand-operation-options", ("ondemand_operation_options", Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions.OndemandOperationOptions))])
self._leafs = OrderedDict([
('oper_type', (YLeaf(YType.enumeration, 'oper-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperOperation', '')])),
])
self.oper_type = None
self.configured_operation_options = Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions.ConfiguredOperationOptions()
self.configured_operation_options.parent = self
self._children_name_map["configured_operation_options"] = "configured-operation-options"
self.ondemand_operation_options = Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions.OndemandOperationOptions()
self.ondemand_operation_options.parent = self
self._children_name_map["ondemand_operation_options"] = "ondemand-operation-options"
self._segment_path = lambda: "specific-options"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/operations/operation/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions, ['oper_type'], name, value)
class ConfiguredOperationOptions(Entity):
"""
Parameters for a configured operation
.. attribute:: profile_name
Name of the profile used by the operation
**type**\: str
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions.ConfiguredOperationOptions, self).__init__()
self.yang_name = "configured-operation-options"
self.yang_parent_name = "specific-options"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('profile_name', (YLeaf(YType.str, 'profile-name'), ['str'])),
])
self.profile_name = None
self._segment_path = lambda: "configured-operation-options"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/operations/operation/specific-options/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions.ConfiguredOperationOptions, ['profile_name'], name, value)
class OndemandOperationOptions(Entity):
"""
Parameters for an ondemand operation
.. attribute:: ondemand_operation_id
ID of the ondemand operation
**type**\: int
**range:** 0..4294967295
.. attribute:: probe_count
Total number of probes sent during the operation
**type**\: int
**range:** 0..255
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions.OndemandOperationOptions, self).__init__()
self.yang_name = "ondemand-operation-options"
self.yang_parent_name = "specific-options"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('ondemand_operation_id', (YLeaf(YType.uint32, 'ondemand-operation-id'), ['int'])),
('probe_count', (YLeaf(YType.uint8, 'probe-count'), ['int'])),
])
self.ondemand_operation_id = None
self.probe_count = None
self._segment_path = lambda: "ondemand-operation-options"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/operations/operation/specific-options/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.Operations.Operation.SpecificOptions.OndemandOperationOptions, ['ondemand_operation_id', 'probe_count'], name, value)
class StatisticsHistoricals(Entity):
"""
Table of historical statistics for SLA
operations
.. attribute:: statistics_historical
Historical statistics data for an SLA configured operation
**type**\: list of :py:class:`StatisticsHistorical <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsHistoricals, self).__init__()
self.yang_name = "statistics-historicals"
self.yang_parent_name = "ethernet"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("statistics-historical", ("statistics_historical", Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical))])
self._leafs = OrderedDict()
self.statistics_historical = YList(self)
self._segment_path = lambda: "statistics-historicals"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsHistoricals, [], name, value)
class StatisticsHistorical(Entity):
"""
Historical statistics data for an SLA
configured operation
.. attribute:: profile_name
Profile Name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: domain_name
Domain name
**type**\: str
.. attribute:: interface_name
Interface name
**type**\: str
**pattern:** [a\-zA\-Z0\-9.\_/\-]+
.. attribute:: mep_id
MEP ID in the range 1 to 8191. Either MEP ID or MAC address must be specified
**type**\: int
**range:** 1..8191
.. attribute:: mac_address
Unicast MAC Address in xxxx.xxxx.xxxx format. Either MEP ID or MAC address must be specified
**type**\: str
**pattern:** [0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2}){5}
.. attribute:: specific_options
Options specific to the type of operation
**type**\: :py:class:`SpecificOptions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions>`
.. attribute:: operation_schedule
Operation schedule
**type**\: :py:class:`OperationSchedule <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationSchedule>`
.. attribute:: probe_type
Type of probe used by the operation
**type**\: str
.. attribute:: display_short
Short display name used by the operation
**type**\: str
.. attribute:: display_long
Long display name used by the operation
**type**\: str
.. attribute:: flr_calculation_interval
Interval between FLR calculations for SLM, in milliseconds
**type**\: int
**range:** 0..4294967295
**units**\: millisecond
.. attribute:: operation_metric
Metrics gathered for the operation
**type**\: list of :py:class:`OperationMetric <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric>`
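Example: a minimal sketch of reading this table with YDK (the device address and credentials are placeholders)::

    from ydk.models.cisco_ios_xr import Cisco_IOS_XR_infra_sla_oper
    from ydk.providers import NetconfServiceProvider
    from ydk.services import CRUDService

    provider = NetconfServiceProvider(address='198.51.100.1',
                                      username='admin',
                                      password='admin')
    sla = CRUDService().read(provider, Cisco_IOS_XR_infra_sla_oper.Sla())
    for entry in (sla.protocols.ethernet.statistics_historicals
                  .statistics_historical):
        print(entry.display_short, entry.probe_type)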
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical, self).__init__()
self.yang_name = "statistics-historical"
self.yang_parent_name = "statistics-historicals"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("specific-options", ("specific_options", Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions)), ("operation-schedule", ("operation_schedule", Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationSchedule)), ("operation-metric", ("operation_metric", Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric))])
self._leafs = OrderedDict([
('profile_name', (YLeaf(YType.str, 'profile-name'), ['str'])),
('domain_name', (YLeaf(YType.str, 'domain-name'), ['str'])),
('interface_name', (YLeaf(YType.str, 'interface-name'), ['str'])),
('mep_id', (YLeaf(YType.uint32, 'mep-id'), ['int'])),
('mac_address', (YLeaf(YType.str, 'mac-address'), ['str'])),
('probe_type', (YLeaf(YType.str, 'probe-type'), ['str'])),
('display_short', (YLeaf(YType.str, 'display-short'), ['str'])),
('display_long', (YLeaf(YType.str, 'display-long'), ['str'])),
('flr_calculation_interval', (YLeaf(YType.uint32, 'flr-calculation-interval'), ['int'])),
])
self.profile_name = None
self.domain_name = None
self.interface_name = None
self.mep_id = None
self.mac_address = None
self.probe_type = None
self.display_short = None
self.display_long = None
self.flr_calculation_interval = None
self.specific_options = Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions()
self.specific_options.parent = self
self._children_name_map["specific_options"] = "specific-options"
self.operation_schedule = Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationSchedule()
self.operation_schedule.parent = self
self._children_name_map["operation_schedule"] = "operation-schedule"
self.operation_metric = YList(self)
self._segment_path = lambda: "statistics-historical"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-historicals/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical, ['profile_name', 'domain_name', 'interface_name', 'mep_id', 'mac_address', 'probe_type', 'display_short', 'display_long', 'flr_calculation_interval'], name, value)
class SpecificOptions(Entity):
"""
Options specific to the type of operation
.. attribute:: configured_operation_options
Parameters for a configured operation
**type**\: :py:class:`ConfiguredOperationOptions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions.ConfiguredOperationOptions>`
.. attribute:: ondemand_operation_options
Parameters for an ondemand operation
**type**\: :py:class:`OndemandOperationOptions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions.OndemandOperationOptions>`
.. attribute:: oper_type
OperType
**type**\: :py:class:`SlaOperOperation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper.SlaOperOperation>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions, self).__init__()
self.yang_name = "specific-options"
self.yang_parent_name = "statistics-historical"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("configured-operation-options", ("configured_operation_options", Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions.ConfiguredOperationOptions)), ("ondemand-operation-options", ("ondemand_operation_options", Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions.OndemandOperationOptions))])
self._leafs = OrderedDict([
('oper_type', (YLeaf(YType.enumeration, 'oper-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperOperation', '')])),
])
self.oper_type = None
self.configured_operation_options = Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions.ConfiguredOperationOptions()
self.configured_operation_options.parent = self
self._children_name_map["configured_operation_options"] = "configured-operation-options"
self.ondemand_operation_options = Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions.OndemandOperationOptions()
self.ondemand_operation_options.parent = self
self._children_name_map["ondemand_operation_options"] = "ondemand-operation-options"
self._segment_path = lambda: "specific-options"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-historicals/statistics-historical/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions, ['oper_type'], name, value)
class ConfiguredOperationOptions(Entity):
"""
Parameters for a configured operation
.. attribute:: profile_name
Name of the profile used by the operation
**type**\: str
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions.ConfiguredOperationOptions, self).__init__()
self.yang_name = "configured-operation-options"
self.yang_parent_name = "specific-options"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('profile_name', (YLeaf(YType.str, 'profile-name'), ['str'])),
])
self.profile_name = None
self._segment_path = lambda: "configured-operation-options"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-historicals/statistics-historical/specific-options/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions.ConfiguredOperationOptions, ['profile_name'], name, value)
class OndemandOperationOptions(Entity):
"""
Parameters for an ondemand operation
.. attribute:: ondemand_operation_id
ID of the ondemand operation
**type**\: int
**range:** 0..4294967295
.. attribute:: probe_count
Total number of probes sent during the operation
**type**\: int
**range:** 0..255
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions.OndemandOperationOptions, self).__init__()
self.yang_name = "ondemand-operation-options"
self.yang_parent_name = "specific-options"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('ondemand_operation_id', (YLeaf(YType.uint32, 'ondemand-operation-id'), ['int'])),
('probe_count', (YLeaf(YType.uint8, 'probe-count'), ['int'])),
])
self.ondemand_operation_id = None
self.probe_count = None
self._segment_path = lambda: "ondemand-operation-options"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-historicals/statistics-historical/specific-options/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.SpecificOptions.OndemandOperationOptions, ['ondemand_operation_id', 'probe_count'], name, value)
class OperationSchedule(Entity):
"""
Operation schedule
.. attribute:: start_time
Start time of the first probe, in seconds since the Unix Epoch
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: start_time_configured
Whether or not the operation start time was explicitly configured
**type**\: bool
.. attribute:: schedule_duration
Duration of a probe for the operation in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: schedule_interval
Interval between the start times of consecutive probes, in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
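Example: a minimal sketch of converting the schedule times (the name ``sched`` is illustrative; ``start_time`` is seconds since the Unix Epoch)::

    from datetime import datetime, timezone

    # sched = <an already-read OperationSchedule instance>
    if sched.start_time is not None:
        started = datetime.fromtimestamp(sched.start_time, tz=timezone.utc)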
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationSchedule, self).__init__()
self.yang_name = "operation-schedule"
self.yang_parent_name = "statistics-historical"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('start_time', (YLeaf(YType.uint32, 'start-time'), ['int'])),
('start_time_configured', (YLeaf(YType.boolean, 'start-time-configured'), ['bool'])),
('schedule_duration', (YLeaf(YType.uint32, 'schedule-duration'), ['int'])),
('schedule_interval', (YLeaf(YType.uint32, 'schedule-interval'), ['int'])),
])
self.start_time = None
self.start_time_configured = None
self.schedule_duration = None
self.schedule_interval = None
self._segment_path = lambda: "operation-schedule"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-historicals/statistics-historical/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationSchedule, ['start_time', 'start_time_configured', 'schedule_duration', 'schedule_interval'], name, value)
class OperationMetric(Entity):
"""
Metrics gathered for the operation
.. attribute:: config
Configuration of the metric
**type**\: :py:class:`Config <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Config>`
.. attribute:: bucket
Buckets stored for the metric
**type**\: list of :py:class:`Bucket <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric, self).__init__()
self.yang_name = "operation-metric"
self.yang_parent_name = "statistics-historical"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("config", ("config", Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Config)), ("bucket", ("bucket", Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket))])
self._leafs = OrderedDict()
self.config = Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Config()
self.config.parent = self
self._children_name_map["config"] = "config"
self.bucket = YList(self)
self._segment_path = lambda: "operation-metric"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-historicals/statistics-historical/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric, [], name, value)
class Config(Entity):
"""
Configuration of the metric
.. attribute:: metric_type
Type of metric to which this configuration applies
**type**\: :py:class:`SlaRecordableMetric <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper.SlaRecordableMetric>`
.. attribute:: bins_count
Total number of bins into which to aggregate. 0 if no aggregation
**type**\: int
**range:** 0..65535
.. attribute:: bins_width
Width of each bin into which to aggregate. 0 if no aggregation. For SLM, this value is in single percentage points; for LMM it is in tenths of a percent; for other measurements it is in milliseconds
**type**\: int
**range:** 0..65535
.. attribute:: bucket_size
Size of buckets into which measurements are collected
**type**\: int
**range:** 0..255
.. attribute:: bucket_size_unit
Whether bucket size is 'per\-probe' or 'probes'
**type**\: :py:class:`SlaBucketSize <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper.SlaBucketSize>`
.. attribute:: buckets_archive
Maximum number of buckets to store in memory
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Config, self).__init__()
self.yang_name = "config"
self.yang_parent_name = "operation-metric"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('metric_type', (YLeaf(YType.enumeration, 'metric-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaRecordableMetric', '')])),
('bins_count', (YLeaf(YType.uint16, 'bins-count'), ['int'])),
('bins_width', (YLeaf(YType.uint16, 'bins-width'), ['int'])),
('bucket_size', (YLeaf(YType.uint8, 'bucket-size'), ['int'])),
('bucket_size_unit', (YLeaf(YType.enumeration, 'bucket-size-unit'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaBucketSize', '')])),
('buckets_archive', (YLeaf(YType.uint32, 'buckets-archive'), ['int'])),
])
self.metric_type = None
self.bins_count = None
self.bins_width = None
self.bucket_size = None
self.bucket_size_unit = None
self.buckets_archive = None
self._segment_path = lambda: "config"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-historicals/statistics-historical/operation-metric/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Config, ['metric_type', 'bins_count', 'bins_width', 'bucket_size', 'bucket_size_unit', 'buckets_archive'], name, value)
class Bucket(Entity):
"""
Buckets stored for the metric
.. attribute:: contents
The contents of the bucket; bins or samples
**type**\: :py:class:`Contents <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents>`
.. attribute:: start_at
Absolute time at which the bucket started being filled
**type**\: int
**range:** 0..4294967295
.. attribute:: duration
Length of time for which the bucket is being filled in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: sent
Number of packets sent in the probe
**type**\: int
**range:** 0..4294967295
.. attribute:: lost
Number of lost packets in the probe
**type**\: int
**range:** 0..4294967295
.. attribute:: corrupt
Number of corrupt packets in the probe
**type**\: int
**range:** 0..4294967295
.. attribute:: out_of_order
Number of packets received out\-of\-order in the probe
**type**\: int
**range:** 0..4294967295
.. attribute:: duplicates
Number of duplicate packets received in the probe
**type**\: int
**range:** 0..4294967295
.. attribute:: minimum
Overall minimum result in the probe, in microseconds or millionths of a percent
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: maximum
Overall maximum result in the probe, in microseconds or millionths of a percent
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: time_of_minimum
Absolute time that the minimum value was recorded
**type**\: int
**range:** 0..4294967295
.. attribute:: time_of_maximum
Absolute time that the maximum value was recorded
**type**\: int
**range:** 0..4294967295
.. attribute:: average
Mean of the results in the probe, in microseconds or millionths of a percent
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: standard_deviation
Standard deviation of the results in the probe, in microseconds or millionths of a percent
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: result_count
The count of samples collected in the bucket
**type**\: int
**range:** 0..4294967295
.. attribute:: data_sent_count
The number of data packets sent across the bucket, used in the calculation of overall FLR
**type**\: int
**range:** 0..4294967295
.. attribute:: data_lost_count
The number of data packets lost across the bucket, used in the calculation of overall FLR
**type**\: int
**range:** 0..4294967295
.. attribute:: overall_flr
Frame Loss Ratio across the whole bucket, in millionths of a percent
**type**\: int
**range:** \-2147483648..2147483647
**units**\: percentage
.. attribute:: suspect_start_mid_bucket
Results suspect due to a probe starting mid\-way through a bucket
**type**\: bool
.. attribute:: suspect_schedule_latency
Results suspect due to scheduling latency causing one or more packets to not be sent
**type**\: bool
.. attribute:: suspect_send_fail
Results suspect due to failure to send one or more packets
**type**\: bool
.. attribute:: suspect_premature_end
Results suspect due to a probe ending prematurely
**type**\: bool
.. attribute:: suspect_clock_drift
Results suspect as more than 10 seconds of time drift was detected
**type**\: bool
.. attribute:: suspect_memory_allocation_failed
Results suspect due to a memory allocation failure
**type**\: bool
.. attribute:: suspect_cleared_mid_bucket
Results suspect as bucket was cleared mid\-way through being filled
**type**\: bool
.. attribute:: suspect_probe_restarted
Results suspect as probe restarted mid\-way through the bucket
**type**\: bool
.. attribute:: suspect_management_latency
Results suspect as processing of results has been delayed
**type**\: bool
.. attribute:: suspect_multiple_buckets
Results suspect as the probe has been configured across multiple buckets
**type**\: bool
.. attribute:: suspect_misordering
Results suspect as misordering has been detected, affecting results
**type**\: bool
.. attribute:: suspect_flr_low_packet_count
Results suspect as FLR calculated based on a low packet count
**type**\: bool
.. attribute:: premature_reason
If the probe ended prematurely, the error that caused a probe to end
**type**\: int
**range:** 0..4294967295
.. attribute:: premature_reason_string
Description of the error code that caused the probe to end prematurely. For informational purposes only
**type**\: str
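Example: a minimal sketch of interpreting a bucket (the name ``bkt`` is illustrative; ``overall_flr`` is in millionths of a percent, and any of the ``suspect_*`` flags marks the results as unreliable)::

    # bkt = <an already-read Bucket instance>
    if bkt.overall_flr is not None:
        flr_percent = bkt.overall_flr / 1000000.0
    suspect = any((bkt.suspect_start_mid_bucket,
                   bkt.suspect_premature_end,
                   bkt.suspect_clock_drift))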
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket, self).__init__()
self.yang_name = "bucket"
self.yang_parent_name = "operation-metric"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("contents", ("contents", Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents))])
self._leafs = OrderedDict([
('start_at', (YLeaf(YType.uint32, 'start-at'), ['int'])),
('duration', (YLeaf(YType.uint32, 'duration'), ['int'])),
('sent', (YLeaf(YType.uint32, 'sent'), ['int'])),
('lost', (YLeaf(YType.uint32, 'lost'), ['int'])),
('corrupt', (YLeaf(YType.uint32, 'corrupt'), ['int'])),
('out_of_order', (YLeaf(YType.uint32, 'out-of-order'), ['int'])),
('duplicates', (YLeaf(YType.uint32, 'duplicates'), ['int'])),
('minimum', (YLeaf(YType.int32, 'minimum'), ['int'])),
('maximum', (YLeaf(YType.int32, 'maximum'), ['int'])),
('time_of_minimum', (YLeaf(YType.uint32, 'time-of-minimum'), ['int'])),
('time_of_maximum', (YLeaf(YType.uint32, 'time-of-maximum'), ['int'])),
('average', (YLeaf(YType.int32, 'average'), ['int'])),
('standard_deviation', (YLeaf(YType.int32, 'standard-deviation'), ['int'])),
('result_count', (YLeaf(YType.uint32, 'result-count'), ['int'])),
('data_sent_count', (YLeaf(YType.uint32, 'data-sent-count'), ['int'])),
('data_lost_count', (YLeaf(YType.uint32, 'data-lost-count'), ['int'])),
('overall_flr', (YLeaf(YType.int32, 'overall-flr'), ['int'])),
('suspect_start_mid_bucket', (YLeaf(YType.boolean, 'suspect-start-mid-bucket'), ['bool'])),
('suspect_schedule_latency', (YLeaf(YType.boolean, 'suspect-schedule-latency'), ['bool'])),
('suspect_send_fail', (YLeaf(YType.boolean, 'suspect-send-fail'), ['bool'])),
('suspect_premature_end', (YLeaf(YType.boolean, 'suspect-premature-end'), ['bool'])),
('suspect_clock_drift', (YLeaf(YType.boolean, 'suspect-clock-drift'), ['bool'])),
('suspect_memory_allocation_failed', (YLeaf(YType.boolean, 'suspect-memory-allocation-failed'), ['bool'])),
('suspect_cleared_mid_bucket', (YLeaf(YType.boolean, 'suspect-cleared-mid-bucket'), ['bool'])),
('suspect_probe_restarted', (YLeaf(YType.boolean, 'suspect-probe-restarted'), ['bool'])),
('suspect_management_latency', (YLeaf(YType.boolean, 'suspect-management-latency'), ['bool'])),
('suspect_multiple_buckets', (YLeaf(YType.boolean, 'suspect-multiple-buckets'), ['bool'])),
('suspect_misordering', (YLeaf(YType.boolean, 'suspect-misordering'), ['bool'])),
('suspect_flr_low_packet_count', (YLeaf(YType.boolean, 'suspect-flr-low-packet-count'), ['bool'])),
('premature_reason', (YLeaf(YType.uint32, 'premature-reason'), ['int'])),
('premature_reason_string', (YLeaf(YType.str, 'premature-reason-string'), ['str'])),
])
self.start_at = None
self.duration = None
self.sent = None
self.lost = None
self.corrupt = None
self.out_of_order = None
self.duplicates = None
self.minimum = None
self.maximum = None
self.time_of_minimum = None
self.time_of_maximum = None
self.average = None
self.standard_deviation = None
self.result_count = None
self.data_sent_count = None
self.data_lost_count = None
self.overall_flr = None
self.suspect_start_mid_bucket = None
self.suspect_schedule_latency = None
self.suspect_send_fail = None
self.suspect_premature_end = None
self.suspect_clock_drift = None
self.suspect_memory_allocation_failed = None
self.suspect_cleared_mid_bucket = None
self.suspect_probe_restarted = None
self.suspect_management_latency = None
self.suspect_multiple_buckets = None
self.suspect_misordering = None
self.suspect_flr_low_packet_count = None
self.premature_reason = None
self.premature_reason_string = None
self.contents = Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents()
self.contents.parent = self
self._children_name_map["contents"] = "contents"
self._segment_path = lambda: "bucket"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-historicals/statistics-historical/operation-metric/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket, ['start_at', 'duration', 'sent', 'lost', 'corrupt', 'out_of_order', 'duplicates', 'minimum', 'maximum', 'time_of_minimum', 'time_of_maximum', 'average', 'standard_deviation', 'result_count', 'data_sent_count', 'data_lost_count', 'overall_flr', 'suspect_start_mid_bucket', 'suspect_schedule_latency', 'suspect_send_fail', 'suspect_premature_end', 'suspect_clock_drift', 'suspect_memory_allocation_failed', 'suspect_cleared_mid_bucket', 'suspect_probe_restarted', 'suspect_management_latency', 'suspect_multiple_buckets', 'suspect_misordering', 'suspect_flr_low_packet_count', 'premature_reason', 'premature_reason_string'], name, value)
class Contents(Entity):
"""
The contents of the bucket; bins or samples
.. attribute:: aggregated
Result bins in an SLA metric bucket
**type**\: :py:class:`Aggregated <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Aggregated>`
.. attribute:: unaggregated
Result samples in an SLA metric bucket
**type**\: :py:class:`Unaggregated <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Unaggregated>`
.. attribute:: bucket_type
BucketType
**type**\: :py:class:`SlaOperBucket <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper.SlaOperBucket>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents, self).__init__()
self.yang_name = "contents"
self.yang_parent_name = "bucket"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("aggregated", ("aggregated", Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Aggregated)), ("unaggregated", ("unaggregated", Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Unaggregated))])
self._leafs = OrderedDict([
('bucket_type', (YLeaf(YType.enumeration, 'bucket-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperBucket', '')])),
])
self.bucket_type = None
self.aggregated = Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Aggregated()
self.aggregated.parent = self
self._children_name_map["aggregated"] = "aggregated"
self.unaggregated = Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Unaggregated()
self.unaggregated.parent = self
self._children_name_map["unaggregated"] = "unaggregated"
self._segment_path = lambda: "contents"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-historicals/statistics-historical/operation-metric/bucket/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents, ['bucket_type'], name, value)
class Aggregated(Entity):
"""
Result bins in an SLA metric bucket
.. attribute:: bins
The bins of an SLA metric bucket
**type**\: list of :py:class:`Bins <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Aggregated.Bins>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Aggregated, self).__init__()
self.yang_name = "aggregated"
self.yang_parent_name = "contents"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("bins", ("bins", Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Aggregated.Bins))])
self._leafs = OrderedDict()
self.bins = YList(self)
self._segment_path = lambda: "aggregated"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-historicals/statistics-historical/operation-metric/bucket/contents/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Aggregated, [], name, value)
class Bins(Entity):
"""
The bins of an SLA metric bucket
.. attribute:: lower_bound
Lower bound (inclusive) of the bin, in milliseconds or single units of percent. This field is not used for LMM measurements
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: upper_bound
Upper bound (exclusive) of the bin, in milliseconds or single units of percent. This field is not used for LMM measurements
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: lower_bound_tenths
Lower bound (inclusive) of the bin, in tenths of percent. This field is only used for LMM measurements
**type**\: int
**range:** \-2147483648..2147483647
**units**\: percentage
.. attribute:: upper_bound_tenths
Upper bound (exclusive) of the bin, in tenths of percent. This field is only used for LMM measurements
**type**\: int
**range:** \-2147483648..2147483647
**units**\: percentage
.. attribute:: sum
The sum of the results in the bin, in microseconds or millionths of a percent
**type**\: int
**range:** \-9223372036854775808..9223372036854775807
.. attribute:: count
The total number of results in the bin
**type**\: int
**range:** 0..4294967295
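Example: a minimal sketch of computing the mean result per bin (the name ``b`` is illustrative; ``sum`` is in microseconds or millionths of a percent, matching the metric)::

    # b = <an already-read Bins entry>
    if b.count:
        mean_result = b.sum / float(b.count)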
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Aggregated.Bins, self).__init__()
self.yang_name = "bins"
self.yang_parent_name = "aggregated"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('lower_bound', (YLeaf(YType.int32, 'lower-bound'), ['int'])),
('upper_bound', (YLeaf(YType.int32, 'upper-bound'), ['int'])),
('lower_bound_tenths', (YLeaf(YType.int32, 'lower-bound-tenths'), ['int'])),
('upper_bound_tenths', (YLeaf(YType.int32, 'upper-bound-tenths'), ['int'])),
('sum', (YLeaf(YType.int64, 'sum'), ['int'])),
('count', (YLeaf(YType.uint32, 'count'), ['int'])),
])
self.lower_bound = None
self.upper_bound = None
self.lower_bound_tenths = None
self.upper_bound_tenths = None
self.sum = None
self.count = None
self._segment_path = lambda: "bins"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-historicals/statistics-historical/operation-metric/bucket/contents/aggregated/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Aggregated.Bins, ['lower_bound', 'upper_bound', 'lower_bound_tenths', 'upper_bound_tenths', 'sum', 'count'], name, value)
class Unaggregated(Entity):
"""
Result samples in an SLA metric bucket
.. attribute:: sample
The samples of an SLA metric bucket
**type**\: list of :py:class:`Sample <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Unaggregated.Sample>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Unaggregated, self).__init__()
self.yang_name = "unaggregated"
self.yang_parent_name = "contents"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("sample", ("sample", Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Unaggregated.Sample))])
self._leafs = OrderedDict()
self.sample = YList(self)
self._segment_path = lambda: "unaggregated"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-historicals/statistics-historical/operation-metric/bucket/contents/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Unaggregated, [], name, value)
class Sample(Entity):
"""
The samples of an SLA metric bucket
.. attribute:: sent_at
The time (in milliseconds relative to the start time of the bucket) at which the sample was sent
**type**\: int
**range:** 0..4294967295
**units**\: millisecond
.. attribute:: sent
Whether the sample packet was successfully sent
**type**\: bool
.. attribute:: timed_out
Whether the sample packet timed out
**type**\: bool
.. attribute:: corrupt
Whether the sample packet was corrupt
**type**\: bool
.. attribute:: out_of_order
Whether the sample packet was received out\-of\-order
**type**\: bool
.. attribute:: no_data_packets
Whether a measurement could not be made because no data packets were sent in the sample period. Only applicable for LMM measurements
**type**\: bool
.. attribute:: result
The result (in microseconds or millionths of a percent) of the sample, if available
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: frames_sent
For FLR measurements, the number of frames sent, if available
**type**\: int
**range:** 0..4294967295
.. attribute:: frames_lost
For FLR measurements, the number of frames lost, if available
**type**\: int
**range:** 0..4294967295
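Example: a minimal sketch of deriving a per\-sample loss ratio for FLR measurements (the name ``s`` is illustrative; ``frames_sent`` and ``frames_lost`` are only populated for FLR)::

    # s = <an already-read Sample entry>
    if s.frames_sent:
        sample_flr = float(s.frames_lost) / s.frames_sent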
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Unaggregated.Sample, self).__init__()
self.yang_name = "sample"
self.yang_parent_name = "unaggregated"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('sent_at', (YLeaf(YType.uint32, 'sent-at'), ['int'])),
('sent', (YLeaf(YType.boolean, 'sent'), ['bool'])),
('timed_out', (YLeaf(YType.boolean, 'timed-out'), ['bool'])),
('corrupt', (YLeaf(YType.boolean, 'corrupt'), ['bool'])),
('out_of_order', (YLeaf(YType.boolean, 'out-of-order'), ['bool'])),
('no_data_packets', (YLeaf(YType.boolean, 'no-data-packets'), ['bool'])),
('result', (YLeaf(YType.int32, 'result'), ['int'])),
('frames_sent', (YLeaf(YType.uint32, 'frames-sent'), ['int'])),
('frames_lost', (YLeaf(YType.uint32, 'frames-lost'), ['int'])),
])
self.sent_at = None
self.sent = None
self.timed_out = None
self.corrupt = None
self.out_of_order = None
self.no_data_packets = None
self.result = None
self.frames_sent = None
self.frames_lost = None
self._segment_path = lambda: "sample"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-historicals/statistics-historical/operation-metric/bucket/contents/unaggregated/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsHistoricals.StatisticsHistorical.OperationMetric.Bucket.Contents.Unaggregated.Sample, ['sent_at', 'sent', 'timed_out', 'corrupt', 'out_of_order', 'no_data_packets', 'result', 'frames_sent', 'frames_lost'], name, value)
class StatisticsOnDemandHistoricals(Entity):
"""
Table of historical statistics for SLA
on\-demand operations
.. attribute:: statistics_on_demand_historical
Historical statistics data for an SLA on\-demand operation
**type**\: list of :py:class:`StatisticsOnDemandHistorical <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals, self).__init__()
self.yang_name = "statistics-on-demand-historicals"
self.yang_parent_name = "ethernet"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("statistics-on-demand-historical", ("statistics_on_demand_historical", Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical))])
self._leafs = OrderedDict()
self.statistics_on_demand_historical = YList(self)
self._segment_path = lambda: "statistics-on-demand-historicals"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals, [], name, value)
class StatisticsOnDemandHistorical(Entity):
"""
Historical statistics data for an SLA
on\-demand operation
.. attribute:: operation_id
Operation ID
**type**\: int
**range:** 1..4294967295
.. attribute:: domain_name
Domain name
**type**\: str
.. attribute:: interface_name
Interface name
**type**\: str
**pattern:** [a\-zA\-Z0\-9.\_/\-]+
.. attribute:: mep_id
MEP ID in the range 1 to 8191. Either MEP ID or MAC address must be specified
**type**\: int
**range:** 1..8191
.. attribute:: mac_address
Unicast MAC Address in xxxx.xxxx.xxxx format. Either MEP ID or MAC address must be specified
**type**\: str
**pattern:** [0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2}){5}
.. attribute:: specific_options
Options specific to the type of operation
**type**\: :py:class:`SpecificOptions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions>`
.. attribute:: operation_schedule
Operation schedule
**type**\: :py:class:`OperationSchedule <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationSchedule>`
.. attribute:: probe_type
Type of probe used by the operation
**type**\: str
.. attribute:: display_short
Short display name used by the operation
**type**\: str
.. attribute:: display_long
Long display name used by the operation
**type**\: str
.. attribute:: flr_calculation_interval
Interval between FLR calculations for SLM, in milliseconds
**type**\: int
**range:** 0..4294967295
**units**\: millisecond
.. attribute:: operation_metric
Metrics gathered for the operation
**type**\: list of :py:class:`OperationMetric <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric>`
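Example: a minimal sketch of selecting one on\-demand operation from a read table (assumes ``sla`` is the result of a :py:class:`CRUDService <ydk.services.CRUDService>` read of the top\-level :py:class:`Sla <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla>` class; the ID ``100`` is illustrative)::

    for entry in (sla.protocols.ethernet.statistics_on_demand_historicals
                  .statistics_on_demand_historical):
        if entry.operation_id == 100:
            print(entry.display_long)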
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical, self).__init__()
self.yang_name = "statistics-on-demand-historical"
self.yang_parent_name = "statistics-on-demand-historicals"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("specific-options", ("specific_options", Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions)), ("operation-schedule", ("operation_schedule", Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationSchedule)), ("operation-metric", ("operation_metric", Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric))])
self._leafs = OrderedDict([
('operation_id', (YLeaf(YType.uint32, 'operation-id'), ['int'])),
('domain_name', (YLeaf(YType.str, 'domain-name'), ['str'])),
('interface_name', (YLeaf(YType.str, 'interface-name'), ['str'])),
('mep_id', (YLeaf(YType.uint32, 'mep-id'), ['int'])),
('mac_address', (YLeaf(YType.str, 'mac-address'), ['str'])),
('probe_type', (YLeaf(YType.str, 'probe-type'), ['str'])),
('display_short', (YLeaf(YType.str, 'display-short'), ['str'])),
('display_long', (YLeaf(YType.str, 'display-long'), ['str'])),
('flr_calculation_interval', (YLeaf(YType.uint32, 'flr-calculation-interval'), ['int'])),
])
self.operation_id = None
self.domain_name = None
self.interface_name = None
self.mep_id = None
self.mac_address = None
self.probe_type = None
self.display_short = None
self.display_long = None
self.flr_calculation_interval = None
self.specific_options = Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions()
self.specific_options.parent = self
self._children_name_map["specific_options"] = "specific-options"
self.operation_schedule = Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationSchedule()
self.operation_schedule.parent = self
self._children_name_map["operation_schedule"] = "operation-schedule"
self.operation_metric = YList(self)
self._segment_path = lambda: "statistics-on-demand-historical"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-on-demand-historicals/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical, ['operation_id', 'domain_name', 'interface_name', 'mep_id', 'mac_address', 'probe_type', 'display_short', 'display_long', 'flr_calculation_interval'], name, value)
class SpecificOptions(Entity):
"""
Options specific to the type of operation
.. attribute:: configured_operation_options
Parameters for a configured operation
**type**\: :py:class:`ConfiguredOperationOptions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions.ConfiguredOperationOptions>`
.. attribute:: ondemand_operation_options
Parameters for an ondemand operation
**type**\: :py:class:`OndemandOperationOptions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions.OndemandOperationOptions>`
.. attribute:: oper_type
OperType
**type**\: :py:class:`SlaOperOperation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper.SlaOperOperation>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions, self).__init__()
self.yang_name = "specific-options"
self.yang_parent_name = "statistics-on-demand-historical"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("configured-operation-options", ("configured_operation_options", Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions.ConfiguredOperationOptions)), ("ondemand-operation-options", ("ondemand_operation_options", Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions.OndemandOperationOptions))])
self._leafs = OrderedDict([
('oper_type', (YLeaf(YType.enumeration, 'oper-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperOperation', '')])),
])
self.oper_type = None
self.configured_operation_options = Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions.ConfiguredOperationOptions()
self.configured_operation_options.parent = self
self._children_name_map["configured_operation_options"] = "configured-operation-options"
self.ondemand_operation_options = Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions.OndemandOperationOptions()
self.ondemand_operation_options.parent = self
self._children_name_map["ondemand_operation_options"] = "ondemand-operation-options"
self._segment_path = lambda: "specific-options"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-on-demand-historicals/statistics-on-demand-historical/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions, ['oper_type'], name, value)
class ConfiguredOperationOptions(Entity):
"""
Parameters for a configured operation
.. attribute:: profile_name
Name of the profile used by the operation
**type**\: str
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions.ConfiguredOperationOptions, self).__init__()
self.yang_name = "configured-operation-options"
self.yang_parent_name = "specific-options"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('profile_name', (YLeaf(YType.str, 'profile-name'), ['str'])),
])
self.profile_name = None
self._segment_path = lambda: "configured-operation-options"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-on-demand-historicals/statistics-on-demand-historical/specific-options/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions.ConfiguredOperationOptions, ['profile_name'], name, value)
class OndemandOperationOptions(Entity):
"""
Parameters for an ondemand operation
.. attribute:: ondemand_operation_id
ID of the ondemand operation
**type**\: int
**range:** 0..4294967295
.. attribute:: probe_count
Total number of probes sent during the operation
**type**\: int
**range:** 0..255
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions.OndemandOperationOptions, self).__init__()
self.yang_name = "ondemand-operation-options"
self.yang_parent_name = "specific-options"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('ondemand_operation_id', (YLeaf(YType.uint32, 'ondemand-operation-id'), ['int'])),
('probe_count', (YLeaf(YType.uint8, 'probe-count'), ['int'])),
])
self.ondemand_operation_id = None
self.probe_count = None
self._segment_path = lambda: "ondemand-operation-options"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-on-demand-historicals/statistics-on-demand-historical/specific-options/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.SpecificOptions.OndemandOperationOptions, ['ondemand_operation_id', 'probe_count'], name, value)
class OperationSchedule(Entity):
"""
Operation schedule
.. attribute:: start_time
Start time of the first probe, in seconds since the Unix Epoch
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: start_time_configured
Whether or not the operation start time was explicitly configured
**type**\: bool
.. attribute:: schedule_duration
Duration of a probe for the operation in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: schedule_interval
Interval between the start times of consecutive probes, in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationSchedule, self).__init__()
self.yang_name = "operation-schedule"
self.yang_parent_name = "statistics-on-demand-historical"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('start_time', (YLeaf(YType.uint32, 'start-time'), ['int'])),
('start_time_configured', (YLeaf(YType.boolean, 'start-time-configured'), ['bool'])),
('schedule_duration', (YLeaf(YType.uint32, 'schedule-duration'), ['int'])),
('schedule_interval', (YLeaf(YType.uint32, 'schedule-interval'), ['int'])),
])
self.start_time = None
self.start_time_configured = None
self.schedule_duration = None
self.schedule_interval = None
self._segment_path = lambda: "operation-schedule"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-on-demand-historicals/statistics-on-demand-historical/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationSchedule, ['start_time', 'start_time_configured', 'schedule_duration', 'schedule_interval'], name, value)
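# Usage sketch (kept as a comment so this generated module is unaffected):
# start-time is seconds since the Unix epoch, so it can be rendered with the
# standard library. 'sched' below is a hypothetical OperationSchedule instance
# read back from a device.
#
#     import datetime
#     started = datetime.datetime.fromtimestamp(sched.start_time,
#                                               tz=datetime.timezone.utc)
#     print("first probe started at", started.isoformat())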
class OperationMetric(Entity):
"""
Metrics gathered for the operation
.. attribute:: config
Configuration of the metric
**type**\: :py:class:`Config <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Config>`
.. attribute:: bucket
Buckets stored for the metric
**type**\: list of :py:class:`Bucket <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric, self).__init__()
self.yang_name = "operation-metric"
self.yang_parent_name = "statistics-on-demand-historical"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("config", ("config", Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Config)), ("bucket", ("bucket", Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket))])
self._leafs = OrderedDict()
self.config = Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Config()
self.config.parent = self
self._children_name_map["config"] = "config"
self.bucket = YList(self)
self._segment_path = lambda: "operation-metric"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-on-demand-historicals/statistics-on-demand-historical/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric, [], name, value)
class Config(Entity):
"""
Configuration of the metric
.. attribute:: metric_type
Type of metric to which this configuration applies
**type**\: :py:class:`SlaRecordableMetric <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper.SlaRecordableMetric>`
.. attribute:: bins_count
Total number of bins into which to aggregate. 0 if no aggregation
**type**\: int
**range:** 0..65535
.. attribute:: bins_width
Width of each bin into which to aggregate. 0 if no aggregation. For SLM, the units of this value are in single units of percent; for LMM they are in tenths of percent; for other measurements they are in milliseconds
**type**\: int
**range:** 0..65535
.. attribute:: bucket_size
Size of buckets into which measurements are collected
**type**\: int
**range:** 0..255
.. attribute:: bucket_size_unit
Whether bucket size is 'per\-probe' or 'probes'
**type**\: :py:class:`SlaBucketSize <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper.SlaBucketSize>`
.. attribute:: buckets_archive
Maximum number of buckets to store in memory
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Config, self).__init__()
self.yang_name = "config"
self.yang_parent_name = "operation-metric"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('metric_type', (YLeaf(YType.enumeration, 'metric-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaRecordableMetric', '')])),
('bins_count', (YLeaf(YType.uint16, 'bins-count'), ['int'])),
('bins_width', (YLeaf(YType.uint16, 'bins-width'), ['int'])),
('bucket_size', (YLeaf(YType.uint8, 'bucket-size'), ['int'])),
('bucket_size_unit', (YLeaf(YType.enumeration, 'bucket-size-unit'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaBucketSize', '')])),
('buckets_archive', (YLeaf(YType.uint32, 'buckets-archive'), ['int'])),
])
self.metric_type = None
self.bins_count = None
self.bins_width = None
self.bucket_size = None
self.bucket_size_unit = None
self.buckets_archive = None
self._segment_path = lambda: "config"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-on-demand-historicals/statistics-on-demand-historical/operation-metric/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Config, ['metric_type', 'bins_count', 'bins_width', 'bucket_size', 'bucket_size_unit', 'buckets_archive'], name, value)
class Bucket(Entity):
"""
Buckets stored for the metric
.. attribute:: contents
The contents of the bucket; bins or samples
**type**\: :py:class:`Contents <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents>`
.. attribute:: start_at
Absolute time at which the bucket started being filled
**type**\: int
**range:** 0..4294967295
.. attribute:: duration
Length of time for which the bucket is being filled, in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: sent
Number of packets sent in the probe
**type**\: int
**range:** 0..4294967295
.. attribute:: lost
Number of lost packets in the probe
**type**\: int
**range:** 0..4294967295
.. attribute:: corrupt
Number of corrupt packets in the probe
**type**\: int
**range:** 0..4294967295
.. attribute:: out_of_order
Number of packets received out\-of\-order in the probe
**type**\: int
**range:** 0..4294967295
.. attribute:: duplicates
Number of duplicate packets received in the probe
**type**\: int
**range:** 0..4294967295
.. attribute:: minimum
Overall minimum result in the probe, in microseconds or millionths of a percent
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: maximum
Overall maximum result in the probe, in microseconds or millionths of a percent
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: time_of_minimum
Absolute time that the minimum value was recorded
**type**\: int
**range:** 0..4294967295
.. attribute:: time_of_maximum
Absolute time that the maximum value was recorded
**type**\: int
**range:** 0..4294967295
.. attribute:: average
Mean of the results in the probe, in microseconds or millionths of a percent
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: standard_deviation
Standard deviation of the results in the probe, in microseconds or millionths of a percent
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: result_count
The count of samples collected in the bucket
**type**\: int
**range:** 0..4294967295
.. attribute:: data_sent_count
The number of data packets sent across the bucket, used in the calculation of overall FLR
**type**\: int
**range:** 0..4294967295
.. attribute:: data_lost_count
The number of data packets lost across the bucket, used in the calculation of overall FLR
**type**\: int
**range:** 0..4294967295
.. attribute:: overall_flr
Frame Loss Ratio across the whole bucket, in millionths of a percent
**type**\: int
**range:** \-2147483648..2147483647
**units**\: percentage
.. attribute:: suspect_start_mid_bucket
Results suspect due to a probe starting mid\-way through a bucket
**type**\: bool
.. attribute:: suspect_schedule_latency
Results suspect due to scheduling latency causing one or more packets to not be sent
**type**\: bool
.. attribute:: suspect_send_fail
Results suspect due to failure to send one or more packets
**type**\: bool
.. attribute:: suspect_premature_end
Results suspect due to a probe ending prematurely
**type**\: bool
.. attribute:: suspect_clock_drift
Results suspect as more than 10 seconds time drift detected
**type**\: bool
.. attribute:: suspect_memory_allocation_failed
Results suspect due to a memory allocation failure
**type**\: bool
.. attribute:: suspect_cleared_mid_bucket
Results suspect as bucket was cleared mid\-way through being filled
**type**\: bool
.. attribute:: suspect_probe_restarted
Results suspect as probe restarted mid\-way through the bucket
**type**\: bool
.. attribute:: suspect_management_latency
Results suspect as processing of results has been delayed
**type**\: bool
.. attribute:: suspect_multiple_buckets
Results suspect as the probe has been configured across multiple buckets
**type**\: bool
.. attribute:: suspect_misordering
Results suspect as misordering has been detected, affecting results
**type**\: bool
.. attribute:: suspect_flr_low_packet_count
Results suspect as FLR calculated based on a low packet count
**type**\: bool
.. attribute:: premature_reason
If the probe ended prematurely, the error that caused a probe to end
**type**\: int
**range:** 0..4294967295
.. attribute:: premature_reason_string
Description of the error code that caused the probe to end prematurely. For informational purposes only
**type**\: str
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket, self).__init__()
self.yang_name = "bucket"
self.yang_parent_name = "operation-metric"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("contents", ("contents", Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents))])
self._leafs = OrderedDict([
('start_at', (YLeaf(YType.uint32, 'start-at'), ['int'])),
('duration', (YLeaf(YType.uint32, 'duration'), ['int'])),
('sent', (YLeaf(YType.uint32, 'sent'), ['int'])),
('lost', (YLeaf(YType.uint32, 'lost'), ['int'])),
('corrupt', (YLeaf(YType.uint32, 'corrupt'), ['int'])),
('out_of_order', (YLeaf(YType.uint32, 'out-of-order'), ['int'])),
('duplicates', (YLeaf(YType.uint32, 'duplicates'), ['int'])),
('minimum', (YLeaf(YType.int32, 'minimum'), ['int'])),
('maximum', (YLeaf(YType.int32, 'maximum'), ['int'])),
('time_of_minimum', (YLeaf(YType.uint32, 'time-of-minimum'), ['int'])),
('time_of_maximum', (YLeaf(YType.uint32, 'time-of-maximum'), ['int'])),
('average', (YLeaf(YType.int32, 'average'), ['int'])),
('standard_deviation', (YLeaf(YType.int32, 'standard-deviation'), ['int'])),
('result_count', (YLeaf(YType.uint32, 'result-count'), ['int'])),
('data_sent_count', (YLeaf(YType.uint32, 'data-sent-count'), ['int'])),
('data_lost_count', (YLeaf(YType.uint32, 'data-lost-count'), ['int'])),
('overall_flr', (YLeaf(YType.int32, 'overall-flr'), ['int'])),
('suspect_start_mid_bucket', (YLeaf(YType.boolean, 'suspect-start-mid-bucket'), ['bool'])),
('suspect_schedule_latency', (YLeaf(YType.boolean, 'suspect-schedule-latency'), ['bool'])),
('suspect_send_fail', (YLeaf(YType.boolean, 'suspect-send-fail'), ['bool'])),
('suspect_premature_end', (YLeaf(YType.boolean, 'suspect-premature-end'), ['bool'])),
('suspect_clock_drift', (YLeaf(YType.boolean, 'suspect-clock-drift'), ['bool'])),
('suspect_memory_allocation_failed', (YLeaf(YType.boolean, 'suspect-memory-allocation-failed'), ['bool'])),
('suspect_cleared_mid_bucket', (YLeaf(YType.boolean, 'suspect-cleared-mid-bucket'), ['bool'])),
('suspect_probe_restarted', (YLeaf(YType.boolean, 'suspect-probe-restarted'), ['bool'])),
('suspect_management_latency', (YLeaf(YType.boolean, 'suspect-management-latency'), ['bool'])),
('suspect_multiple_buckets', (YLeaf(YType.boolean, 'suspect-multiple-buckets'), ['bool'])),
('suspect_misordering', (YLeaf(YType.boolean, 'suspect-misordering'), ['bool'])),
('suspect_flr_low_packet_count', (YLeaf(YType.boolean, 'suspect-flr-low-packet-count'), ['bool'])),
('premature_reason', (YLeaf(YType.uint32, 'premature-reason'), ['int'])),
('premature_reason_string', (YLeaf(YType.str, 'premature-reason-string'), ['str'])),
])
self.start_at = None
self.duration = None
self.sent = None
self.lost = None
self.corrupt = None
self.out_of_order = None
self.duplicates = None
self.minimum = None
self.maximum = None
self.time_of_minimum = None
self.time_of_maximum = None
self.average = None
self.standard_deviation = None
self.result_count = None
self.data_sent_count = None
self.data_lost_count = None
self.overall_flr = None
self.suspect_start_mid_bucket = None
self.suspect_schedule_latency = None
self.suspect_send_fail = None
self.suspect_premature_end = None
self.suspect_clock_drift = None
self.suspect_memory_allocation_failed = None
self.suspect_cleared_mid_bucket = None
self.suspect_probe_restarted = None
self.suspect_management_latency = None
self.suspect_multiple_buckets = None
self.suspect_misordering = None
self.suspect_flr_low_packet_count = None
self.premature_reason = None
self.premature_reason_string = None
self.contents = Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents()
self.contents.parent = self
self._children_name_map["contents"] = "contents"
self._segment_path = lambda: "bucket"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-on-demand-historicals/statistics-on-demand-historical/operation-metric/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket, ['start_at', 'duration', 'sent', 'lost', 'corrupt', 'out_of_order', 'duplicates', 'minimum', 'maximum', 'time_of_minimum', 'time_of_maximum', 'average', 'standard_deviation', 'result_count', 'data_sent_count', 'data_lost_count', 'overall_flr', 'suspect_start_mid_bucket', 'suspect_schedule_latency', 'suspect_send_fail', 'suspect_premature_end', 'suspect_clock_drift', 'suspect_memory_allocation_failed', 'suspect_cleared_mid_bucket', 'suspect_probe_restarted', 'suspect_management_latency', 'suspect_multiple_buckets', 'suspect_misordering', 'suspect_flr_low_packet_count', 'premature_reason', 'premature_reason_string'], name, value)
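# Sanity-check sketch (comment only; the formula is an assumption): taking
# overall FLR to be data_lost_count/data_sent_count expressed in millionths of
# a percent, as the leaf descriptions above suggest, a populated 'bucket'
# instance should roughly satisfy:
#
#     if bucket.data_sent_count:
#         flr_millionths = round(
#             bucket.data_lost_count / bucket.data_sent_count * 100 * 1000000)
#         # flr_millionths should approximate bucket.overall_flr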
class Contents(Entity):
"""
The contents of the bucket; bins or samples
.. attribute:: aggregated
Result bins in an SLA metric bucket
**type**\: :py:class:`Aggregated <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Aggregated>`
.. attribute:: unaggregated
Result samples in an SLA metric bucket
**type**\: :py:class:`Unaggregated <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Unaggregated>`
.. attribute:: bucket_type
BucketType
**type**\: :py:class:`SlaOperBucket <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper.SlaOperBucket>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents, self).__init__()
self.yang_name = "contents"
self.yang_parent_name = "bucket"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("aggregated", ("aggregated", Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Aggregated)), ("unaggregated", ("unaggregated", Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Unaggregated))])
self._leafs = OrderedDict([
('bucket_type', (YLeaf(YType.enumeration, 'bucket-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperBucket', '')])),
])
self.bucket_type = None
self.aggregated = Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Aggregated()
self.aggregated.parent = self
self._children_name_map["aggregated"] = "aggregated"
self.unaggregated = Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Unaggregated()
self.unaggregated.parent = self
self._children_name_map["unaggregated"] = "unaggregated"
self._segment_path = lambda: "contents"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-on-demand-historicals/statistics-on-demand-historical/operation-metric/bucket/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents, ['bucket_type'], name, value)
class Aggregated(Entity):
"""
Result bins in an SLA metric bucket
.. attribute:: bins
The bins of an SLA metric bucket
**type**\: list of :py:class:`Bins <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Aggregated.Bins>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Aggregated, self).__init__()
self.yang_name = "aggregated"
self.yang_parent_name = "contents"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("bins", ("bins", Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Aggregated.Bins))])
self._leafs = OrderedDict()
self.bins = YList(self)
self._segment_path = lambda: "aggregated"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-on-demand-historicals/statistics-on-demand-historical/operation-metric/bucket/contents/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Aggregated, [], name, value)
class Bins(Entity):
"""
The bins of an SLA metric bucket
.. attribute:: lower_bound
Lower bound (inclusive) of the bin, in milliseconds or single units of percent. This field is not used for LMM measurements
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: upper_bound
Upper bound (exclusive) of the bin, in milliseconds or single units of percent. This field is not used for LMM measurements
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: lower_bound_tenths
Lower bound (inclusive) of the bin, in tenths of percent. This field is only used for LMM measurements
**type**\: int
**range:** \-2147483648..2147483647
**units**\: percentage
.. attribute:: upper_bound_tenths
Upper bound (exclusive) of the bin, in tenths of percent. This field is only used for LMM measurements
**type**\: int
**range:** \-2147483648..2147483647
**units**\: percentage
.. attribute:: sum
The sum of the results in the bin, in microseconds or millionths of a percent
**type**\: int
**range:** \-9223372036854775808..9223372036854775807
.. attribute:: count
The total number of results in the bin
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Aggregated.Bins, self).__init__()
self.yang_name = "bins"
self.yang_parent_name = "aggregated"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('lower_bound', (YLeaf(YType.int32, 'lower-bound'), ['int'])),
('upper_bound', (YLeaf(YType.int32, 'upper-bound'), ['int'])),
('lower_bound_tenths', (YLeaf(YType.int32, 'lower-bound-tenths'), ['int'])),
('upper_bound_tenths', (YLeaf(YType.int32, 'upper-bound-tenths'), ['int'])),
('sum', (YLeaf(YType.int64, 'sum'), ['int'])),
('count', (YLeaf(YType.uint32, 'count'), ['int'])),
])
self.lower_bound = None
self.upper_bound = None
self.lower_bound_tenths = None
self.upper_bound_tenths = None
self.sum = None
self.count = None
self._segment_path = lambda: "bins"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-on-demand-historicals/statistics-on-demand-historical/operation-metric/bucket/contents/aggregated/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Aggregated.Bins, ['lower_bound', 'upper_bound', 'lower_bound_tenths', 'upper_bound_tenths', 'sum', 'count'], name, value)
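# Derivation sketch (comment only): each bin carries a running sum and a
# count, so a per-bin mean (in microseconds or millionths of a percent,
# matching 'sum') is sum/count. 'aggregated' is a hypothetical populated
# Aggregated node.
#
#     for b in aggregated.bins:
#         bin_mean = b.sum / b.count if b.count else None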
class Unaggregated(Entity):
"""
Result samples in an SLA metric bucket
.. attribute:: sample
The samples of an SLA metric bucket
**type**\: list of :py:class:`Sample <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Unaggregated.Sample>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Unaggregated, self).__init__()
self.yang_name = "unaggregated"
self.yang_parent_name = "contents"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("sample", ("sample", Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Unaggregated.Sample))])
self._leafs = OrderedDict()
self.sample = YList(self)
self._segment_path = lambda: "unaggregated"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-on-demand-historicals/statistics-on-demand-historical/operation-metric/bucket/contents/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Unaggregated, [], name, value)
class Sample(Entity):
"""
The samples of an SLA metric bucket
.. attribute:: sent_at
The time (in milliseconds relative to the start time of the bucket) at which the sample was sent
**type**\: int
**range:** 0..4294967295
**units**\: millisecond
.. attribute:: sent
Whether the sample packet was successfully sent
**type**\: bool
.. attribute:: timed_out
Whether the sample packet timed out
**type**\: bool
.. attribute:: corrupt
Whether the sample packet was corrupt
**type**\: bool
.. attribute:: out_of_order
Whether the sample packet was received out\-of\-order
**type**\: bool
.. attribute:: no_data_packets
Whether a measurement could not be made because no data packets were sent in the sample period. Only applicable for LMM measurements
**type**\: bool
.. attribute:: result
The result (in microseconds or millionths of a percent) of the sample, if available
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: frames_sent
For FLR measurements, the number of frames sent, if available
**type**\: int
**range:** 0..4294967295
.. attribute:: frames_lost
For FLR measurements, the number of frames lost, if available
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Unaggregated.Sample, self).__init__()
self.yang_name = "sample"
self.yang_parent_name = "unaggregated"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('sent_at', (YLeaf(YType.uint32, 'sent-at'), ['int'])),
('sent', (YLeaf(YType.boolean, 'sent'), ['bool'])),
('timed_out', (YLeaf(YType.boolean, 'timed-out'), ['bool'])),
('corrupt', (YLeaf(YType.boolean, 'corrupt'), ['bool'])),
('out_of_order', (YLeaf(YType.boolean, 'out-of-order'), ['bool'])),
('no_data_packets', (YLeaf(YType.boolean, 'no-data-packets'), ['bool'])),
('result', (YLeaf(YType.int32, 'result'), ['int'])),
('frames_sent', (YLeaf(YType.uint32, 'frames-sent'), ['int'])),
('frames_lost', (YLeaf(YType.uint32, 'frames-lost'), ['int'])),
])
self.sent_at = None
self.sent = None
self.timed_out = None
self.corrupt = None
self.out_of_order = None
self.no_data_packets = None
self.result = None
self.frames_sent = None
self.frames_lost = None
self._segment_path = lambda: "sample"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-on-demand-historicals/statistics-on-demand-historical/operation-metric/bucket/contents/unaggregated/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsOnDemandHistoricals.StatisticsOnDemandHistorical.OperationMetric.Bucket.Contents.Unaggregated.Sample, ['sent_at', 'sent', 'timed_out', 'corrupt', 'out_of_order', 'no_data_packets', 'result', 'frames_sent', 'frames_lost'], name, value)
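# Retrieval sketch (comment only): one way to read this subtree is YDK's
# CRUDService over NETCONF; the address and credentials below are
# placeholders.
#
#     from ydk.services import CRUDService
#     from ydk.providers import NetconfServiceProvider
#     from ydk.models.cisco_ios_xr import Cisco_IOS_XR_infra_sla_oper as sla_oper
#
#     provider = NetconfServiceProvider(address="192.0.2.1",
#                                       username="admin", password="admin")
#     sla = CRUDService().read(provider, sla_oper.Sla())
#     historicals = sla.protocols.ethernet.statistics_on_demand_historicals
#     for hist in historicals.statistics_on_demand_historical:
#         print(hist.operation_schedule.start_time)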
class ConfigErrors(Entity):
"""
Table of SLA configuration errors on configured
operations
.. attribute:: config_error
SLA operation for which to get configuration error data
**type**\: list of :py:class:`ConfigError <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.ConfigErrors.ConfigError>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.ConfigErrors, self).__init__()
self.yang_name = "config-errors"
self.yang_parent_name = "ethernet"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("config-error", ("config_error", Sla.Protocols.Ethernet.ConfigErrors.ConfigError))])
self._leafs = OrderedDict()
self.config_error = YList(self)
self._segment_path = lambda: "config-errors"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.ConfigErrors, [], name, value)
class ConfigError(Entity):
"""
SLA operation for which to get configuration error data
.. attribute:: profile_name
Profile Name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: domain_name
Domain name
**type**\: str
.. attribute:: interface_name
Interface name
**type**\: str
**pattern:** [a\-zA\-Z0\-9.\_/\-]+
.. attribute:: mep_id
MEP ID in the range 1 to 8191
**type**\: int
**range:** 1..8191
.. attribute:: mac_address
Unicast MAC Address in xxxx.xxxx.xxxx format
**type**\: str
**pattern:** [0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2}){5}
.. attribute:: profile_name_xr
The name of the operation profile
**type**\: str
.. attribute:: display_short
Short display name used by the operation
**type**\: str
.. attribute:: rt_delay_inconsistent
Is the profile configured to collect RT Delay but the packet type doesn't support it?
**type**\: bool
.. attribute:: ow_delay_sd_inconsistent
Is the profile configured to collect OW Delay (SD) but the packet type doesn't support it?
**type**\: bool
.. attribute:: ow_delay_ds_inconsistent
Is the profile configured to collect OW Delay (DS) but the packet type doesn't support it?
**type**\: bool
.. attribute:: rt_jitter_inconsistent
Is the profile configured to collect RT Jitter but the packet type doesn't support it?
**type**\: bool
.. attribute:: ow_jitter_sd_inconsistent
Is the profile configured to collect OW Jitter (SD) but the packet type doesn't support it?
**type**\: bool
.. attribute:: ow_jitter_ds_inconsistent
Is the profile configured to collect OW Jitter (DS) but the packet type doesn't support it?
**type**\: bool
.. attribute:: ow_loss_sd_inconsistent
Is the profile configured to collect OW Frame Loss (SD) but the packet type doesn't support it?
**type**\: bool
.. attribute:: ow_loss_ds_inconsistent
Is the profile configured to collect OW Frame Loss (DS) but the packet type doesn't support it?
**type**\: bool
.. attribute:: packet_pad_inconsistent
Is the profile configured to pad packets but the packet type doesn't support it?
**type**\: bool
.. attribute:: packet_rand_pad_inconsistent
Is the profile configured to pad packets with a pseudo\-random string but the packet type doesn't support it?
**type**\: bool
.. attribute:: min_packet_interval_inconsistent
Is the profile configured to send packets more frequently than the protocol allows?
**type**\: bool
.. attribute:: priority_inconsistent
Is the profile configured to use a packet priority scheme that the protocol does not support?
**type**\: bool
.. attribute:: packet_type_inconsistent
Is the profile configured to use a packet type that isn't supported by any protocols?
**type**\: bool
.. attribute:: profile_doesnt_exist
Is the operation configured to use a profile that is not currently defined for the protocol?
**type**\: bool
.. attribute:: synthetic_loss_not_supported
The profile is configured to use a packet type which doesn't support synthetic loss measurement, but the number of packets per FLR calculation has been configured
**type**\: bool
.. attribute:: probe_too_big
The profile is configured to use a packet type which does not allow more than 72000 packets per probe, but more than 72000 packets per probe have been configured
**type**\: bool
.. attribute:: error_string
Displays other issues not indicated from the flags above, for example MIB incompatibility issues
**type**\: list of str
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.ConfigErrors.ConfigError, self).__init__()
self.yang_name = "config-error"
self.yang_parent_name = "config-errors"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('profile_name', (YLeaf(YType.str, 'profile-name'), ['str'])),
('domain_name', (YLeaf(YType.str, 'domain-name'), ['str'])),
('interface_name', (YLeaf(YType.str, 'interface-name'), ['str'])),
('mep_id', (YLeaf(YType.uint32, 'mep-id'), ['int'])),
('mac_address', (YLeaf(YType.str, 'mac-address'), ['str'])),
('profile_name_xr', (YLeaf(YType.str, 'profile-name-xr'), ['str'])),
('display_short', (YLeaf(YType.str, 'display-short'), ['str'])),
('rt_delay_inconsistent', (YLeaf(YType.boolean, 'rt-delay-inconsistent'), ['bool'])),
('ow_delay_sd_inconsistent', (YLeaf(YType.boolean, 'ow-delay-sd-inconsistent'), ['bool'])),
('ow_delay_ds_inconsistent', (YLeaf(YType.boolean, 'ow-delay-ds-inconsistent'), ['bool'])),
('rt_jitter_inconsistent', (YLeaf(YType.boolean, 'rt-jitter-inconsistent'), ['bool'])),
('ow_jitter_sd_inconsistent', (YLeaf(YType.boolean, 'ow-jitter-sd-inconsistent'), ['bool'])),
('ow_jitter_ds_inconsistent', (YLeaf(YType.boolean, 'ow-jitter-ds-inconsistent'), ['bool'])),
('ow_loss_sd_inconsistent', (YLeaf(YType.boolean, 'ow-loss-sd-inconsistent'), ['bool'])),
('ow_loss_ds_inconsistent', (YLeaf(YType.boolean, 'ow-loss-ds-inconsistent'), ['bool'])),
('packet_pad_inconsistent', (YLeaf(YType.boolean, 'packet-pad-inconsistent'), ['bool'])),
('packet_rand_pad_inconsistent', (YLeaf(YType.boolean, 'packet-rand-pad-inconsistent'), ['bool'])),
('min_packet_interval_inconsistent', (YLeaf(YType.boolean, 'min-packet-interval-inconsistent'), ['bool'])),
('priority_inconsistent', (YLeaf(YType.boolean, 'priority-inconsistent'), ['bool'])),
('packet_type_inconsistent', (YLeaf(YType.boolean, 'packet-type-inconsistent'), ['bool'])),
('profile_doesnt_exist', (YLeaf(YType.boolean, 'profile-doesnt-exist'), ['bool'])),
('synthetic_loss_not_supported', (YLeaf(YType.boolean, 'synthetic-loss-not-supported'), ['bool'])),
('probe_too_big', (YLeaf(YType.boolean, 'probe-too-big'), ['bool'])),
('error_string', (YLeafList(YType.str, 'error-string'), ['str'])),
])
self.profile_name = None
self.domain_name = None
self.interface_name = None
self.mep_id = None
self.mac_address = None
self.profile_name_xr = None
self.display_short = None
self.rt_delay_inconsistent = None
self.ow_delay_sd_inconsistent = None
self.ow_delay_ds_inconsistent = None
self.rt_jitter_inconsistent = None
self.ow_jitter_sd_inconsistent = None
self.ow_jitter_ds_inconsistent = None
self.ow_loss_sd_inconsistent = None
self.ow_loss_ds_inconsistent = None
self.packet_pad_inconsistent = None
self.packet_rand_pad_inconsistent = None
self.min_packet_interval_inconsistent = None
self.priority_inconsistent = None
self.packet_type_inconsistent = None
self.profile_doesnt_exist = None
self.synthetic_loss_not_supported = None
self.probe_too_big = None
self.error_string = []
self._segment_path = lambda: "config-error"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/config-errors/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.ConfigErrors.ConfigError, ['profile_name', 'domain_name', 'interface_name', 'mep_id', 'mac_address', 'profile_name_xr', 'display_short', 'rt_delay_inconsistent', 'ow_delay_sd_inconsistent', 'ow_delay_ds_inconsistent', 'rt_jitter_inconsistent', 'ow_jitter_sd_inconsistent', 'ow_jitter_ds_inconsistent', 'ow_loss_sd_inconsistent', 'ow_loss_ds_inconsistent', 'packet_pad_inconsistent', 'packet_rand_pad_inconsistent', 'min_packet_interval_inconsistent', 'priority_inconsistent', 'packet_type_inconsistent', 'profile_doesnt_exist', 'synthetic_loss_not_supported', 'probe_too_big', 'error_string'], name, value)
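# Inspection sketch (comment only): 'errors' is a hypothetical ConfigErrors
# node read from a device (e.g. via the CRUDService pattern sketched earlier).
#
#     for err in errors.config_error:
#         if err.profile_doesnt_exist:
#             print(err.display_short, "references an undefined profile")
#         for msg in err.error_string:
#             print(err.display_short, msg)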
class OnDemandOperations(Entity):
"""
Table of SLA on\-demand operations
.. attribute:: on_demand_operation
SLA on\-demand operation for which to get operation data
**type**\: list of :py:class:`OnDemandOperation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.OnDemandOperations, self).__init__()
self.yang_name = "on-demand-operations"
self.yang_parent_name = "ethernet"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("on-demand-operation", ("on_demand_operation", Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation))])
self._leafs = OrderedDict()
self.on_demand_operation = YList(self)
self._segment_path = lambda: "on-demand-operations"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.OnDemandOperations, [], name, value)
class OnDemandOperation(Entity):
"""
SLA on\-demand operation for which to get operation data
.. attribute:: operation_id
Operation ID
**type**\: int
**range:** 1..4294967295
.. attribute:: domain_name
Domain name
**type**\: str
.. attribute:: interface_name
Interface name
**type**\: str
**pattern:** [a\-zA\-Z0\-9.\_/\-]+
.. attribute:: mep_id
MEP ID in the range 1 to 8191. Either MEP ID or MAC address must be specified
**type**\: int
**range:** 1..8191
.. attribute:: mac_address
Unicast MAC Address in xxxx.xxxx.xxxx format. Either MEP ID or MAC address must be specified
**type**\: str
**pattern:** [0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2}){5}
.. attribute:: profile_options
Options that are only valid if the operation has a profile
**type**\: :py:class:`ProfileOptions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions>`
.. attribute:: specific_options
Options specific to the type of operation
**type**\: :py:class:`SpecificOptions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions>`
.. attribute:: display_short
Short display name used by the operation
**type**\: str
.. attribute:: display_long
Long display name used by the operation
**type**\: str
.. attribute:: last_run
Time at which the last probe for the operation was run; NULL if never run
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation, self).__init__()
self.yang_name = "on-demand-operation"
self.yang_parent_name = "on-demand-operations"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("profile-options", ("profile_options", Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions)), ("specific-options", ("specific_options", Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions))])
self._leafs = OrderedDict([
('operation_id', (YLeaf(YType.uint32, 'operation-id'), ['int'])),
('domain_name', (YLeaf(YType.str, 'domain-name'), ['str'])),
('interface_name', (YLeaf(YType.str, 'interface-name'), ['str'])),
('mep_id', (YLeaf(YType.uint32, 'mep-id'), ['int'])),
('mac_address', (YLeaf(YType.str, 'mac-address'), ['str'])),
('display_short', (YLeaf(YType.str, 'display-short'), ['str'])),
('display_long', (YLeaf(YType.str, 'display-long'), ['str'])),
('last_run', (YLeaf(YType.uint32, 'last-run'), ['int'])),
])
self.operation_id = None
self.domain_name = None
self.interface_name = None
self.mep_id = None
self.mac_address = None
self.display_short = None
self.display_long = None
self.last_run = None
self.profile_options = Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions()
self.profile_options.parent = self
self._children_name_map["profile_options"] = "profile-options"
self.specific_options = Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions()
self.specific_options.parent = self
self._children_name_map["specific_options"] = "specific-options"
self._segment_path = lambda: "on-demand-operation"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/on-demand-operations/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation, ['operation_id', 'domain_name', 'interface_name', 'mep_id', 'mac_address', 'display_short', 'display_long', 'last_run'], name, value)
class ProfileOptions(Entity):
"""
Options that are only valid if the operation has
a profile
.. attribute:: packet_padding
Configuration of the packet padding
**type**\: :py:class:`PacketPadding <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.PacketPadding>`
.. attribute:: priority
Priority at which to send the packet, if configured
**type**\: :py:class:`Priority <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.Priority>`
.. attribute:: operation_schedule
Operation schedule
**type**\: :py:class:`OperationSchedule <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationSchedule>`
.. attribute:: probe_type
Type of probe used by the operation
**type**\: str
.. attribute:: packets_per_burst
Number of packets sent per burst
**type**\: int
**range:** 0..65535
.. attribute:: inter_packet_interval
Interval between packets within a burst in milliseconds
**type**\: int
**range:** 0..65535
**units**\: millisecond
.. attribute:: bursts_per_probe
Number of bursts sent per probe
**type**\: int
**range:** 0..4294967295
.. attribute:: inter_burst_interval
Interval between bursts within a probe in milliseconds
**type**\: int
**range:** 0..4294967295
**units**\: millisecond
.. attribute:: flr_calculation_interval
Interval between FLR calculations for SLM, in milliseconds
**type**\: int
**range:** 0..4294967295
**units**\: millisecond
.. attribute:: operation_metric
Array of the metrics that are measured by the operation
**type**\: list of :py:class:`OperationMetric <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationMetric>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions, self).__init__()
self.yang_name = "profile-options"
self.yang_parent_name = "on-demand-operation"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("packet-padding", ("packet_padding", Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.PacketPadding)), ("priority", ("priority", Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.Priority)), ("operation-schedule", ("operation_schedule", Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationSchedule)), ("operation-metric", ("operation_metric", Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationMetric))])
self._leafs = OrderedDict([
('probe_type', (YLeaf(YType.str, 'probe-type'), ['str'])),
('packets_per_burst', (YLeaf(YType.uint16, 'packets-per-burst'), ['int'])),
('inter_packet_interval', (YLeaf(YType.uint16, 'inter-packet-interval'), ['int'])),
('bursts_per_probe', (YLeaf(YType.uint32, 'bursts-per-probe'), ['int'])),
('inter_burst_interval', (YLeaf(YType.uint32, 'inter-burst-interval'), ['int'])),
('flr_calculation_interval', (YLeaf(YType.uint32, 'flr-calculation-interval'), ['int'])),
])
self.probe_type = None
self.packets_per_burst = None
self.inter_packet_interval = None
self.bursts_per_probe = None
self.inter_burst_interval = None
self.flr_calculation_interval = None
self.packet_padding = Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.PacketPadding()
self.packet_padding.parent = self
self._children_name_map["packet_padding"] = "packet-padding"
self.priority = Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.Priority()
self.priority.parent = self
self._children_name_map["priority"] = "priority"
self.operation_schedule = Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationSchedule()
self.operation_schedule.parent = self
self._children_name_map["operation_schedule"] = "operation-schedule"
self.operation_metric = YList(self)
self._segment_path = lambda: "profile-options"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/on-demand-operations/on-demand-operation/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions, ['probe_type', 'packets_per_burst', 'inter_packet_interval', 'bursts_per_probe', 'inter_burst_interval', 'flr_calculation_interval'], name, value)
class PacketPadding(Entity):
"""
Configuration of the packet padding
.. attribute:: packet_pad_size
Size to which packets are being padded
**type**\: int
**range:** 0..65535
.. attribute:: test_pattern_pad_scheme
Test pattern scheme that is used in the packet padding
**type**\: :py:class:`SlaOperTestPatternScheme <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper.SlaOperTestPatternScheme>`
.. attribute:: test_pattern_pad_hex_string
Hex string that is used in the packet padding
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.PacketPadding, self).__init__()
self.yang_name = "packet-padding"
self.yang_parent_name = "profile-options"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('packet_pad_size', (YLeaf(YType.uint16, 'packet-pad-size'), ['int'])),
('test_pattern_pad_scheme', (YLeaf(YType.enumeration, 'test-pattern-pad-scheme'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperTestPatternScheme', '')])),
('test_pattern_pad_hex_string', (YLeaf(YType.uint32, 'test-pattern-pad-hex-string'), ['int'])),
])
self.packet_pad_size = None
self.test_pattern_pad_scheme = None
self.test_pattern_pad_hex_string = None
self._segment_path = lambda: "packet-padding"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/on-demand-operations/on-demand-operation/profile-options/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.PacketPadding, ['packet_pad_size', 'test_pattern_pad_scheme', 'test_pattern_pad_hex_string'], name, value)
class Priority(Entity):
"""
Priority at which to send the packet, if
configured
.. attribute:: priority_type
PriorityType
**type**\: :py:class:`SlaOperPacketPriority <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper.SlaOperPacketPriority>`
.. attribute:: cos
3\-bit COS priority value applied to packets
**type**\: int
**range:** 0..255
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.Priority, self).__init__()
self.yang_name = "priority"
self.yang_parent_name = "profile-options"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('priority_type', (YLeaf(YType.enumeration, 'priority-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperPacketPriority', '')])),
('cos', (YLeaf(YType.uint8, 'cos'), ['int'])),
])
self.priority_type = None
self.cos = None
self._segment_path = lambda: "priority"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/on-demand-operations/on-demand-operation/profile-options/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.Priority, ['priority_type', 'cos'], name, value)
class OperationSchedule(Entity):
"""
Operation schedule
.. attribute:: start_time
Start time of the first probe, in seconds since the Unix Epoch
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: start_time_configured
Whether or not the operation start time was explicitly configured
**type**\: bool
.. attribute:: schedule_duration
Duration of a probe for the operation in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: schedule_interval
Interval between the start times of consecutive probes, in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationSchedule, self).__init__()
self.yang_name = "operation-schedule"
self.yang_parent_name = "profile-options"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('start_time', (YLeaf(YType.uint32, 'start-time'), ['int'])),
('start_time_configured', (YLeaf(YType.boolean, 'start-time-configured'), ['bool'])),
('schedule_duration', (YLeaf(YType.uint32, 'schedule-duration'), ['int'])),
('schedule_interval', (YLeaf(YType.uint32, 'schedule-interval'), ['int'])),
])
self.start_time = None
self.start_time_configured = None
self.schedule_duration = None
self.schedule_interval = None
self._segment_path = lambda: "operation-schedule"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/on-demand-operations/on-demand-operation/profile-options/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationSchedule, ['start_time', 'start_time_configured', 'schedule_duration', 'schedule_interval'], name, value)
class OperationMetric(Entity):
"""
Array of the metrics that are measured by the
operation
.. attribute:: metric_config
Configuration of the metric
**type**\: :py:class:`MetricConfig <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationMetric.MetricConfig>`
.. attribute:: current_buckets_archive
Number of valid buckets currently in the buckets archive
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationMetric, self).__init__()
self.yang_name = "operation-metric"
self.yang_parent_name = "profile-options"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("metric-config", ("metric_config", Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationMetric.MetricConfig))])
self._leafs = OrderedDict([
('current_buckets_archive', (YLeaf(YType.uint32, 'current-buckets-archive'), ['int'])),
])
self.current_buckets_archive = None
self.metric_config = Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationMetric.MetricConfig()
self.metric_config.parent = self
self._children_name_map["metric_config"] = "metric-config"
self._segment_path = lambda: "operation-metric"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/on-demand-operations/on-demand-operation/profile-options/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationMetric, ['current_buckets_archive'], name, value)
class MetricConfig(Entity):
"""
Configuration of the metric
.. attribute:: metric_type
Type of metric to which this configuration applies
**type**\: :py:class:`SlaRecordableMetric <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper.SlaRecordableMetric>`
.. attribute:: bins_count
Total number of bins into which to aggregate. 0 if no aggregation
**type**\: int
**range:** 0..65535
.. attribute:: bins_width
Width of each bin into which to aggregate. 0 if no aggregation. For SLM, the units of this value are in single units of percent; for LMM they are in tenths of percent; for other measurements they are in milliseconds
**type**\: int
**range:** 0..65535
.. attribute:: bucket_size
Size of buckets into which measurements are collected
**type**\: int
**range:** 0..255
.. attribute:: bucket_size_unit
Whether bucket size is 'per\-probe' or 'probes'
**type**\: :py:class:`SlaBucketSize <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper.SlaBucketSize>`
.. attribute:: buckets_archive
Maximum number of buckets to store in memory
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationMetric.MetricConfig, self).__init__()
self.yang_name = "metric-config"
self.yang_parent_name = "operation-metric"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('metric_type', (YLeaf(YType.enumeration, 'metric-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaRecordableMetric', '')])),
('bins_count', (YLeaf(YType.uint16, 'bins-count'), ['int'])),
('bins_width', (YLeaf(YType.uint16, 'bins-width'), ['int'])),
('bucket_size', (YLeaf(YType.uint8, 'bucket-size'), ['int'])),
('bucket_size_unit', (YLeaf(YType.enumeration, 'bucket-size-unit'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaBucketSize', '')])),
('buckets_archive', (YLeaf(YType.uint32, 'buckets-archive'), ['int'])),
])
self.metric_type = None
self.bins_count = None
self.bins_width = None
self.bucket_size = None
self.bucket_size_unit = None
self.buckets_archive = None
self._segment_path = lambda: "metric-config"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/on-demand-operations/on-demand-operation/profile-options/operation-metric/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.ProfileOptions.OperationMetric.MetricConfig, ['metric_type', 'bins_count', 'bins_width', 'bucket_size', 'bucket_size_unit', 'buckets_archive'], name, value)
class SpecificOptions(Entity):
"""
Options specific to the type of operation
.. attribute:: configured_operation_options
Parameters for a configured operation
**type**\: :py:class:`ConfiguredOperationOptions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions.ConfiguredOperationOptions>`
.. attribute:: ondemand_operation_options
Parameters for an ondemand operation
**type**\: :py:class:`OndemandOperationOptions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions.OndemandOperationOptions>`
.. attribute:: oper_type
OperType
**type**\: :py:class:`SlaOperOperation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper.SlaOperOperation>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions, self).__init__()
self.yang_name = "specific-options"
self.yang_parent_name = "on-demand-operation"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("configured-operation-options", ("configured_operation_options", Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions.ConfiguredOperationOptions)), ("ondemand-operation-options", ("ondemand_operation_options", Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions.OndemandOperationOptions))])
self._leafs = OrderedDict([
('oper_type', (YLeaf(YType.enumeration, 'oper-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperOperation', '')])),
])
self.oper_type = None
self.configured_operation_options = Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions.ConfiguredOperationOptions()
self.configured_operation_options.parent = self
self._children_name_map["configured_operation_options"] = "configured-operation-options"
self.ondemand_operation_options = Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions.OndemandOperationOptions()
self.ondemand_operation_options.parent = self
self._children_name_map["ondemand_operation_options"] = "ondemand-operation-options"
self._segment_path = lambda: "specific-options"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/on-demand-operations/on-demand-operation/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions, ['oper_type'], name, value)
class ConfiguredOperationOptions(Entity):
"""
Parameters for a configured operation
.. attribute:: profile_name
Name of the profile used by the operation
**type**\: str
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions.ConfiguredOperationOptions, self).__init__()
self.yang_name = "configured-operation-options"
self.yang_parent_name = "specific-options"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('profile_name', (YLeaf(YType.str, 'profile-name'), ['str'])),
])
self.profile_name = None
self._segment_path = lambda: "configured-operation-options"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/on-demand-operations/on-demand-operation/specific-options/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions.ConfiguredOperationOptions, ['profile_name'], name, value)
class OndemandOperationOptions(Entity):
"""
Parameters for an ondemand operation
.. attribute:: ondemand_operation_id
ID of the ondemand operation
**type**\: int
**range:** 0..4294967295
.. attribute:: probe_count
Total number of probes sent during the operation
**type**\: int
**range:** 0..255
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions.OndemandOperationOptions, self).__init__()
self.yang_name = "ondemand-operation-options"
self.yang_parent_name = "specific-options"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('ondemand_operation_id', (YLeaf(YType.uint32, 'ondemand-operation-id'), ['int'])),
('probe_count', (YLeaf(YType.uint8, 'probe-count'), ['int'])),
])
self.ondemand_operation_id = None
self.probe_count = None
self._segment_path = lambda: "ondemand-operation-options"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/on-demand-operations/on-demand-operation/specific-options/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.OnDemandOperations.OnDemandOperation.SpecificOptions.OndemandOperationOptions, ['ondemand_operation_id', 'probe_count'], name, value)
class StatisticsCurrents(Entity):
"""
Table of current statistics for SLA operations
.. attribute:: statistics_current
Current statistics data for an SLA configured operation
**type**\: list of :py:class:`StatisticsCurrent <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsCurrents, self).__init__()
self.yang_name = "statistics-currents"
self.yang_parent_name = "ethernet"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("statistics-current", ("statistics_current", Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent))])
self._leafs = OrderedDict()
self.statistics_current = YList(self)
self._segment_path = lambda: "statistics-currents"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsCurrents, [], name, value)
class StatisticsCurrent(Entity):
"""
Current statistics data for an SLA configured
operation
.. attribute:: profile_name
Profile Name
**type**\: str
**pattern:** [\\w\\\-\\.\:,\_@#%$\\+=\\\|;]+
.. attribute:: domain_name
Domain name
**type**\: str
.. attribute:: interface_name
Interface name
**type**\: str
**pattern:** [a\-zA\-Z0\-9.\_/\-]+
.. attribute:: mep_id
MEP ID in the range 1 to 8191. Either MEP ID or MAC address must be specified
**type**\: int
**range:** 1..8191
.. attribute:: mac_address
Unicast MAC Address in xxxx.xxxx.xxxx format. Either MEP ID or MAC address must be specified
**type**\: str
**pattern:** [0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2}){5}
.. attribute:: specific_options
Options specific to the type of operation
**type**\: :py:class:`SpecificOptions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions>`
.. attribute:: operation_schedule
Operation schedule
**type**\: :py:class:`OperationSchedule <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationSchedule>`
.. attribute:: probe_type
Type of probe used by the operation
**type**\: str
.. attribute:: display_short
Short display name used by the operation
**type**\: str
.. attribute:: display_long
Long display name used by the operation
**type**\: str
.. attribute:: flr_calculation_interval
Interval between FLR calculations for SLM, in milliseconds
**type**\: int
**range:** 0..4294967295
**units**\: millisecond
.. attribute:: operation_metric
Metrics gathered for the operation
**type**\: list of :py:class:`OperationMetric <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent, self).__init__()
self.yang_name = "statistics-current"
self.yang_parent_name = "statistics-currents"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("specific-options", ("specific_options", Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions)), ("operation-schedule", ("operation_schedule", Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationSchedule)), ("operation-metric", ("operation_metric", Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric))])
self._leafs = OrderedDict([
('profile_name', (YLeaf(YType.str, 'profile-name'), ['str'])),
('domain_name', (YLeaf(YType.str, 'domain-name'), ['str'])),
('interface_name', (YLeaf(YType.str, 'interface-name'), ['str'])),
('mep_id', (YLeaf(YType.uint32, 'mep-id'), ['int'])),
('mac_address', (YLeaf(YType.str, 'mac-address'), ['str'])),
('probe_type', (YLeaf(YType.str, 'probe-type'), ['str'])),
('display_short', (YLeaf(YType.str, 'display-short'), ['str'])),
('display_long', (YLeaf(YType.str, 'display-long'), ['str'])),
('flr_calculation_interval', (YLeaf(YType.uint32, 'flr-calculation-interval'), ['int'])),
])
self.profile_name = None
self.domain_name = None
self.interface_name = None
self.mep_id = None
self.mac_address = None
self.probe_type = None
self.display_short = None
self.display_long = None
self.flr_calculation_interval = None
self.specific_options = Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions()
self.specific_options.parent = self
self._children_name_map["specific_options"] = "specific-options"
self.operation_schedule = Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationSchedule()
self.operation_schedule.parent = self
self._children_name_map["operation_schedule"] = "operation-schedule"
self.operation_metric = YList(self)
self._segment_path = lambda: "statistics-current"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-currents/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent, ['profile_name', 'domain_name', 'interface_name', 'mep_id', 'mac_address', 'probe_type', 'display_short', 'display_long', 'flr_calculation_interval'], name, value)
class SpecificOptions(Entity):
"""
Options specific to the type of operation
.. attribute:: configured_operation_options
Parameters for a configured operation
**type**\: :py:class:`ConfiguredOperationOptions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions.ConfiguredOperationOptions>`
.. attribute:: ondemand_operation_options
Parameters for an ondemand operation
**type**\: :py:class:`OndemandOperationOptions <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions.OndemandOperationOptions>`
.. attribute:: oper_type
OperType
**type**\: :py:class:`SlaOperOperation <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper.SlaOperOperation>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions, self).__init__()
self.yang_name = "specific-options"
self.yang_parent_name = "statistics-current"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("configured-operation-options", ("configured_operation_options", Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions.ConfiguredOperationOptions)), ("ondemand-operation-options", ("ondemand_operation_options", Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions.OndemandOperationOptions))])
self._leafs = OrderedDict([
('oper_type', (YLeaf(YType.enumeration, 'oper-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperOperation', '')])),
])
self.oper_type = None
self.configured_operation_options = Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions.ConfiguredOperationOptions()
self.configured_operation_options.parent = self
self._children_name_map["configured_operation_options"] = "configured-operation-options"
self.ondemand_operation_options = Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions.OndemandOperationOptions()
self.ondemand_operation_options.parent = self
self._children_name_map["ondemand_operation_options"] = "ondemand-operation-options"
self._segment_path = lambda: "specific-options"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-currents/statistics-current/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions, ['oper_type'], name, value)
class ConfiguredOperationOptions(Entity):
"""
Parameters for a configured operation
.. attribute:: profile_name
Name of the profile used by the operation
**type**\: str
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions.ConfiguredOperationOptions, self).__init__()
self.yang_name = "configured-operation-options"
self.yang_parent_name = "specific-options"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('profile_name', (YLeaf(YType.str, 'profile-name'), ['str'])),
])
self.profile_name = None
self._segment_path = lambda: "configured-operation-options"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-currents/statistics-current/specific-options/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions.ConfiguredOperationOptions, ['profile_name'], name, value)
class OndemandOperationOptions(Entity):
"""
Parameters for an ondemand operation
.. attribute:: ondemand_operation_id
ID of the ondemand operation
**type**\: int
**range:** 0..4294967295
.. attribute:: probe_count
Total number of probes sent during the operation
**type**\: int
**range:** 0..255
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions.OndemandOperationOptions, self).__init__()
self.yang_name = "ondemand-operation-options"
self.yang_parent_name = "specific-options"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('ondemand_operation_id', (YLeaf(YType.uint32, 'ondemand-operation-id'), ['int'])),
('probe_count', (YLeaf(YType.uint8, 'probe-count'), ['int'])),
])
self.ondemand_operation_id = None
self.probe_count = None
self._segment_path = lambda: "ondemand-operation-options"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-currents/statistics-current/specific-options/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.SpecificOptions.OndemandOperationOptions, ['ondemand_operation_id', 'probe_count'], name, value)
class OperationSchedule(Entity):
"""
Operation schedule
.. attribute:: start_time
Start time of the first probe, in seconds since the Unix Epoch
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: start_time_configured
Whether or not the operation start time was explicitly configured
**type**\: bool
.. attribute:: schedule_duration
Duration of a probe for the operation in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: schedule_interval
Interval between the start times of consecutive probes, in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationSchedule, self).__init__()
self.yang_name = "operation-schedule"
self.yang_parent_name = "statistics-current"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('start_time', (YLeaf(YType.uint32, 'start-time'), ['int'])),
('start_time_configured', (YLeaf(YType.boolean, 'start-time-configured'), ['bool'])),
('schedule_duration', (YLeaf(YType.uint32, 'schedule-duration'), ['int'])),
('schedule_interval', (YLeaf(YType.uint32, 'schedule-interval'), ['int'])),
])
self.start_time = None
self.start_time_configured = None
self.schedule_duration = None
self.schedule_interval = None
self._segment_path = lambda: "operation-schedule"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-currents/statistics-current/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationSchedule, ['start_time', 'start_time_configured', 'schedule_duration', 'schedule_interval'], name, value)
class OperationMetric(Entity):
"""
Metrics gathered for the operation
.. attribute:: config
Configuration of the metric
**type**\: :py:class:`Config <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Config>`
.. attribute:: bucket
Buckets stored for the metric
**type**\: list of :py:class:`Bucket <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric, self).__init__()
self.yang_name = "operation-metric"
self.yang_parent_name = "statistics-current"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("config", ("config", Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Config)), ("bucket", ("bucket", Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket))])
self._leafs = OrderedDict()
self.config = Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Config()
self.config.parent = self
self._children_name_map["config"] = "config"
self.bucket = YList(self)
self._segment_path = lambda: "operation-metric"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-currents/statistics-current/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric, [], name, value)
class Config(Entity):
"""
Configuration of the metric
.. attribute:: metric_type
Type of metric to which this configuration applies
**type**\: :py:class:`SlaRecordableMetric <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper.SlaRecordableMetric>`
.. attribute:: bins_count
Total number of bins into which to aggregate. 0 if no aggregation
**type**\: int
**range:** 0..65535
.. attribute:: bins_width
Width of each bin into which to aggregate. 0 if no aggregation. For SLM, the units of this value are in single units of percent; for LMM they are in tenths of percent; for other measurements they are in milliseconds
**type**\: int
**range:** 0..65535
.. attribute:: bucket_size
Size of buckets into which measurements are collected
**type**\: int
**range:** 0..255
.. attribute:: bucket_size_unit
Whether bucket size is 'per\-probe' or 'probes'
**type**\: :py:class:`SlaBucketSize <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper.SlaBucketSize>`
.. attribute:: buckets_archive
Maximum number of buckets to store in memory
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Config, self).__init__()
self.yang_name = "config"
self.yang_parent_name = "operation-metric"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('metric_type', (YLeaf(YType.enumeration, 'metric-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaRecordableMetric', '')])),
('bins_count', (YLeaf(YType.uint16, 'bins-count'), ['int'])),
('bins_width', (YLeaf(YType.uint16, 'bins-width'), ['int'])),
('bucket_size', (YLeaf(YType.uint8, 'bucket-size'), ['int'])),
('bucket_size_unit', (YLeaf(YType.enumeration, 'bucket-size-unit'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaBucketSize', '')])),
('buckets_archive', (YLeaf(YType.uint32, 'buckets-archive'), ['int'])),
])
self.metric_type = None
self.bins_count = None
self.bins_width = None
self.bucket_size = None
self.bucket_size_unit = None
self.buckets_archive = None
self._segment_path = lambda: "config"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-currents/statistics-current/operation-metric/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Config, ['metric_type', 'bins_count', 'bins_width', 'bucket_size', 'bucket_size_unit', 'buckets_archive'], name, value)
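# Editorial note (not generated text): as an example of the fields above,
# bins_count=10 with bins_width=5 on a delay metric would typically aggregate
# results into ten consecutive 5 ms-wide bins, while bins_count=0 keeps raw
# samples unaggregated (see the Bucket.Contents.Unaggregated class below).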
class Bucket(Entity):
"""
Buckets stored for the metric
.. attribute:: contents
The contents of the bucket; bins or samples
**type**\: :py:class:`Contents <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents>`
.. attribute:: start_at
Absolute time at which the bucket started being filled
**type**\: int
**range:** 0..4294967295
.. attribute:: duration
Length of time for which the bucket is being filled in seconds
**type**\: int
**range:** 0..4294967295
**units**\: second
.. attribute:: sent
Number of packets sent in the probe
**type**\: int
**range:** 0..4294967295
.. attribute:: lost
Number of lost packets in the probe
**type**\: int
**range:** 0..4294967295
.. attribute:: corrupt
Number of corrupt packets in the probe
**type**\: int
**range:** 0..4294967295
.. attribute:: out_of_order
Number of packets received out\-of\-order in the probe
**type**\: int
**range:** 0..4294967295
.. attribute:: duplicates
Number of duplicate packets received in the probe
**type**\: int
**range:** 0..4294967295
.. attribute:: minimum
Overall minimum result in the probe, in microseconds or millionths of a percent
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: maximum
Overall maximum result in the probe, in microseconds or millionths of a percent
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: time_of_minimum
Absolute time that the minimum value was recorded
**type**\: int
**range:** 0..4294967295
.. attribute:: time_of_maximum
Absolute time that the maximum value was recorded
**type**\: int
**range:** 0..4294967295
.. attribute:: average
Mean of the results in the probe, in microseconds or millionths of a percent
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: standard_deviation
Standard deviation of the results in the probe, in microseconds or millionths of a percent
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: result_count
The count of samples collected in the bucket
**type**\: int
**range:** 0..4294967295
.. attribute:: data_sent_count
The number of data packets sent across the bucket, used in the calculation of overall FLR
**type**\: int
**range:** 0..4294967295
.. attribute:: data_lost_count
The number of data packets lost across the bucket, used in the calculation of overall FLR
**type**\: int
**range:** 0..4294967295
.. attribute:: overall_flr
Frame Loss Ratio across the whole bucket, in millionths of a percent
**type**\: int
**range:** \-2147483648..2147483647
**units**\: percentage
.. attribute:: suspect_start_mid_bucket
Results suspect due to a probe starting mid\-way through a bucket
**type**\: bool
.. attribute:: suspect_schedule_latency
Results suspect due to scheduling latency causing one or more packets to not be sent
**type**\: bool
.. attribute:: suspect_send_fail
Results suspect due to failure to send one or more packets
**type**\: bool
.. attribute:: suspect_premature_end
Results suspect due to a probe ending prematurely
**type**\: bool
.. attribute:: suspect_clock_drift
Results suspect as more than 10 seconds time drift detected
**type**\: bool
.. attribute:: suspect_memory_allocation_failed
Results suspect due to a memory allocation failure
**type**\: bool
.. attribute:: suspect_cleared_mid_bucket
Results suspect as bucket was cleared mid\-way through being filled
**type**\: bool
.. attribute:: suspect_probe_restarted
Results suspect as probe restarted mid\-way through the bucket
**type**\: bool
.. attribute:: suspect_management_latency
Results suspect as processing of results has been delayed
**type**\: bool
.. attribute:: suspect_multiple_buckets
Results suspect as the probe has been configured across multiple buckets
**type**\: bool
.. attribute:: suspect_misordering
Results suspect as misordering has been detected, affecting the results
**type**\: bool
.. attribute:: suspect_flr_low_packet_count
Results suspect as FLR calculated based on a low packet count
**type**\: bool
.. attribute:: premature_reason
If the probe ended prematurely, the error that caused a probe to end
**type**\: int
**range:** 0..4294967295
.. attribute:: premature_reason_string
Description of the error code that caused the probe to end prematurely. For informational purposes only
**type**\: str
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket, self).__init__()
self.yang_name = "bucket"
self.yang_parent_name = "operation-metric"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("contents", ("contents", Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents))])
self._leafs = OrderedDict([
('start_at', (YLeaf(YType.uint32, 'start-at'), ['int'])),
('duration', (YLeaf(YType.uint32, 'duration'), ['int'])),
('sent', (YLeaf(YType.uint32, 'sent'), ['int'])),
('lost', (YLeaf(YType.uint32, 'lost'), ['int'])),
('corrupt', (YLeaf(YType.uint32, 'corrupt'), ['int'])),
('out_of_order', (YLeaf(YType.uint32, 'out-of-order'), ['int'])),
('duplicates', (YLeaf(YType.uint32, 'duplicates'), ['int'])),
('minimum', (YLeaf(YType.int32, 'minimum'), ['int'])),
('maximum', (YLeaf(YType.int32, 'maximum'), ['int'])),
('time_of_minimum', (YLeaf(YType.uint32, 'time-of-minimum'), ['int'])),
('time_of_maximum', (YLeaf(YType.uint32, 'time-of-maximum'), ['int'])),
('average', (YLeaf(YType.int32, 'average'), ['int'])),
('standard_deviation', (YLeaf(YType.int32, 'standard-deviation'), ['int'])),
('result_count', (YLeaf(YType.uint32, 'result-count'), ['int'])),
('data_sent_count', (YLeaf(YType.uint32, 'data-sent-count'), ['int'])),
('data_lost_count', (YLeaf(YType.uint32, 'data-lost-count'), ['int'])),
('overall_flr', (YLeaf(YType.int32, 'overall-flr'), ['int'])),
('suspect_start_mid_bucket', (YLeaf(YType.boolean, 'suspect-start-mid-bucket'), ['bool'])),
('suspect_schedule_latency', (YLeaf(YType.boolean, 'suspect-schedule-latency'), ['bool'])),
('suspect_send_fail', (YLeaf(YType.boolean, 'suspect-send-fail'), ['bool'])),
('suspect_premature_end', (YLeaf(YType.boolean, 'suspect-premature-end'), ['bool'])),
('suspect_clock_drift', (YLeaf(YType.boolean, 'suspect-clock-drift'), ['bool'])),
('suspect_memory_allocation_failed', (YLeaf(YType.boolean, 'suspect-memory-allocation-failed'), ['bool'])),
('suspect_cleared_mid_bucket', (YLeaf(YType.boolean, 'suspect-cleared-mid-bucket'), ['bool'])),
('suspect_probe_restarted', (YLeaf(YType.boolean, 'suspect-probe-restarted'), ['bool'])),
('suspect_management_latency', (YLeaf(YType.boolean, 'suspect-management-latency'), ['bool'])),
('suspect_multiple_buckets', (YLeaf(YType.boolean, 'suspect-multiple-buckets'), ['bool'])),
('suspect_misordering', (YLeaf(YType.boolean, 'suspect-misordering'), ['bool'])),
('suspect_flr_low_packet_count', (YLeaf(YType.boolean, 'suspect-flr-low-packet-count'), ['bool'])),
('premature_reason', (YLeaf(YType.uint32, 'premature-reason'), ['int'])),
('premature_reason_string', (YLeaf(YType.str, 'premature-reason-string'), ['str'])),
])
self.start_at = None
self.duration = None
self.sent = None
self.lost = None
self.corrupt = None
self.out_of_order = None
self.duplicates = None
self.minimum = None
self.maximum = None
self.time_of_minimum = None
self.time_of_maximum = None
self.average = None
self.standard_deviation = None
self.result_count = None
self.data_sent_count = None
self.data_lost_count = None
self.overall_flr = None
self.suspect_start_mid_bucket = None
self.suspect_schedule_latency = None
self.suspect_send_fail = None
self.suspect_premature_end = None
self.suspect_clock_drift = None
self.suspect_memory_allocation_failed = None
self.suspect_cleared_mid_bucket = None
self.suspect_probe_restarted = None
self.suspect_management_latency = None
self.suspect_multiple_buckets = None
self.suspect_misordering = None
self.suspect_flr_low_packet_count = None
self.premature_reason = None
self.premature_reason_string = None
self.contents = Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents()
self.contents.parent = self
self._children_name_map["contents"] = "contents"
self._segment_path = lambda: "bucket"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-currents/statistics-current/operation-metric/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket, ['start_at', 'duration', 'sent', 'lost', 'corrupt', 'out_of_order', 'duplicates', 'minimum', 'maximum', 'time_of_minimum', 'time_of_maximum', 'average', 'standard_deviation', 'result_count', 'data_sent_count', 'data_lost_count', 'overall_flr', 'suspect_start_mid_bucket', 'suspect_schedule_latency', 'suspect_send_fail', 'suspect_premature_end', 'suspect_clock_drift', 'suspect_memory_allocation_failed', 'suspect_cleared_mid_bucket', 'suspect_probe_restarted', 'suspect_management_latency', 'suspect_multiple_buckets', 'suspect_misordering', 'suspect_flr_low_packet_count', 'premature_reason', 'premature_reason_string'], name, value)
class Contents(Entity):
"""
The contents of the bucket; bins or samples
.. attribute:: aggregated
Result bins in an SLA metric bucket
**type**\: :py:class:`Aggregated <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Aggregated>`
.. attribute:: unaggregated
Result samples in an SLA metric bucket
**type**\: :py:class:`Unaggregated <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Unaggregated>`
.. attribute:: bucket_type
BucketType
**type**\: :py:class:`SlaOperBucket <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper.SlaOperBucket>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents, self).__init__()
self.yang_name = "contents"
self.yang_parent_name = "bucket"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("aggregated", ("aggregated", Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Aggregated)), ("unaggregated", ("unaggregated", Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Unaggregated))])
self._leafs = OrderedDict([
('bucket_type', (YLeaf(YType.enumeration, 'bucket-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ethernet_cfm_oper', 'SlaOperBucket', '')])),
])
self.bucket_type = None
self.aggregated = Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Aggregated()
self.aggregated.parent = self
self._children_name_map["aggregated"] = "aggregated"
self.unaggregated = Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Unaggregated()
self.unaggregated.parent = self
self._children_name_map["unaggregated"] = "unaggregated"
self._segment_path = lambda: "contents"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-currents/statistics-current/operation-metric/bucket/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents, ['bucket_type'], name, value)
class Aggregated(Entity):
"""
Result bins in an SLA metric bucket
.. attribute:: bins
The bins of an SLA metric bucket
**type**\: list of :py:class:`Bins <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Aggregated.Bins>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Aggregated, self).__init__()
self.yang_name = "aggregated"
self.yang_parent_name = "contents"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("bins", ("bins", Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Aggregated.Bins))])
self._leafs = OrderedDict()
self.bins = YList(self)
self._segment_path = lambda: "aggregated"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-currents/statistics-current/operation-metric/bucket/contents/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Aggregated, [], name, value)
class Bins(Entity):
"""
The bins of an SLA metric bucket
.. attribute:: lower_bound
Lower bound (inclusive) of the bin, in milliseconds or single units of percent. This field is not used for LMM measurements
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: upper_bound
Upper bound (exclusive) of the bin, in milliseconds or single units of percent. This field is not used for LMM measurements
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: lower_bound_tenths
Lower bound (inclusive) of the bin, in tenths of percent. This field is only used for LMM measurements
**type**\: int
**range:** \-2147483648..2147483647
**units**\: percentage
.. attribute:: upper_bound_tenths
Upper bound (exclusive) of the bin, in tenths of percent. This field is only used for LMM measurements
**type**\: int
**range:** \-2147483648..2147483647
**units**\: percentage
.. attribute:: sum
The sum of the results in the bin, in microseconds or millionths of a percent
**type**\: int
**range:** \-9223372036854775808..9223372036854775807
.. attribute:: count
The total number of results in the bin
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Aggregated.Bins, self).__init__()
self.yang_name = "bins"
self.yang_parent_name = "aggregated"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('lower_bound', (YLeaf(YType.int32, 'lower-bound'), ['int'])),
('upper_bound', (YLeaf(YType.int32, 'upper-bound'), ['int'])),
('lower_bound_tenths', (YLeaf(YType.int32, 'lower-bound-tenths'), ['int'])),
('upper_bound_tenths', (YLeaf(YType.int32, 'upper-bound-tenths'), ['int'])),
('sum', (YLeaf(YType.int64, 'sum'), ['int'])),
('count', (YLeaf(YType.uint32, 'count'), ['int'])),
])
self.lower_bound = None
self.upper_bound = None
self.lower_bound_tenths = None
self.upper_bound_tenths = None
self.sum = None
self.count = None
self._segment_path = lambda: "bins"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-currents/statistics-current/operation-metric/bucket/contents/aggregated/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Aggregated.Bins, ['lower_bound', 'upper_bound', 'lower_bound_tenths', 'upper_bound_tenths', 'sum', 'count'], name, value)
class Unaggregated(Entity):
"""
Result samples in an SLA metric bucket
.. attribute:: sample
The samples of an SLA metric bucket
**type**\: list of :py:class:`Sample <ydk.models.cisco_ios_xr.Cisco_IOS_XR_infra_sla_oper.Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Unaggregated.Sample>`
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Unaggregated, self).__init__()
self.yang_name = "unaggregated"
self.yang_parent_name = "contents"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("sample", ("sample", Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Unaggregated.Sample))])
self._leafs = OrderedDict()
self.sample = YList(self)
self._segment_path = lambda: "unaggregated"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-currents/statistics-current/operation-metric/bucket/contents/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Unaggregated, [], name, value)
class Sample(Entity):
"""
The samples of an SLA metric bucket
.. attribute:: sent_at
The time (in milliseconds relative to the start time of the bucket) that the sample was sent at
**type**\: int
**range:** 0..4294967295
**units**\: millisecond
.. attribute:: sent
Whether the sample packet was successfully sent
**type**\: bool
.. attribute:: timed_out
Whether the sample packet timed out
**type**\: bool
.. attribute:: corrupt
Whether the sample packet was corrupt
**type**\: bool
.. attribute:: out_of_order
Whether the sample packet was received out\-of\-order
**type**\: bool
.. attribute:: no_data_packets
Whether a measurement could not be made because no data packets were sent in the sample period. Only applicable for LMM measurements
**type**\: bool
.. attribute:: result
The result (in microseconds or millionths of a percent) of the sample, if available
**type**\: int
**range:** \-2147483648..2147483647
.. attribute:: frames_sent
For FLR measurements, the number of frames sent, if available
**type**\: int
**range:** 0..4294967295
.. attribute:: frames_lost
For FLR measurements, the number of frames lost, if available
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'ethernet-cfm-oper'
_revision = '2017-10-06'
def __init__(self):
super(Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Unaggregated.Sample, self).__init__()
self.yang_name = "sample"
self.yang_parent_name = "unaggregated"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('sent_at', (YLeaf(YType.uint32, 'sent-at'), ['int'])),
('sent', (YLeaf(YType.boolean, 'sent'), ['bool'])),
('timed_out', (YLeaf(YType.boolean, 'timed-out'), ['bool'])),
('corrupt', (YLeaf(YType.boolean, 'corrupt'), ['bool'])),
('out_of_order', (YLeaf(YType.boolean, 'out-of-order'), ['bool'])),
('no_data_packets', (YLeaf(YType.boolean, 'no-data-packets'), ['bool'])),
('result', (YLeaf(YType.int32, 'result'), ['int'])),
('frames_sent', (YLeaf(YType.uint32, 'frames-sent'), ['int'])),
('frames_lost', (YLeaf(YType.uint32, 'frames-lost'), ['int'])),
])
self.sent_at = None
self.sent = None
self.timed_out = None
self.corrupt = None
self.out_of_order = None
self.no_data_packets = None
self.result = None
self.frames_sent = None
self.frames_lost = None
self._segment_path = lambda: "sample"
self._absolute_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla/protocols/Cisco-IOS-XR-ethernet-cfm-oper:ethernet/statistics-currents/statistics-current/operation-metric/bucket/contents/unaggregated/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Sla.Protocols.Ethernet.StatisticsCurrents.StatisticsCurrent.OperationMetric.Bucket.Contents.Unaggregated.Sample, ['sent_at', 'sent', 'timed_out', 'corrupt', 'out_of_order', 'no_data_packets', 'result', 'frames_sent', 'frames_lost'], name, value)
def clone_ptr(self):
self._top_entity = Sla()
return self._top_entity
class SlaNodes(Entity):
"""
SLA nodes
"""
_prefix = 'infra-sla-oper'
_revision = '2015-11-09'
def __init__(self):
super(SlaNodes, self).__init__()
self._top_entity = None
self.yang_name = "sla-nodes"
self.yang_parent_name = "Cisco-IOS-XR-infra-sla-oper"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict()
self._segment_path = lambda: "Cisco-IOS-XR-infra-sla-oper:sla-nodes"
self._is_frozen = True
def clone_ptr(self):
self._top_entity = SlaNodes()
return self._top_entity
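# ---------------------------------------------------------------------------
# Editorial usage sketch (not part of the generated bindings): a minimal
# ydk-py read of the operational SLA tree modelled above, assuming a
# NETCONF-enabled IOS-XR device. The address and credentials are placeholders.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    from ydk.services import CRUDService
    from ydk.providers import NetconfServiceProvider

    # Hypothetical device details; replace with real ones.
    provider = NetconfServiceProvider(address="198.51.100.1",
                                      username="admin", password="admin")
    crud = CRUDService()

    # Read the whole Cisco-IOS-XR-infra-sla-oper:sla tree and walk the
    # per-operation current statistics.
    sla = crud.read(provider, Sla())
    for stat in sla.protocols.ethernet.statistics_currents.statistics_current:
        print(stat.display_short, stat.probe_type)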
hexsha 6faea2d08e62d2a3c0add0ab0a9f80e7146aa7b7 | multiSeg/make_data/new_morph.py | vishalanand/MultiSeg @ 100318f18e61e53e2d5694ec6af600be0eec363f | MIT | Python | 9,766 bytes | 1 star (2020-09-30)
import json
from tqdm import tqdm
seg_morph_data = "../segmentations/morph_data/"
morph_maps = {
'list_SW_TL' : [
('../data/IARPA/SW/iarpa_sw.en', '../segmentations/morph_data/english-bitext-morph_sw.txt', '../segmentations/morph_map/sw_morph_3p_map.en', '../segmentations/morph_map/sw_morph_3_map.en'),
('../data/IARPA/SW/iarpa_sw.sw', '../segmentations/morph_data/swahili-bitext-morph_sw.txt', '../segmentations/morph_map/sw_morph_3p_map.sw', '../segmentations/morph_map/sw_morph_3_map.sw'),
('../data/IARPA/TL/iarpa_tl.en', '../segmentations/morph_data/english-bitext-morph_tl.txt', '../segmentations/morph_map/tl_morph_3p_map.en', '../segmentations/morph_map/tl_morph_3_map.en'),
('../data/IARPA/TL/iarpa_tl.tl', '../segmentations/morph_data/tagalog-bitext-morph_tl.txt', '../segmentations/morph_map/tl_morph_3p_map.tl', '../segmentations/morph_map/tl_morph_3_map.tl')],
'list_SW_TL_1_' : [
('../data/IARPA/SW/iarpa_sw.en', '../segmentations/morph_data/english-bitext-morph_sw.txt', '../segmentations/morph_map/sw_morph_3p_map.en', '../segmentations/morph_map/sw_morph_3_map.en')],
'list_morph_DE_12345' : [
('../data/Europarl/mini/1k_europarl.cldc.de', '../segmentations/morph_data/Europarl/mini/1k_europarl.cldc.de-seg.txt', '../segmentations/morph_map/Europarl/mini/1k_europarl.cldc.de-seg-3p_morph_map.de'),
('../data/Europarl/mini/1k_europarl.cldc.en', '../segmentations/morph_data/Europarl/mini/1k_europarl.cldc.en-seg.txt', '../segmentations/morph_map/Europarl/mini/1k_europarl.cldc.en-seg-3p_morph_map.en'),
('../data/Europarl/mini/1k_europarl.cldc.de', '../segmentations/morph_data/Europarl/mini/1k_europarl.cldc.de-seg-3.txt', '../segmentations/morph_map/Europarl/mini/1k_europarl.cldc.de-seg-3_morph_map.de'),
('../data/Europarl/mini/1k_europarl.cldc.en', '../segmentations/morph_data/Europarl/mini/1k_europarl.cldc.en-seg-3.txt', '../segmentations/morph_map/Europarl/mini/1k_europarl.cldc.en-seg-3_morph_map.en'),
('../data/Europarl/mini/10k_europarl.cldc.de', '../segmentations/morph_data/Europarl/mini/10k_europarl.cldc.de-seg.txt', '../segmentations/morph_map/Europarl/mini/10k_europarl.cldc.de-seg-3p_morph_map.de'),
('../data/Europarl/mini/10k_europarl.cldc.en', '../segmentations/morph_data/Europarl/mini/10k_europarl.cldc.en-seg.txt', '../segmentations/morph_map/Europarl/mini/10k_europarl.cldc.en-seg-3p_morph_map.en'),
('../data/Europarl/mini/10k_europarl.cldc.de', '../segmentations/morph_data/Europarl/mini/10k_europarl.cldc.de-seg-3.txt', '../segmentations/morph_map/Europarl/mini/10k_europarl.cldc.de-seg-3_morph_map.de'),
('../data/Europarl/mini/10k_europarl.cldc.en', '../segmentations/morph_data/Europarl/mini/10k_europarl.cldc.en-seg-3.txt', '../segmentations/morph_map/Europarl/mini/10k_europarl.cldc.en-seg-3_morph_map.en'),
('../data/Europarl/mini/100k_europarl.cldc.de', '../segmentations/morph_data/Europarl/mini/100k_europarl.cldc.de-seg.txt', '../segmentations/morph_map/Europarl/mini/100k_europarl.cldc.de-seg-3p_morph_map.de'),
('../data/Europarl/mini/100k_europarl.cldc.en', '../segmentations/morph_data/Europarl/mini/100k_europarl.cldc.en-seg.txt', '../segmentations/morph_map/Europarl/mini/100k_europarl.cldc.en-seg-3p_morph_map.en'),
('../data/Europarl/mini/100k_europarl.cldc.de', '../segmentations/morph_data/Europarl/mini/100k_europarl.cldc.de-seg-3.txt', '../segmentations/morph_map/Europarl/mini/100k_europarl.cldc.de-seg-3_morph_map.de'),
('../data/Europarl/mini/100k_europarl.cldc.en', '../segmentations/morph_data/Europarl/mini/100k_europarl.cldc.en-seg-3.txt', '../segmentations/morph_map/Europarl/mini/100k_europarl.cldc.en-seg-3_morph_map.en'),
('../data/Europarl/europarl.cldc.de', '../segmentations/morph_data/Europarl/europarl.cldc.de-seg.txt', '../segmentations/morph_map/Europarl/europarl.cldc.de-seg-3p_morph_map.de'),
('../data/Europarl/europarl.cldc.en', '../segmentations/morph_data/Europarl/europarl.cldc.en-seg.txt', '../segmentations/morph_map/Europarl/europarl.cldc.en-seg-3p_morph_map.en'),
('../data/Europarl/europarl.cldc.de', '../segmentations/morph_data/Europarl/europarl.cldc.de-seg-3.txt', '../segmentations/morph_map/Europarl/europarl.cldc.de-seg-3_morph_map.de'),
('../data/Europarl/europarl.cldc.en', '../segmentations/morph_data/Europarl/europarl.cldc.en-seg-3.txt', '../segmentations/morph_map/Europarl/europarl.cldc.en-seg-3_morph_map.en')],
'list_morph_DE_2_' : [
('../data/Europarl/mini/1k_europarl.cldc.de', '../segmentations/morph_data/Europarl/mini/1k_europarl.cldc.de-seg.txt', '../segmentations/morph_map/Europarl/mini/1k_europarl.cldc.de-seg-3p_morph_map.de')
]
}
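# Editorial note: the branches below consume two different segmentation
# formats (inferred from the parsing code, not stated in the original):
#   * 'list_morph_DE*' entries (3-tuples of src/seg/map paths): one line of
#     '+'-joined morphemes per source line, with each stem wrapped in
#     parentheses, e.g. "un+(break)+able".
#   * 'list_SW_TL*' entries (4-tuples): one JSON document per line, a list of
#     sentences whose words carry "word", "prefixes", "stem" and "suffixes"
#     fields, morphemes joined by '+'.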
for list_morph in morph_maps:
# NOTE (editorial): selection is by exact key name; the suffixed keys
# 'list_morph_DE_12345', 'list_morph_DE_2_' and 'list_SW_TL_1_' appear to be
# deliberately disabled variants, so only the 'list_SW_TL' branch runs.
if(list_morph == 'list_morph_DE'):
for file_src, file_in, file_out in morph_maps[list_morph]:
print("{}\t{}\t{}\t{}".format(list_morph, file_src, file_in, file_out))
f1_src = open(file_src, "r")
f_o_word_recreate = open(file_out + ".word_recreate.txt", 'w')
f_o_stem_recreate = open(file_out + ".stem_recreate.txt", 'w')
word_map = {}
for line in tqdm(open(file_in)):
new_recreate_word_line = ""
new_recreate_stem_line = ""
line_src = f1_src.readline()
#line, line_src = line.strip(), line_src.strip()
line_split, line_src_split = line.split(), line_src.split()
assert(len(line_split) == len(line_src_split))
for word_id in range(len(line_src_split)):
word_map[line_src_split[word_id]] = [line_src_split[word_id]]
if line_split[word_id].startswith('(') and line_split[word_id].endswith(')'):
word_map[line_src_split[word_id]].append(line_split[word_id][1:-1])
new_recreate_stem_line = new_recreate_stem_line + line_split[word_id][1:-1]
new_recreate_word_line = new_recreate_word_line + line_split[word_id][1:-1]
else:
morph_split = line_split[word_id].split('+')
for morphing in morph_split:
if morphing.startswith('(') and morphing.endswith(')'):
word_map[line_src_split[word_id]].append(morphing[1:-1])
new_recreate_word_line = new_recreate_word_line + morphing[1:-1]
if(morphing == "()"):
morphing = "(<empty>)"
new_recreate_stem_line = new_recreate_stem_line + morphing[1:-1]
else:
word_map[line_src_split[word_id]].append(morphing)
new_recreate_word_line = new_recreate_word_line + morphing
new_recreate_stem_line = new_recreate_stem_line + ' '
new_recreate_word_line = new_recreate_word_line + ' '
f_o_stem_recreate.write(new_recreate_stem_line[:-1] + "\n")
f_o_word_recreate.write(new_recreate_word_line[:-1] + "\n")
with open(file_out, 'w') as f_o:
for key, value in word_map.items():
for word_morph_parts in value:
f_o.write(word_morph_parts + "\t")
f_o.write('\n')
f_o_stem_recreate.close()
f_o_word_recreate.close()
elif(list_morph == 'list_SW_TL'):
for file_src, file_in, file_out_3p, file_out_3 in morph_maps[list_morph]:
print("{}\t{}\t{}\t{}".format(list_morph, file_in, file_out_3p, file_out_3))
word_map = {}
word_map_3 = {}
f1_src = open(file_src, "r")
f_o_word_recreate = open(file_out_3p + ".word_recreate.txt", "w")
f_o_stem_recreate = open(file_out_3p + ".stem_recreate.txt", "w")
for line in open(file_in):
a = json.loads(line)
new_recreate_stem_line = ""
new_recreate_word_line = ""
for sentence_id in range(len(a)):
for word_id in range(len(a[sentence_id])):
prefix = ""
stem = ""
postfix = ""
word = a[sentence_id][word_id]["word"]
word = word.replace("-LRB-", "(").replace("-RRB-", ")")
word = '\'' if word == '`' else word
word_map[word] = [word]
word_map_3[word] = [word]
if(a[sentence_id][word_id]["prefixes"]):
a[sentence_id][word_id]["prefixes"] = a[sentence_id][word_id]["prefixes"].replace("-LRB-", "(").replace("-RRB-", ")")
for word_abc in a[sentence_id][word_id]["prefixes"].split('+'):
word_map[word].append(word_abc)
prefix = prefix + word_abc
word_map_3[word].append(prefix)
if(a[sentence_id][word_id]["stem"]):
stem = a[sentence_id][word_id]["stem"].replace("-LRB-", "(").replace("-RRB-", ")")
#stem = '\'' if stem == '`' else stem
word_map[word].append(stem)
word_map_3[word].append(stem)
if(stem == ""):
stem = "<EMPTY>"
if(a[sentence_id][word_id]["suffixes"]):
a[sentence_id][word_id]["suffixes"] = a[sentence_id][word_id]["suffixes"].replace("-LRB-", "(").replace("-RRB-", ")")
for word_abc in a[sentence_id][word_id]["suffixes"].split('+'):
word_map[word].append(word_abc)
postfix = postfix + word_abc
word_map_3[word].append(postfix)
new_recreate_stem_line = new_recreate_stem_line + stem + ' '
#new_recreate_word_line = new_recreate_word_line + prefix + stem + postfix + ' '
new_recreate_word_line = new_recreate_word_line + word + ' '
# new_recreate_word_line = new_recreate_word_line.replace('`` ', '\'\'')
# new_recreate_stem_line = new_recreate_stem_line.replace('`` ', '\'\'')
# new_recreate_word_line = new_recreate_word_line.replace(' ` ', ' \' ')
# new_recreate_stem_line = new_recreate_stem_line.replace(' ` ', ' \' ')
f_o_stem_recreate.write(new_recreate_stem_line[:-1] + "\n")
f_o_word_recreate.write(new_recreate_word_line[:-1] + "\n")
with open(file_out_3p, 'w') as f_o:
for key, value in word_map.items():
for word_morph_parts in value:
f_o.write(word_morph_parts + "\t")
f_o.write('\n')
with open(file_out_3, 'w') as f_o:
for key, value in word_map_3.items():
for word_morph_parts in value:
f_o.write(word_morph_parts + "\t")
f_o.write('\n')
f_o_stem_recreate.close()
f_o_word_recreate.close()
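# Editorial sketch (not part of the original script): the token-level rule
# used by the 'list_morph_DE' branch above, extracted into a self-contained
# function. Pieces are split on '+'; parenthesised pieces are stems, and an
# empty '()' contributes '<empty>' to the stem, as in that branch.
def split_morph_token(token):
    """Return (surface_form, stem) for a token such as 'un+(break)+able'."""
    surface, stem = "", ""
    for piece in token.split('+'):
        if piece.startswith('(') and piece.endswith(')'):
            inner = piece[1:-1]
            surface += inner
            stem += inner if inner else "<empty>"
        else:
            surface += piece
    return surface, stem

assert split_morph_token("un+(break)+able") == ("unbreakable", "break")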
hexsha 6fe239ed9c1d8217d6f2e85793e3b7824063cb76 | src/idom/web/__init__.py | jmtaysom/idom @ d2a569d27f915d3b2b1fc6eb8eef9aca3a6d9343 | MIT | Python | 260 bytes | 55 stars, 72 issues, 7 forks
from .module import (
export,
module_from_file,
module_from_string,
module_from_template,
module_from_url,
)
__all__ = [
"module_from_file",
"module_from_string",
"module_from_template",
"module_from_url",
"export",
]
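# Editorial usage sketch (commented out; not part of this package __init__).
# The re-exported helpers above are typically used like so; the URL and
# component name are placeholders:
#
#   import idom
#
#   victory = idom.web.module_from_url("https://esm.sh/victory-bar")
#   VictoryBar = idom.web.export(victory, "VictoryBar")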
hexsha 6fff45ad9f0504c097405090223224748825184b | sandbox/payment/models.py | thelabnyc/django-oscar-cybersource @ 510ecdc045edcf93ff7a62a120cb1eeaa56f40a9 | 0BSD | Python | 234 bytes | 3 stars, 24 issues, 3 forks
from oscar.apps.payment.abstract_models import AbstractTransaction
from cybersource.models import TransactionMixin
class Transaction(TransactionMixin, AbstractTransaction):
pass
from oscar.apps.payment.models import * # noqa
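# Editorial note: this is django-oscar's documented "fork an app" pattern;
# the custom Transaction above is declared first, then the star-import pulls
# in every other payment model unchanged from oscar.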
hexsha b5035093f827bdcd043e99f04153014c778ca627 | pair_fast_forcast/pairwise_fusion_kd/utils/FaFModule.py | Chezacar/CollaborationWithLatency @ da06abea16f1ffcafc35d27cb69ae3116a345965 | MIT | Python | 50,831 bytes
import os
import torch.nn.functional as F
import torch.nn as nn
import torch
from utils.FaFModel import FaFNet,FaFMIMONet_512_16_16, FaFMIMONet_512_16_16_KD, FaFMIMONet_256_32_32, FaFMIMONet_256_32_32_KD, FaFMIMONet_128_64_64, \
FaFMIMONet_128_64_64_KD, FaFMIMONet_64_128_128,FaFMIMONet_32_256_256, FaFMIMONet_layer_3_and_4, FeatEncoder,FaFMGDA
from utils.detection_util import *
from utils.min_norm_solvers import MinNormSolver
import numpy
import matplotlib.pyplot as plt
from data.obj_util import coor_to_vis
class FaFModule(object):
def __init__(self, model,config,optimizer,criterion):
self.MGDA = config.MGDA
if self.MGDA:
self.encoder = model[0]
self.head = model[1]
self.optimizer_encoder = optimizer[0]
self.optimizer_head = optimizer[1]
self.scheduler_encoder = torch.optim.lr_scheduler.MultiStepLR(self.optimizer_encoder, milestones=[50, 100, 150, 200], gamma=0.5)
self.scheduler_head = torch.optim.lr_scheduler.MultiStepLR(self.optimizer_head, milestones=[50, 100, 150, 200], gamma=0.5)
self.MGDA = config.MGDA
else:
self.model = model
self.optimizer = optimizer
self.scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=[50, 100, 150, 200], gamma=0.5)
self.criterion = criterion  # dict of loss functions, keyed 'cls' and 'loc'
self.out_seq_len = config.pred_len
self.category_num = config.category_num
self.code_size = config.box_code_size
self.loss_scale = None
self.code_type = config.code_type
self.loss_type= config.loss_type
self.pred_len = config.pred_len
self.only_det = config.only_det
if self.code_type in ['corner_1','corner_2','corner_3']:
self.alpha = 1.
elif self.code_type == 'faf':
if self.loss_type == 'corner_loss':
self.alpha= 1.
if not self.only_det:
self.alpha = 1.
else:
self.alpha = 0.1
self.config = config
def resume(self,path):
def map_func(storage, location):
return storage.cuda()
if os.path.isfile(path):
if rank == 0:  # NOTE (editorial): 'rank' (distributed process rank) is assumed to be supplied by the surrounding training setup; it is not defined in this file
print("=> loading checkpoint '{}'".format(path))
checkpoint = torch.load(path, map_location=map_func)
self.model.load_state_dict(checkpoint['state_dict'], strict=False)
ckpt_keys = set(checkpoint['state_dict'].keys())
own_keys = set(self.model.state_dict().keys())
missing_keys = own_keys - ckpt_keys
for k in missing_keys:
print('caution: missing keys from checkpoint {}: {}'.format(path, k))
else:
print("=> no checkpoint found at '{}'".format(path))
def corner_loss(self,anchors,reg_loss_mask,reg_targets,pred_result):
N = pred_result.shape[0]
anchors = anchors.unsqueeze(-2).expand(anchors.shape[0],anchors.shape[1],anchors.shape[2],anchors.shape[3],reg_loss_mask.shape[-1],anchors.shape[-1])
assigned_anchor = anchors[reg_loss_mask]
assigned_target = reg_targets[reg_loss_mask]
assigned_pred = pred_result[reg_loss_mask]
#print(assigned_anchor.shape,assigned_pred.shape,assigned_target.shape)
#exit()
pred_decode = bev_box_decode_torch(assigned_pred,assigned_anchor)
target_decode = bev_box_decode_torch(assigned_target,assigned_anchor)
pred_corners = center_to_corner_box2d_torch(pred_decode[...,:2],pred_decode[...,2:4],pred_decode[...,4:])
target_corners = center_to_corner_box2d_torch(target_decode[...,:2],target_decode[...,2:4],target_decode[...,4:])
loss_loc = torch.sum(torch.norm(pred_corners-target_corners,dim=-1)) / N
return loss_loc
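    # Computes classification / localization (and optional motion-state) losses.
    # Returns a tuple whose first element, loss_num, tells the caller how many
    # loss terms follow: 2 -> (cls, loc), 3 -> (cls, loc_1, loc_2),
    # 4 -> (cls, loc_1, loc_2, motion).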
def loss_calculator(self,result,anchors,reg_loss_mask,reg_targets,labels,N,motion_labels = None,motion_mask=None):
        loss_num = 0
# calculate loss
        weights = torch.Tensor([0.005, 1.0, 1.0, 1.0, 1.0]).cuda().double()  # defined but unused below
loss_cls = torch.sum(self.criterion['cls'](result['cls'],labels)) /N
loss_num += 1
#loss_loc = torch.sum(self.criterion['loc'](result['loc'],reg_targets,mask = reg_loss_mask)) / N
#Motion state
        if motion_labels is not None:
loss_motion = torch.sum(self.criterion['cls'](result['state'],motion_labels)) /N
loss_num += 1
loss_mask_num = torch.nonzero(reg_loss_mask.contiguous().view(-1,reg_loss_mask.shape[-1])).size(0)
#print(loss_mask_num)
#print(torch.sum(reg_targets[:,:,:,:,0][reg_loss_mask[:,:,:,:,2]]))
if self.code_type in ['corner_1','corner_2','corner_3']:
target = reg_targets[reg_loss_mask].reshape(-1,5,2)
flip_target = torch.stack([target[:,0],target[:,3],target[:,4],target[:,1],target[:,2]],dim=-2)
pred = result['loc'][reg_loss_mask].reshape(-1,5,2)
t = torch.sum(torch.norm(pred-target,dim=-1),dim=-1)
f = torch.sum(torch.norm(pred-flip_target,dim=-1),dim=-1)
loss_loc = torch.sum(torch.min(t,f)) / N
elif self.code_type == 'faf':
if self.loss_type == 'corner_loss':
if self.only_det:
loss_loc = self.corner_loss(anchors,reg_loss_mask,reg_targets,result['loc'])
loss_num += 1
elif self.config.pred_type in ['motion','center']:
###only center/motion for pred
loss_loc_1 = self.corner_loss(anchors,reg_loss_mask[...,0][...,[0]],reg_targets[...,[0],:],result['loc'][...,[0],:])
pred_reg_loss_mask = reg_loss_mask[...,1:,:]
if self.config.motion_state:
pred_reg_loss_mask = motion_mask #mask out static object
loss_loc_2 = F.smooth_l1_loss(result['loc'][...,1:,:][pred_reg_loss_mask],reg_targets[...,1:,:][pred_reg_loss_mask])
loss_loc = loss_loc_1 + loss_loc_2
loss_num += 2
###corners for pred
else:
loss_loc = self.corner_loss(anchors,reg_loss_mask,reg_targets,result['loc'])
loss_num += 1
else:
loss_loc = F.smooth_l1_loss(result['loc'][reg_loss_mask],reg_targets[reg_loss_mask])
loss_num += 1
if self.loss_scale is not None:
if len(self.loss_scale)==4:
loss = self.loss_scale[0]*loss_cls + self.loss_scale[1]*loss_loc_1 + self.loss_scale[2]*loss_loc_2 + self.loss_scale[3]*loss_motion
elif len(self.loss_scale)==3:
loss = self.loss_scale[0]*loss_cls + self.loss_scale[1]*loss_loc_1 + self.loss_scale[2]*loss_loc_2
else:
loss = self.loss_scale[0]*loss_cls + self.loss_scale[1]*loss_loc
        elif motion_labels is not None:
loss = loss_cls + loss_loc + loss_motion
else:
loss = loss_cls + loss_loc
if loss_num == 2:
return (loss_num,loss, loss_cls,loss_loc)
elif loss_num == 3:
return (loss_num,loss, loss_cls,loss_loc_1,loss_loc_2)
elif loss_num == 4:
return (loss_num,loss, loss_cls,loss_loc_1,loss_loc_2,loss_motion)
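    # One training step. Frame timestamps are parsed from the per-agent file
    # names to form the latency offsets (delta_t) that the latency-aware fusion
    # model consumes; the model is then run, losses computed, and weights updated.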
def step(self,data,batch_size, center_agent, forcast_num):
bev_seq = data['bev_seq']
labels = data['labels']
reg_targets = data['reg_targets']
reg_loss_mask = data['reg_loss_mask']
anchors = data['anchors']
vis_maps = data['vis_maps']
trans_matrices = data['trans_matrices']
num_agent = data['num_agent']
newest_time = torch.zeros((batch_size, 1))
delta_t = []
        for b in range(batch_size):
            # newest frame timestamp of the center agent, parsed from its file name
            newest_time[b] = int(data['file_name'][center_agent[b]][b].split(',')[-1].strip('/').split('_')[-1])
            delta_t_temp = []
            for agent_idx in range(5):
                delta_t_temp.append(newest_time[b] - int(data['file_name'][agent_idx][b].split(',')[-1].strip('/').split('_')[-1]))
            delta_t.append(delta_t_temp)
# with torch.autograd.set_detect_anomaly(True):
if self.MGDA:
self.loss_scale = self.cal_loss_scale(data)
x = self.encoder(bev_seq)
result = self.head(x)
else:
result = self.model(bev_seq, trans_matrices, num_agent, batch_size=batch_size, center_agent = center_agent, delta_t = delta_t)
# labels = labels[:,-1]
# anchors = anchors[:, -1]
# reg_loss_mask = reg_loss_mask[:, -1]
# reg_targets = reg_targets[:,-1]
labels = labels.view(result['cls'].shape[0],-1,result['cls'].shape[-1])
N = bev_seq.shape[0]
loss_collect = self.loss_calculator(result,anchors,reg_loss_mask,reg_targets,labels,N)
loss_num = loss_collect[0]
if loss_num == 3:
loss_num,loss, loss_cls,loss_loc_1,loss_loc_2 = loss_collect
        elif loss_num == 2:
loss_num,loss, loss_cls,loss_loc = loss_collect
elif loss_num == 4:
loss_num,loss, loss_cls,loss_loc_1,loss_loc_2,loss_motion = loss_collect
if self.MGDA:
self.optimizer_encoder.zero_grad()
self.optimizer_head.zero_grad()
loss.backward()
self.optimizer_encoder.step()
self.optimizer_head.step()
else:
self.optimizer.zero_grad()
loss.backward()
self.optimizer.step()
if self.config.pred_type in ['motion','center'] and not self.only_det:
if self.config.motion_state:
return loss.item(),loss_cls.item(),loss_loc_1.item(),loss_loc_2.item(), loss_motion.item()
else:
return loss.item(),loss_cls.item(),loss_loc_1.item(),loss_loc_2.item()
else:
return loss.item(),loss_cls.item(),loss_loc.item()
def predict(self,data,validation=True):
bev_seq = data['bev_seq']
vis_maps = data['vis_maps']
if self.MGDA:
x = self.encoder(bev_seq)
result = self.head(x)
else:
result = self.model(bev_seq,vis=vis_maps)
N = bev_seq.shape[0]
if validation:
labels = data['labels']
anchors = data['anchors']
reg_targets = data['reg_targets']
reg_loss_mask = data['reg_loss_mask']
motion_labels = None
motion_mask = None
labels = labels.view(result['cls'].shape[0],-1,result['cls'].shape[-1])
if self.config.motion_state:
motion_labels = data['motion_label']
motion_mask = data['motion_mask']
motion_labels = motion_labels.view(result['state'].shape[0],-1,result['state'].shape[-1])
N = bev_seq.shape[0]
loss_collect = self.loss_calculator(result,anchors,reg_loss_mask,reg_targets,labels,N,motion_labels,motion_mask)
loss_num = loss_collect[0]
if loss_num == 3:
loss_num,loss, loss_cls,loss_loc_1,loss_loc_2 = loss_collect
            elif loss_num == 2:
loss_num,loss, loss_cls,loss_loc = loss_collect
elif loss_num == 4:
loss_num,loss, loss_cls,loss_loc_1,loss_loc_2,loss_motion = loss_collect
batch_box_preds = result['loc']
batch_cls_preds = result['cls']
if self.config.motion_state:
batch_motion_preds = result['state']
else:
batch_motion_preds = None
if not self.only_det:
if self.config.pred_type == 'center':
batch_box_preds[:,:,:,:,1:,2:] = batch_box_preds[:,:,:,:,[0],2:]
class_selected = apply_nms_det(batch_box_preds, batch_cls_preds,anchors,self.code_type,self.config,batch_motion_preds)
#class_selected = None
if validation:
if self.config.pred_type in ['motion','center'] and not self.only_det:
if self.config.motion_state:
return loss.item(),loss_cls.item(),loss_loc_1.item(),loss_loc_2.item(), loss_motion.item(),class_selected
else:
return loss.item(),loss_cls.item(),loss_loc_1.item(),loss_loc_2.item(),class_selected
else:
return loss.item(),loss_cls.item(),loss_loc.item(),class_selected
else:
return class_selected
def predict_all(self,data,batch_size,validation=True, center_agent = 0):
NUM_AGENT = 5
bev_seq = data['bev_seq']
vis_maps = data['vis_maps']
trans_matrices = data['trans_matrices']
num_agent_tensor = data['num_agent']
num_sensor = num_agent_tensor[0, 0]
if self.MGDA:
x = self.encoder(bev_seq)
result = self.head(x)
else:
with torch.no_grad():
                result = self.model(bev_seq, trans_matrices, num_agent_tensor, batch_size=batch_size, center_agent=center_agent)
# result = self.model(bev_seq,vis=vis_maps,training=False)
#
N = bev_seq.shape[0]
if validation:
labels = data['labels']
anchors = data['anchors']
reg_targets = data['reg_targets']
reg_loss_mask = data['reg_loss_mask']
motion_labels = None
motion_mask = None
labels = labels.view(result['cls'].shape[0],-1,result['cls'].shape[-1])
if self.config.motion_state:
motion_labels = data['motion_label']
motion_mask = data['motion_mask']
motion_labels = motion_labels.view(result['state'].shape[0],-1,result['state'].shape[-1])
N = bev_seq.shape[0]
loss_collect = self.loss_calculator(result,anchors,reg_loss_mask,reg_targets,labels,N,motion_labels,motion_mask)
loss_num = loss_collect[0]
if loss_num == 3:
loss_num,loss, loss_cls,loss_loc_1,loss_loc_2 = loss_collect
            elif loss_num == 2:
loss_num,loss, loss_cls,loss_loc = loss_collect
elif loss_num == 4:
loss_num,loss, loss_cls,loss_loc_1,loss_loc_2,loss_motion = loss_collect
seq_results = [[] for i in range(NUM_AGENT)]
# global_points = [[] for i in range(num_sensor)]
# cls_preds = [[] for i in range(num_sensor)]
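        # Decode detections agent by agent; an all-zero BEV input marks a
        # padded (absent) agent, which yields an empty result.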
for k in range(NUM_AGENT):
bev_seq = torch.unsqueeze(data['bev_seq'][k, :, :, :, :], 0)
if torch.nonzero(bev_seq).shape[0] == 0:
seq_results[k] = []
else:
batch_box_preds = torch.unsqueeze(result['loc'][k, :, :, :, :, :],0)
batch_cls_preds = torch.unsqueeze(result['cls'][k, :, :], 0)
anchors = torch.unsqueeze(data['anchors'][k, :, :, :, :],0)
batch_motion_preds = None
if not self.only_det:
if self.config.pred_type == 'center':
batch_box_preds[:,:,:,:,1:,2:] = batch_box_preds[:,:,:,:,[0],2:]
class_selected = apply_nms_det(batch_box_preds, batch_cls_preds,anchors,self.code_type,self.config,batch_motion_preds)
seq_results[k] = class_selected
# global_points[k], cls_preds[k] = apply_box_global_transform(trans_matrices_map[k],batch_box_preds,batch_cls_preds,anchors,self.code_type,self.config,batch_motion_preds)
# all_points_scene = numpy.concatenate(tuple(global_points), 0)
# cls_preds_scene = torch.cat(tuple(cls_preds), 0)
# class_selected_global = apply_nms_global_scene(all_points_scene, cls_preds_scene)
if validation:
return loss.item(),loss_cls.item(),loss_loc.item(),seq_results
else:
return seq_results
def predict_all_with_box_com(self, data, trans_matrices_map, validation=True):
NUM_AGENT = 5
bev_seq = data['bev_seq']
vis_maps = data['vis_maps']
trans_matrices = data['trans_matrices']
num_agent_tensor = data['num_agent']
num_sensor = num_agent_tensor[0, 0]
if self.MGDA:
x = self.encoder(bev_seq)
result = self.head(x)
else:
result = self.model(bev_seq, trans_matrices, num_agent_tensor, batch_size=1)
N = bev_seq.shape[0]
if validation:
labels = data['labels']
anchors = data['anchors']
reg_targets = data['reg_targets']
reg_loss_mask = data['reg_loss_mask']
motion_labels = None
motion_mask = None
labels = labels.view(result['cls'].shape[0], -1, result['cls'].shape[-1])
if self.config.motion_state:
motion_labels = data['motion_label']
motion_mask = data['motion_mask']
motion_labels = motion_labels.view(result['state'].shape[0], -1, result['state'].shape[-1])
N = bev_seq.shape[0]
loss_collect = self.loss_calculator(result, anchors, reg_loss_mask, reg_targets, labels, N, motion_labels,
motion_mask)
loss_num = loss_collect[0]
if loss_num == 3:
loss_num, loss, loss_cls, loss_loc_1, loss_loc_2 = loss_collect
elif loss_num == 2:
loss_num, loss, loss_cls, loss_loc = loss_collect
elif loss_num == 4:
loss_num, loss, loss_cls, loss_loc_1, loss_loc_2, loss_motion = loss_collect
seq_results = [[] for i in range(NUM_AGENT)]
local_results_wo_local_nms = [[] for i in range(NUM_AGENT)]
local_results_af_local_nms = [[] for i in range(NUM_AGENT)]
global_points = [[] for i in range(num_sensor)]
cls_preds = [[] for i in range(num_sensor)]
global_boxes_af_localnms = [[] for i in range(num_sensor)]
box_scores_af_localnms = [[] for i in range(num_sensor)]
forward_message_size = 0
forward_message_size_two_nms = 0
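        # Bandwidth accounting: forward_message_size assumes a fixed
        # feature-map-sized payload per agent, while forward_message_size_two_nms
        # counts only the boxes that survive each agent's local NMS.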
for k in range(NUM_AGENT):
bev_seq = torch.unsqueeze(data['bev_seq'][k, :, :, :, :], 0)
if torch.nonzero(bev_seq).shape[0] == 0:
seq_results[k] = []
else:
batch_box_preds = torch.unsqueeze(result['loc'][k, :, :, :, :, :], 0)
batch_cls_preds = torch.unsqueeze(result['cls'][k, :, :], 0)
anchors = torch.unsqueeze(data['anchors'][k, :, :, :, :], 0)
if self.config.motion_state:
batch_motion_preds = result['state']
else:
batch_motion_preds = None
if not self.only_det:
if self.config.pred_type == 'center':
batch_box_preds[:, :, :, :, 1:, 2:] = batch_box_preds[:, :, :, :, [0], 2:]
class_selected, box_scores_pred_cls = apply_nms_det(batch_box_preds, batch_cls_preds, anchors,
self.code_type, self.config, batch_motion_preds)
# transform all the boxes before local nms to the global coordinate
# global_points[k], cls_preds[k] = apply_box_global_transform(trans_matrices_map[k], batch_box_preds,
# batch_cls_preds, anchors, self.code_type,
# self.config, batch_motion_preds)
# transform the boxes after local nms to the global coordinate
global_boxes_af_localnms[k], box_scores_af_localnms[k] = apply_box_global_transform_af_localnms(
trans_matrices_map[k], class_selected, box_scores_pred_cls)
# print(cls_preds[k].shape, box_scores_af_localnms[k].shape)
forward_message_size = forward_message_size + 256 * 256 * 6 * 4 * 2
forward_message_size_two_nms = forward_message_size_two_nms + global_boxes_af_localnms[k].shape[
0] * 4 * 2
# global results with one NMS
# all_points_scene = numpy.concatenate(tuple(global_points), 0)
# cls_preds_scene = torch.cat(tuple(cls_preds), 0)
# class_selected_global = apply_nms_global_scene(all_points_scene, cls_preds_scene)
# global results with two NMS
global_boxes_af_local_nms = numpy.concatenate(tuple(global_boxes_af_localnms), 0)
box_scores_af_local_nms = torch.cat(tuple(box_scores_af_localnms), 0)
class_selected_global_af_local_nms = apply_nms_global_scene(global_boxes_af_local_nms, box_scores_af_local_nms)
# transform the consensus global boxes to local agents (two NMS)
back_message_size_two_nms = 0
for k in range(num_sensor):
local_results_af_local_nms[k], ms = apply_box_local_transform(class_selected_global_af_local_nms,
trans_matrices_map[k])
back_message_size_two_nms = back_message_size_two_nms + ms
sample_bandwidth_two_nms = forward_message_size_two_nms + back_message_size_two_nms
# transform the consensus global boxes to local agents (One NMS)
# back_message_size = 0
# for k in range(num_sensor):
# local_results_wo_local_nms[k], ms = apply_box_local_transform(class_selected_global, trans_matrices_map[k])
# back_message_size = back_message_size + ms
# sample_bandwidth = forward_message_size + back_message_size
return loss.item(), loss_cls.item(), loss_loc.item(), local_results_af_local_nms, class_selected_global_af_local_nms, sample_bandwidth_two_nms
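    # MGDA loss scaling: run the head on detached shared features, backprop each
    # loss term separately to collect its gradient w.r.t. the shared features,
    # then solve the min-norm problem (Frank-Wolfe) for the task weights.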
def cal_loss_scale(self,data):
bev_seq = data['bev_seq']
labels = data['labels']
reg_targets = data['reg_targets']
reg_loss_mask = data['reg_loss_mask']
anchors = data['anchors']
motion_labels = None
motion_mask = None
with torch.no_grad():
shared_feats = self.encoder(bev_seq)
shared_feats_tensor = shared_feats.clone().detach().requires_grad_(True)
result = self.head(shared_feats_tensor)
if self.config.motion_state:
motion_labels = data['motion_label']
motion_mask = data['motion_mask']
motion_labels = motion_labels.view(result['state'].shape[0],-1,result['state'].shape[-1])
self.optimizer_encoder.zero_grad()
self.optimizer_head.zero_grad()
grads = {}
labels = labels.view(result['cls'].shape[0],-1,result['cls'].shape[-1])
N = bev_seq.shape[0]
# calculate loss
grad_len = 0
'''
Classification Loss
'''
loss_cls = self.alpha*torch.sum(self.criterion['cls'](result['cls'],labels)) /N
#loss_loc = torch.sum(self.criterion['loc'](result['loc'],reg_targets,mask = reg_loss_mask)) / N
self.optimizer_encoder.zero_grad()
self.optimizer_head.zero_grad()
loss_cls.backward(retain_graph=True)
grads[0] = []
grads[0].append(shared_feats_tensor.grad.data.clone().detach())
shared_feats_tensor.grad.data.zero_()
grad_len += 1
'''
Localization Loss
'''
loc_scale = False
loss_mask_num = torch.nonzero(reg_loss_mask.view(-1,reg_loss_mask.shape[-1])).size(0)
if self.code_type in ['corner_1','corner_2','corner_3']:
target = reg_targets[reg_loss_mask].reshape(-1,5,2)
flip_target = torch.stack([target[:,0],target[:,3],target[:,4],target[:,1],target[:,2]],dim=-2)
pred = result['loc'][reg_loss_mask].reshape(-1,5,2)
t = torch.sum(torch.norm(pred-target,dim=-1),dim=-1)
f = torch.sum(torch.norm(pred-flip_target,dim=-1),dim=-1)
loss_loc = torch.sum(torch.min(t,f)) / N
elif self.code_type == 'faf':
if self.loss_type == 'corner_loss':
if self.only_det:
loss_loc = self.corner_loss(anchors,reg_loss_mask,reg_targets,result['loc'])
elif self.config.pred_type in ['motion','center']:
###only center/motion for pred
loss_loc_1 = self.corner_loss(anchors,reg_loss_mask[...,0][...,[0]],reg_targets[...,[0],:],result['loc'][...,[0],:])
pred_reg_loss_mask = reg_loss_mask[...,1:,:]
if self.config.motion_state:
pred_reg_loss_mask = motion_mask #mask out static object
loss_loc_2 = F.smooth_l1_loss(result['loc'][...,1:,:][pred_reg_loss_mask],reg_targets[...,1:,:][pred_reg_loss_mask])
self.optimizer_encoder.zero_grad()
self.optimizer_head.zero_grad()
loss_loc_1.backward(retain_graph=True)
grads[1] = []
grads[1].append(shared_feats_tensor.grad.data.clone().detach())
shared_feats_tensor.grad.data.zero_()
self.optimizer_encoder.zero_grad()
self.optimizer_head.zero_grad()
loss_loc_2.backward(retain_graph=True)
grads[2] = []
grads[2].append(shared_feats_tensor.grad.data.clone().detach())
shared_feats_tensor.grad.data.zero_()
loc_scale = True
grad_len += 2
###corners for pred
else:
loss_loc = self.corner_loss(anchors,reg_loss_mask,reg_targets,result['loc'])
else:
loss_loc = F.smooth_l1_loss(result['loc'][reg_loss_mask],reg_targets[reg_loss_mask])
if not loc_scale:
grad_len += 1
self.optimizer_encoder.zero_grad()
self.optimizer_head.zero_grad()
loss_loc.backward(retain_graph=True)
grads[1] = []
grads[1].append(shared_feats_tensor.grad.data.clone().detach())
shared_feats_tensor.grad.data.zero_()
'''
Motion state Loss
'''
if self.config.motion_state:
loss_motion = torch.sum(self.criterion['cls'](result['state'],motion_labels)) /N
self.optimizer_encoder.zero_grad()
self.optimizer_head.zero_grad()
loss_motion.backward(retain_graph=True)
grads[3] = []
grads[3].append(shared_feats_tensor.grad.data.clone().detach())
shared_feats_tensor.grad.data.zero_()
grad_len += 1
# ---------------------------------------------------------------------
# -- Frank-Wolfe iteration to compute scales.
scale = np.zeros(grad_len, dtype=np.float32)
sol, min_norm = MinNormSolver.find_min_norm_element([grads[t] for t in range(grad_len)])
for i in range(grad_len):
scale[i] = float(sol[i])
#print(scale)
return scale
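# FaFModuleKD is the knowledge-distillation variant: a frozen teacher network
# provides intermediate feature maps, and the student is additionally trained
# to match them (see the KL-divergence terms in step()).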
class FaFModuleKD(object):
def __init__(self, model, teacher, config, optimizer, criterion):
self.MGDA = config.MGDA
if self.MGDA:
self.encoder = model[0]
self.head = model[1]
self.optimizer_encoder = optimizer[0]
self.optimizer_head = optimizer[1]
self.scheduler_encoder = torch.optim.lr_scheduler.MultiStepLR(self.optimizer_encoder,
milestones=[50, 100, 150, 200], gamma=0.5)
self.scheduler_head = torch.optim.lr_scheduler.MultiStepLR(self.optimizer_head,
milestones=[50, 100, 150, 200], gamma=0.5)
else:
self.model = model
self.teacher = teacher
for k, v in self.teacher.named_parameters():
v.requires_grad = False
self.optimizer = optimizer
self.scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=[50, 100, 150, 200], gamma=0.5)
        self.criterion = criterion  # dict of loss criteria, keyed 'cls' and 'loc'
self.out_seq_len = config.pred_len
self.category_num = config.category_num
self.code_size = config.box_code_size
self.loss_scale = None
self.code_type = config.code_type
self.loss_type = config.loss_type
self.pred_len = config.pred_len
self.only_det = config.only_det
if self.code_type in ['corner_1', 'corner_2', 'corner_3']:
self.alpha = 1.
elif self.code_type == 'faf':
if self.loss_type == 'corner_loss':
self.alpha = 1.
if not self.only_det:
self.alpha = 1.
else:
self.alpha = 0.1
self.config = config
def resume(self, path):
def map_func(storage, location):
return storage.cuda()
        if os.path.isfile(path):
            # no distributed rank exists in this module, so log unconditionally
            print("=> loading checkpoint '{}'".format(path))
            checkpoint = torch.load(path, map_location=map_func)
            self.model.load_state_dict(checkpoint['state_dict'], strict=False)
            ckpt_keys = set(checkpoint['state_dict'].keys())
            own_keys = set(self.model.state_dict().keys())
missing_keys = own_keys - ckpt_keys
for k in missing_keys:
print('caution: missing keys from checkpoint {}: {}'.format(path, k))
else:
print("=> no checkpoint found at '{}'".format(path))
def corner_loss(self, anchors, reg_loss_mask, reg_targets, pred_result):
N = pred_result.shape[0]
anchors = anchors.unsqueeze(-2).expand(anchors.shape[0], anchors.shape[1], anchors.shape[2], anchors.shape[3],
reg_loss_mask.shape[-1], anchors.shape[-1])
assigned_anchor = anchors[reg_loss_mask]
assigned_target = reg_targets[reg_loss_mask]
assigned_pred = pred_result[reg_loss_mask]
# print(assigned_anchor.shape,assigned_pred.shape,assigned_target.shape)
# exit()
pred_decode = bev_box_decode_torch(assigned_pred, assigned_anchor)
target_decode = bev_box_decode_torch(assigned_target, assigned_anchor)
pred_corners = center_to_corner_box2d_torch(pred_decode[..., :2], pred_decode[..., 2:4], pred_decode[..., 4:])
target_corners = center_to_corner_box2d_torch(target_decode[..., :2], target_decode[..., 2:4],
target_decode[..., 4:])
loss_loc = torch.sum(torch.norm(pred_corners - target_corners, dim=-1)) / N
return loss_loc
def loss_calculator(self, result, anchors, reg_loss_mask, reg_targets, labels, N, motion_labels=None,
motion_mask=None):
loss_num = 0
# calculate loss
        weights = torch.Tensor([0.005, 1.0, 1.0, 1.0, 1.0]).cuda().double()  # defined but unused below
loss_cls = torch.sum(self.criterion['cls'](result['cls'], labels)) / N
loss_num += 1
# loss_loc = torch.sum(self.criterion['loc'](result['loc'],reg_targets,mask = reg_loss_mask)) / N
# Motion state
        if motion_labels is not None:
loss_motion = torch.sum(self.criterion['cls'](result['state'], motion_labels)) / N
loss_num += 1
loss_mask_num = torch.nonzero(reg_loss_mask.view(-1, reg_loss_mask.shape[-1])).size(0)
# print(loss_mask_num)
# print(torch.sum(reg_targets[:,:,:,:,0][reg_loss_mask[:,:,:,:,2]]))
if self.code_type in ['corner_1', 'corner_2', 'corner_3']:
target = reg_targets[reg_loss_mask].reshape(-1, 5, 2)
flip_target = torch.stack([target[:, 0], target[:, 3], target[:, 4], target[:, 1], target[:, 2]], dim=-2)
pred = result['loc'][reg_loss_mask].reshape(-1, 5, 2)
t = torch.sum(torch.norm(pred - target, dim=-1), dim=-1)
f = torch.sum(torch.norm(pred - flip_target, dim=-1), dim=-1)
loss_loc = torch.sum(torch.min(t, f)) / N
elif self.code_type == 'faf':
if self.loss_type == 'corner_loss':
if self.only_det:
loss_loc = self.corner_loss(anchors, reg_loss_mask, reg_targets, result['loc'])
loss_num += 1
elif self.config.pred_type in ['motion', 'center']:
###only center/motion for pred
loss_loc_1 = self.corner_loss(anchors, reg_loss_mask[..., 0][..., [0]], reg_targets[..., [0], :],
result['loc'][..., [0], :])
pred_reg_loss_mask = reg_loss_mask[..., 1:, :]
if self.config.motion_state:
pred_reg_loss_mask = motion_mask # mask out static object
loss_loc_2 = F.smooth_l1_loss(result['loc'][..., 1:, :][pred_reg_loss_mask],
reg_targets[..., 1:, :][pred_reg_loss_mask])
loss_loc = loss_loc_1 + loss_loc_2
loss_num += 2
###corners for pred
else:
loss_loc = self.corner_loss(anchors, reg_loss_mask, reg_targets, result['loc'])
loss_num += 1
else:
loss_loc = F.smooth_l1_loss(result['loc'][reg_loss_mask], reg_targets[reg_loss_mask])
loss_num += 1
if self.loss_scale is not None:
if len(self.loss_scale) == 4:
loss = self.loss_scale[0] * loss_cls + self.loss_scale[1] * loss_loc_1 + self.loss_scale[
2] * loss_loc_2 + self.loss_scale[3] * loss_motion
elif len(self.loss_scale) == 3:
loss = self.loss_scale[0] * loss_cls + self.loss_scale[1] * loss_loc_1 + self.loss_scale[2] * loss_loc_2
else:
loss = self.loss_scale[0] * loss_cls + self.loss_scale[1] * loss_loc
        elif motion_labels is not None:
loss = loss_cls + loss_loc + loss_motion
else:
loss = loss_cls + loss_loc
if loss_num == 2:
return (loss_num, loss, loss_cls, loss_loc)
elif loss_num == 3:
return (loss_num, loss, loss_cls, loss_loc_1, loss_loc_2)
elif loss_num == 4:
return (loss_num, loss, loss_cls, loss_loc_1, loss_loc_2, loss_motion)
def step(self, data, batch_size):
bev_seq = data['bev_seq']
bev_seq_teacher = data['bev_seq_teacher']
kd_weight = data['kd_weight']
layer = data['layer']
self.teacher.eval()
labels = data['labels']
reg_targets = data['reg_targets']
reg_loss_mask = data['reg_loss_mask']
anchors = data['anchors']
vis_maps = data['vis_maps']
trans_matrices = data['trans_matrices']
num_agent = data['num_agent']
# with torch.autograd.set_detect_anomaly(True):
if self.MGDA:
self.loss_scale = self.cal_loss_scale(data)
x = self.encoder(bev_seq)
result = self.head(x)
else:
result, x_8, x_7, x_6, x_5, x_3 = self.model(bev_seq, trans_matrices, num_agent, batch_size=batch_size)
x_8_teacher, x_7_teacher, x_6_teacher, x_5_teacher,x_3_teacher = self.teacher(bev_seq_teacher, vis=vis_maps)
            kl_loss_mean = nn.KLDivLoss(reduction='mean')  # same behavior as the deprecated size_average=True, reduce=True
            # feature sizes: x_8: 32*256*256, x_7: 64*128*128, x_6: 128*64*64, x_5: 256*32*32
# if layer==4:
# kd_loss = kd_weight * torch.sum(torch.norm((x_8.reshape(5 * batch_size, -1) - x_8_teacher.reshape(5 * batch_size, -1)), dim=1)) / (5 * batch_size) + \
# kd_weight * torch.sum(torch.norm((x_7.reshape(5 * batch_size, -1) - x_7_teacher.reshape(5 * batch_size, -1)), dim=1)) / (5 * batch_size) + \
# kd_weight * torch.sum(torch.norm((x_6.reshape(5 * batch_size, -1) - x_6_teacher.reshape(5 * batch_size, -1)), dim=1)) / (5 * batch_size) + \
# kd_weight * torch.sum(torch.norm((x_5.reshape(5 * batch_size, -1) - x_5_teacher.reshape(5 * batch_size, -1)), dim=1)) / (5 * batch_size)
# elif layer==3:
# kd_loss = kd_weight * torch.sum(torch.norm((x_8.reshape(5 * batch_size, -1) - x_8_teacher.reshape(5 * batch_size, -1)), dim=1)) / (5 * batch_size) + \
# kd_weight * torch.sum(torch.norm((x_7.reshape(5 * batch_size, -1) - x_7_teacher.reshape(5 * batch_size, -1)), dim=1)) / (5 * batch_size) + \
# kd_weight * torch.sum(torch.norm((x_6.reshape(5 * batch_size, -1) - x_6_teacher.reshape(5 * batch_size, -1)), dim=1)) / (5 * batch_size) + \
# kd_weight * torch.sum(torch.norm((x_5.reshape(5 * batch_size, -1) - x_5_teacher.reshape(5 * batch_size, -1)), dim=1)) / (5 * batch_size)
# elif layer == 2:
# kd_loss = kd_weight * torch.sum(torch.norm((x_8.reshape(5 * batch_size, -1) - x_8_teacher.reshape(5 * batch_size, -1)), dim=1)) / (5 * batch_size) + \
# kd_weight * torch.sum(torch.norm((x_7.reshape(5 * batch_size, -1) - x_7_teacher.reshape(5 * batch_size, -1)), dim=1)) / (5 * batch_size) + \
# kd_weight * torch.sum(torch.norm((x_6.reshape(5 * batch_size, -1) - x_6_teacher.reshape(5 * batch_size, -1)), dim=1)) / (5 * batch_size)
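            # Feature distillation: each spatial location of a feature map is
            # treated as a distribution over channels, and
            # KLDivLoss(log_softmax(student), softmax(teacher)) penalizes the
            # student's channel distribution for diverging from the teacher's.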
if layer==4:
target_x8 = x_8_teacher.permute(0, 2, 3, 1).reshape(5 *batch_size*256*256, -1)
student_x8 = x_8.permute(0, 2, 3, 1).reshape(5 *batch_size*256*256, -1)
kd_loss_x8 = kl_loss_mean(F.log_softmax(student_x8, dim=1), F.softmax(target_x8, dim=1))
target_x7 = x_7_teacher.permute(0, 2, 3, 1).reshape(5 *batch_size*128*128, -1)
student_x7 = x_7.permute(0, 2, 3, 1).reshape(5 *batch_size*128*128, -1)
kd_loss_x7 = kl_loss_mean(F.log_softmax(student_x7, dim=1), F.softmax(target_x7, dim=1))
target_x6 = x_6_teacher.permute(0, 2, 3, 1).reshape(5 *batch_size*64*64, -1)
student_x6 = x_6.permute(0, 2, 3, 1).reshape(5 *batch_size*64*64, -1)
kd_loss_x6 = kl_loss_mean(F.log_softmax(student_x6, dim=1), F.softmax(target_x6, dim=1))
target_x5 = x_5_teacher.permute(0, 2, 3, 1).reshape(5 *batch_size*32*32, -1)
student_x5 = x_5.permute(0, 2, 3, 1).reshape(5 *batch_size*32*32, -1)
kd_loss_x5 = kl_loss_mean(F.log_softmax(student_x5, dim=1), F.softmax(target_x5, dim=1))
kd_loss = kd_weight * (kd_loss_x8 + kd_loss_x7 + kd_loss_x6 + kd_loss_x5)
elif layer==3:
target_x8 = x_8_teacher.permute(0, 2, 3, 1).reshape(5 *batch_size*256*256, -1)
student_x8 = x_8.permute(0, 2, 3, 1).reshape(5 *batch_size*256*256, -1)
kd_loss_x8 = kl_loss_mean(F.log_softmax(student_x8, dim=1), F.softmax(target_x8, dim=1))
target_x7 = x_7_teacher.permute(0, 2, 3, 1).reshape(5 *batch_size*128*128, -1)
student_x7 = x_7.permute(0, 2, 3, 1).reshape(5 *batch_size*128*128, -1)
kd_loss_x7 = kl_loss_mean(F.log_softmax(student_x7, dim=1), F.softmax(target_x7, dim=1))
target_x6 = x_6_teacher.permute(0, 2, 3, 1).reshape(5 *batch_size*64*64, -1)
student_x6 = x_6.permute(0, 2, 3, 1).reshape(5 *batch_size*64*64, -1)
kd_loss_x6 = kl_loss_mean(F.log_softmax(student_x6, dim=1), F.softmax(target_x6, dim=1))
target_x5 = x_5_teacher.permute(0, 2, 3, 1).reshape(5 *batch_size*32*32, -1)
student_x5 = x_5.permute(0, 2, 3, 1).reshape(5 *batch_size*32*32, -1)
kd_loss_x5 = kl_loss_mean(F.log_softmax(student_x5, dim=1), F.softmax(target_x5, dim=1))
target_x3 = x_3_teacher.permute(0, 2, 3, 1).reshape(5 *batch_size*32*32, -1)
student_x3 = x_3.permute(0, 2, 3, 1).reshape(5 *batch_size*32*32, -1)
kd_loss_x3 = kl_loss_mean(F.log_softmax(student_x3, dim=1), F.softmax(target_x3, dim=1))
kd_loss = kd_weight * (kd_loss_x8 + kd_loss_x7 + kd_loss_x6 + kd_loss_x5 + kd_loss_x3)
elif layer==2:
target_x8 = x_8_teacher.permute(0, 2, 3, 1).reshape(5 *batch_size*256*256, -1)
student_x8 = x_8.permute(0, 2, 3, 1).reshape(5 *batch_size*256*256, -1)
kd_loss_x8 = kl_loss_mean(F.log_softmax(student_x8, dim=1), F.softmax(target_x8, dim=1))
target_x7 = x_7_teacher.permute(0, 2, 3, 1).reshape(5 *batch_size*128*128, -1)
student_x7 = x_7.permute(0, 2, 3, 1).reshape(5 *batch_size*128*128, -1)
kd_loss_x7 = kl_loss_mean(F.log_softmax(student_x7, dim=1), F.softmax(target_x7, dim=1))
target_x6 = x_6_teacher.permute(0, 2, 3, 1).reshape(5 *batch_size*64*64, -1)
student_x6 = x_6.permute(0, 2, 3, 1).reshape(5 *batch_size*64*64, -1)
kd_loss_x6 = kl_loss_mean(F.log_softmax(student_x6, dim=1), F.softmax(target_x6, dim=1))
kd_loss = kd_weight * (kd_loss_x8 + kd_loss_x7 + kd_loss_x6)
labels = labels.view(result['cls'].shape[0], -1, result['cls'].shape[-1])
N = bev_seq.shape[0]
loss_collect = self.loss_calculator(result, anchors, reg_loss_mask, reg_targets, labels, N)
# -------- for debugging teacher model---------#
# loss_collect_teacher = self.loss_calculator(result_teacher,anchors,reg_loss_mask,reg_targets,labels,N)
# loss_num, loss, loss_cls, loss_loc = loss_collect_teacher
# print(loss, loss_cls, loss_loc)
loss_num = loss_collect[0]
if loss_num == 3:
loss_num, loss, loss_cls, loss_loc_1, loss_loc_2 = loss_collect
elif loss_num == 2:
loss_num, loss, loss_cls, loss_loc = loss_collect
elif loss_num == 4:
loss_num, loss, loss_cls, loss_loc_1, loss_loc_2, loss_motion = loss_collect
loss = loss + kd_loss
print(kd_loss.item())
if self.MGDA:
self.optimizer_encoder.zero_grad()
self.optimizer_head.zero_grad()
loss.backward()
self.optimizer_encoder.step()
self.optimizer_head.step()
else:
self.optimizer.zero_grad()
loss.backward()
self.optimizer.step()
if self.config.pred_type in ['motion', 'center'] and not self.only_det:
if self.config.motion_state:
return loss.item(), loss_cls.item(), loss_loc_1.item(), loss_loc_2.item(), loss_motion.item()
else:
return loss.item(), loss_cls.item(), loss_loc_1.item(), loss_loc_2.item()
else:
return loss.item(), loss_cls.item(), loss_loc.item(), kd_loss.item()
def predict(self, data, validation=True):
bev_seq = data['bev_seq']
vis_maps = data['vis_maps']
if self.MGDA:
x = self.encoder(bev_seq)
result = self.head(x)
else:
result = self.model(bev_seq, vis=vis_maps)
N = bev_seq.shape[0]
if validation:
labels = data['labels']
anchors = data['anchors']
reg_targets = data['reg_targets']
reg_loss_mask = data['reg_loss_mask']
motion_labels = None
motion_mask = None
labels = labels.view(result['cls'].shape[0], -1, result['cls'].shape[-1])
if self.config.motion_state:
motion_labels = data['motion_label']
motion_mask = data['motion_mask']
motion_labels = motion_labels.view(result['state'].shape[0], -1, result['state'].shape[-1])
N = bev_seq.shape[0]
loss_collect = self.loss_calculator(result, anchors, reg_loss_mask, reg_targets, labels, N, motion_labels,
motion_mask)
loss_num = loss_collect[0]
if loss_num == 3:
loss_num, loss, loss_cls, loss_loc_1, loss_loc_2 = loss_collect
elif loss_num == 2:
loss_num, loss, loss_cls, loss_loc = loss_collect
elif loss_num == 4:
loss_num, loss, loss_cls, loss_loc_1, loss_loc_2, loss_motion = loss_collect
batch_box_preds = result['loc']
batch_cls_preds = result['cls']
if self.config.motion_state:
batch_motion_preds = result['state']
else:
batch_motion_preds = None
if not self.only_det:
if self.config.pred_type == 'center':
batch_box_preds[:, :, :, :, 1:, 2:] = batch_box_preds[:, :, :, :, [0], 2:]
class_selected = apply_nms_det(batch_box_preds, batch_cls_preds, anchors, self.code_type, self.config,
batch_motion_preds)
# class_selected = None
if validation:
if self.config.pred_type in ['motion', 'center'] and not self.only_det:
if self.config.motion_state:
return loss.item(), loss_cls.item(), loss_loc_1.item(), loss_loc_2.item(), loss_motion.item(), class_selected
else:
return loss.item(), loss_cls.item(), loss_loc_1.item(), loss_loc_2.item(), class_selected
else:
return loss.item(), loss_cls.item(), loss_loc.item(), class_selected
else:
return class_selected
def predict_all(self, data, batch_size, validation=True):
NUM_AGENT = 5
bev_seq = data['bev_seq']
vis_maps = data['vis_maps']
trans_matrices = data['trans_matrices']
num_agent_tensor = data['num_agent']
num_sensor = num_agent_tensor[0, 0]
if self.MGDA:
x = self.encoder(bev_seq)
result = self.head(x)
else:
with torch.no_grad():
result, x_8, x_7, x_6, x_5, x_3 = self.model(bev_seq, trans_matrices, num_agent_tensor, batch_size=batch_size)
# result = self.model(bev_seq,vis=vis_maps,training=False)
#
N = bev_seq.shape[0]
if validation:
labels = data['labels']
anchors = data['anchors']
reg_targets = data['reg_targets']
reg_loss_mask = data['reg_loss_mask']
motion_labels = None
motion_mask = None
labels = labels.view(result['cls'].shape[0], -1, result['cls'].shape[-1])
if self.config.motion_state:
motion_labels = data['motion_label']
motion_mask = data['motion_mask']
motion_labels = motion_labels.view(result['state'].shape[0], -1, result['state'].shape[-1])
N = bev_seq.shape[0]
loss_collect = self.loss_calculator(result, anchors, reg_loss_mask, reg_targets, labels, N, motion_labels,
motion_mask)
loss_num = loss_collect[0]
if loss_num == 3:
loss_num, loss, loss_cls, loss_loc_1, loss_loc_2 = loss_collect
elif loss_num == 2:
loss_num, loss, loss_cls, loss_loc = loss_collect
elif loss_num == 4:
loss_num, loss, loss_cls, loss_loc_1, loss_loc_2, loss_motion = loss_collect
seq_results = [[] for i in range(NUM_AGENT)]
# global_points = [[] for i in range(num_sensor)]
# cls_preds = [[] for i in range(num_sensor)]
for k in range(NUM_AGENT):
bev_seq = torch.unsqueeze(data['bev_seq'][k, :, :, :, :], 0)
if torch.nonzero(bev_seq).shape[0] == 0:
seq_results[k] = []
else:
batch_box_preds = torch.unsqueeze(result['loc'][k, :, :, :, :, :], 0)
batch_cls_preds = torch.unsqueeze(result['cls'][k, :, :], 0)
anchors = torch.unsqueeze(data['anchors'][k, :, :, :, :], 0)
batch_motion_preds = None
if not self.only_det:
if self.config.pred_type == 'center':
batch_box_preds[:, :, :, :, 1:, 2:] = batch_box_preds[:, :, :, :, [0], 2:]
class_selected = apply_nms_det(batch_box_preds, batch_cls_preds, anchors, self.code_type, self.config,
batch_motion_preds)
seq_results[k] = class_selected
# global_points[k], cls_preds[k] = apply_box_global_transform(trans_matrices_map[k],batch_box_preds,batch_cls_preds,anchors,self.code_type,self.config,batch_motion_preds)
# all_points_scene = numpy.concatenate(tuple(global_points), 0)
# cls_preds_scene = torch.cat(tuple(cls_preds), 0)
# class_selected_global = apply_nms_global_scene(all_points_scene, cls_preds_scene)
if validation:
return loss.item(), loss_cls.item(), loss_loc.item(), seq_results
else:
return seq_results
def predict_all_with_box_com(self, data, trans_matrices_map, validation=True):
NUM_AGENT = 5
bev_seq = data['bev_seq']
vis_maps = data['vis_maps']
trans_matrices = data['trans_matrices']
num_agent_tensor = data['num_agent']
num_sensor = num_agent_tensor[0, 0]
if self.MGDA:
x = self.encoder(bev_seq)
result = self.head(x)
else:
result = self.model(bev_seq, trans_matrices, num_agent_tensor, batch_size=1)
N = bev_seq.shape[0]
if validation:
labels = data['labels']
anchors = data['anchors']
reg_targets = data['reg_targets']
reg_loss_mask = data['reg_loss_mask']
motion_labels = None
motion_mask = None
labels = labels.view(result['cls'].shape[0], -1, result['cls'].shape[-1])
if self.config.motion_state:
motion_labels = data['motion_label']
motion_mask = data['motion_mask']
motion_labels = motion_labels.view(result['state'].shape[0], -1, result['state'].shape[-1])
N = bev_seq.shape[0]
loss_collect = self.loss_calculator(result, anchors, reg_loss_mask, reg_targets, labels, N, motion_labels,
motion_mask)
loss_num = loss_collect[0]
if loss_num == 3:
loss_num, loss, loss_cls, loss_loc_1, loss_loc_2 = loss_collect
elif loss_num == 2:
loss_num, loss, loss_cls, loss_loc = loss_collect
elif loss_num == 4:
loss_num, loss, loss_cls, loss_loc_1, loss_loc_2, loss_motion = loss_collect
seq_results = [[] for i in range(NUM_AGENT)]
local_results_wo_local_nms = [[] for i in range(NUM_AGENT)]
local_results_af_local_nms = [[] for i in range(NUM_AGENT)]
global_points = [[] for i in range(num_sensor)]
cls_preds = [[] for i in range(num_sensor)]
global_boxes_af_localnms = [[] for i in range(num_sensor)]
box_scores_af_localnms = [[] for i in range(num_sensor)]
forward_message_size = 0
forward_message_size_two_nms = 0
for k in range(NUM_AGENT):
bev_seq = torch.unsqueeze(data['bev_seq'][k, :, :, :, :], 0)
if torch.nonzero(bev_seq).shape[0] == 0:
seq_results[k] = []
else:
batch_box_preds = torch.unsqueeze(result['loc'][k, :, :, :, :, :], 0)
batch_cls_preds = torch.unsqueeze(result['cls'][k, :, :], 0)
anchors = torch.unsqueeze(data['anchors'][k, :, :, :, :], 0)
if self.config.motion_state:
batch_motion_preds = result['state']
else:
batch_motion_preds = None
if not self.only_det:
if self.config.pred_type == 'center':
batch_box_preds[:, :, :, :, 1:, 2:] = batch_box_preds[:, :, :, :, [0], 2:]
class_selected, box_scores_pred_cls = apply_nms_det(batch_box_preds, batch_cls_preds, anchors,
self.code_type, self.config, batch_motion_preds)
# transform all the boxes before local nms to the global coordinate
# global_points[k], cls_preds[k] = apply_box_global_transform(trans_matrices_map[k], batch_box_preds,
# batch_cls_preds, anchors, self.code_type,
# self.config, batch_motion_preds)
# transform the boxes after local nms to the global coordinate
global_boxes_af_localnms[k], box_scores_af_localnms[k] = apply_box_global_transform_af_localnms(
trans_matrices_map[k], class_selected, box_scores_pred_cls)
# print(cls_preds[k].shape, box_scores_af_localnms[k].shape)
forward_message_size = forward_message_size + 256 * 256 * 6 * 4 * 2
forward_message_size_two_nms = forward_message_size_two_nms + global_boxes_af_localnms[k].shape[
0] * 4 * 2
# global results with one NMS
# all_points_scene = numpy.concatenate(tuple(global_points), 0)
# cls_preds_scene = torch.cat(tuple(cls_preds), 0)
# class_selected_global = apply_nms_global_scene(all_points_scene, cls_preds_scene)
# global results with two NMS
global_boxes_af_local_nms = numpy.concatenate(tuple(global_boxes_af_localnms), 0)
box_scores_af_local_nms = torch.cat(tuple(box_scores_af_localnms), 0)
class_selected_global_af_local_nms = apply_nms_global_scene(global_boxes_af_local_nms, box_scores_af_local_nms)
# transform the consensus global boxes to local agents (two NMS)
back_message_size_two_nms = 0
for k in range(num_sensor):
local_results_af_local_nms[k], ms = apply_box_local_transform(class_selected_global_af_local_nms,
trans_matrices_map[k])
back_message_size_two_nms = back_message_size_two_nms + ms
sample_bandwidth_two_nms = forward_message_size_two_nms + back_message_size_two_nms
# transform the consensus global boxes to local agents (One NMS)
# back_message_size = 0
# for k in range(num_sensor):
# local_results_wo_local_nms[k], ms = apply_box_local_transform(class_selected_global, trans_matrices_map[k])
# back_message_size = back_message_size + ms
# sample_bandwidth = forward_message_size + back_message_size
return loss.item(), loss_cls.item(), loss_loc.item(), local_results_af_local_nms, class_selected_global_af_local_nms, sample_bandwidth_two_nms
def cal_loss_scale(self, data):
bev_seq = data['bev_seq']
labels = data['labels']
reg_targets = data['reg_targets']
reg_loss_mask = data['reg_loss_mask']
anchors = data['anchors']
motion_labels = None
motion_mask = None
with torch.no_grad():
shared_feats = self.encoder(bev_seq)
shared_feats_tensor = shared_feats.clone().detach().requires_grad_(True)
result = self.head(shared_feats_tensor)
if self.config.motion_state:
motion_labels = data['motion_label']
motion_mask = data['motion_mask']
motion_labels = motion_labels.view(result['state'].shape[0], -1, result['state'].shape[-1])
self.optimizer_encoder.zero_grad()
self.optimizer_head.zero_grad()
grads = {}
labels = labels.view(result['cls'].shape[0], -1, result['cls'].shape[-1])
N = bev_seq.shape[0]
# calculate loss
grad_len = 0
'''
Classification Loss
'''
loss_cls = self.alpha * torch.sum(self.criterion['cls'](result['cls'], labels)) / N
# loss_loc = torch.sum(self.criterion['loc'](result['loc'],reg_targets,mask = reg_loss_mask)) / N
self.optimizer_encoder.zero_grad()
self.optimizer_head.zero_grad()
loss_cls.backward(retain_graph=True)
grads[0] = []
grads[0].append(shared_feats_tensor.grad.data.clone().detach())
shared_feats_tensor.grad.data.zero_()
grad_len += 1
'''
Localization Loss
'''
loc_scale = False
loss_mask_num = torch.nonzero(reg_loss_mask.view(-1, reg_loss_mask.shape[-1])).size(0)
if self.code_type in ['corner_1', 'corner_2', 'corner_3']:
target = reg_targets[reg_loss_mask].reshape(-1, 5, 2)
flip_target = torch.stack([target[:, 0], target[:, 3], target[:, 4], target[:, 1], target[:, 2]], dim=-2)
pred = result['loc'][reg_loss_mask].reshape(-1, 5, 2)
t = torch.sum(torch.norm(pred - target, dim=-1), dim=-1)
f = torch.sum(torch.norm(pred - flip_target, dim=-1), dim=-1)
loss_loc = torch.sum(torch.min(t, f)) / N
elif self.code_type == 'faf':
if self.loss_type == 'corner_loss':
if self.only_det:
loss_loc = self.corner_loss(anchors, reg_loss_mask, reg_targets, result['loc'])
elif self.config.pred_type in ['motion', 'center']:
###only center/motion for pred
loss_loc_1 = self.corner_loss(anchors, reg_loss_mask[..., 0][..., [0]], reg_targets[..., [0], :],
result['loc'][..., [0], :])
pred_reg_loss_mask = reg_loss_mask[..., 1:, :]
if self.config.motion_state:
pred_reg_loss_mask = motion_mask # mask out static object
loss_loc_2 = F.smooth_l1_loss(result['loc'][..., 1:, :][pred_reg_loss_mask],
reg_targets[..., 1:, :][pred_reg_loss_mask])
self.optimizer_encoder.zero_grad()
self.optimizer_head.zero_grad()
loss_loc_1.backward(retain_graph=True)
grads[1] = []
grads[1].append(shared_feats_tensor.grad.data.clone().detach())
shared_feats_tensor.grad.data.zero_()
self.optimizer_encoder.zero_grad()
self.optimizer_head.zero_grad()
loss_loc_2.backward(retain_graph=True)
grads[2] = []
grads[2].append(shared_feats_tensor.grad.data.clone().detach())
shared_feats_tensor.grad.data.zero_()
loc_scale = True
grad_len += 2
###corners for pred
else:
loss_loc = self.corner_loss(anchors, reg_loss_mask, reg_targets, result['loc'])
else:
loss_loc = F.smooth_l1_loss(result['loc'][reg_loss_mask], reg_targets[reg_loss_mask])
if not loc_scale:
grad_len += 1
self.optimizer_encoder.zero_grad()
self.optimizer_head.zero_grad()
loss_loc.backward(retain_graph=True)
grads[1] = []
grads[1].append(shared_feats_tensor.grad.data.clone().detach())
shared_feats_tensor.grad.data.zero_()
'''
Motion state Loss
'''
if self.config.motion_state:
loss_motion = torch.sum(self.criterion['cls'](result['state'], motion_labels)) / N
self.optimizer_encoder.zero_grad()
self.optimizer_head.zero_grad()
loss_motion.backward(retain_graph=True)
grads[3] = []
grads[3].append(shared_feats_tensor.grad.data.clone().detach())
shared_feats_tensor.grad.data.zero_()
grad_len += 1
# ---------------------------------------------------------------------
# -- Frank-Wolfe iteration to compute scales.
scale = np.zeros(grad_len, dtype=np.float32)
sol, min_norm = MinNormSolver.find_min_norm_element([grads[t] for t in range(grad_len)])
for i in range(grad_len):
scale[i] = float(sol[i])
# print(scale)
return scale
| 39.100769
| 174
| 0.693061
| 8,045
| 50,831
| 4.063642
| 0.034804
| 0.029549
| 0.03533
| 0.02392
| 0.964028
| 0.961642
| 0.961091
| 0.957757
| 0.95476
| 0.953995
| 0
| 0.029387
| 0.159175
| 50,831
| 1,300
| 175
| 39.100769
| 0.735517
| 0.134544
| 0
| 0.926045
| 0
| 0
| 0.042559
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021436
| false
| 0
| 0.009646
| 0.002144
| 0.067524
| 0.007503
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d2296b311df25ef3a2dd98fa0c057cd6c6bbdcf2
| 13,393
|
py
|
Python
|
procyon/starcatalog/migrations/0001_initial.py
|
jaycrossler/procyon
|
80c2069fc60289b6b4c832ecba964e133823f300
|
[
"MIT"
] | 2
|
2018-03-04T04:45:02.000Z
|
2018-03-04T18:08:57.000Z
|
procyon/starcatalog/migrations/0001_initial.py
|
jaycrossler/procyon
|
80c2069fc60289b6b4c832ecba964e133823f300
|
[
"MIT"
] | null | null | null |
procyon/starcatalog/migrations/0001_initial.py
|
jaycrossler/procyon
|
80c2069fc60289b6b4c832ecba964e133823f300
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
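# Initial South schema migration for the star catalog app: forwards() creates
# the StarPossiblyHabitable, StarType, Star, and Planet tables, backwards()
# drops them, and the models dict below records the frozen ORM state.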
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'StarPossiblyHabitable'
db.create_table(u'starcatalog_starpossiblyhabitable', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('HIP', self.gf('django.db.models.fields.PositiveIntegerField')(db_index=True, null=True, blank=True)),
))
db.send_create_signal(u'starcatalog', ['StarPossiblyHabitable'])
# Adding model 'StarType'
db.create_table(u'starcatalog_startype', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('symbol', self.gf('django.db.models.fields.CharField')(default='K', max_length=2, db_index=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=30, null=True, blank=True)),
('description', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
('surface_temp_range', self.gf('django.db.models.fields.CharField')(max_length=30, null=True, blank=True)),
('base_color', self.gf('django.db.models.fields.CharField')(default='#ffddbe', max_length=8, null=True, blank=True)),
('mass_range', self.gf('django.db.models.fields.CharField')(max_length=30, null=True, blank=True)),
('radius_range', self.gf('django.db.models.fields.CharField')(max_length=30, null=True, blank=True)),
('luminosity_range', self.gf('django.db.models.fields.CharField')(max_length=30, null=True, blank=True)),
('age', self.gf('django.db.models.fields.CharField')(default='5300', max_length=30, null=True, blank=True)),
))
db.send_create_signal(u'starcatalog', ['StarType'])
# Adding model 'Star'
db.create_table(u'starcatalog_star', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('HIP', self.gf('django.db.models.fields.PositiveIntegerField')(db_index=True, null=True, blank=True)),
('HD', self.gf('django.db.models.fields.PositiveIntegerField')(db_index=True, null=True, blank=True)),
('HR', self.gf('django.db.models.fields.PositiveIntegerField')(db_index=True, null=True, blank=True)),
('gliese', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=40, null=True, blank=True)),
('bayer_flamsteed', self.gf('django.db.models.fields.CharField')(max_length=40, null=True, blank=True)),
('proper_name', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=100, null=True, blank=True)),
('RA', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('dec', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('distance_parsecs', self.gf('django.db.models.fields.FloatField')(db_index=True, null=True, blank=True)),
('PMRA', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('PMDec', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('RV', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('mag', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('abs_mag', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('spectrum', self.gf('django.db.models.fields.CharField')(max_length=40, null=True, blank=True)),
('color_index', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('X', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('Y', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('Z', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('VX', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('VY', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('VZ', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
))
db.send_create_signal(u'starcatalog', ['Star'])
# Adding model 'Planet'
db.create_table(u'starcatalog_planet', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=60, null=True, blank=True)),
('mass', self.gf('django.db.models.fields.FloatField')(db_index=True, null=True, blank=True)),
('semi_major_axis', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('orbital_period', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('orbital_eccentricity', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('periastron', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('periastron_time', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('velocity_semi_amplitude', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('other_name', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=60, null=True, blank=True)),
('HD', self.gf('django.db.models.fields.PositiveIntegerField')(db_index=True, null=True, blank=True)),
('HR', self.gf('django.db.models.fields.PositiveIntegerField')(db_index=True, null=True, blank=True)),
('HIP', self.gf('django.db.models.fields.PositiveIntegerField')(db_index=True, null=True, blank=True)),
('gliese', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=40, null=True, blank=True)),
('kepler_id', self.gf('django.db.models.fields.CharField')(db_index=True, max_length=40, null=True, blank=True)),
('radius', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('density', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
('gravity', self.gf('django.db.models.fields.FloatField')(null=True, blank=True)),
))
db.send_create_signal(u'starcatalog', ['Planet'])
def backwards(self, orm):
# Deleting model 'StarPossiblyHabitable'
db.delete_table(u'starcatalog_starpossiblyhabitable')
# Deleting model 'StarType'
db.delete_table(u'starcatalog_startype')
# Deleting model 'Star'
db.delete_table(u'starcatalog_star')
# Deleting model 'Planet'
db.delete_table(u'starcatalog_planet')
models = {
u'starcatalog.planet': {
'HD': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'HIP': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'HR': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'Meta': {'ordering': "['name']", 'object_name': 'Planet'},
'density': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'gliese': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '40', 'null': 'True', 'blank': 'True'}),
'gravity': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kepler_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '40', 'null': 'True', 'blank': 'True'}),
'mass': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '60', 'null': 'True', 'blank': 'True'}),
'orbital_eccentricity': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'orbital_period': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'other_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '60', 'null': 'True', 'blank': 'True'}),
'periastron': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'periastron_time': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'radius': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'semi_major_axis': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'velocity_semi_amplitude': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'})
},
u'starcatalog.star': {
'HD': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'HIP': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'HR': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'Meta': {'ordering': "['distance_parsecs']", 'object_name': 'Star'},
'PMDec': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'PMRA': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'RA': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'RV': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'VX': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'VY': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'VZ': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'X': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'Y': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'Z': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'abs_mag': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'bayer_flamsteed': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
'color_index': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'dec': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'distance_parsecs': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'gliese': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '40', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mag': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'proper_name': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '100', 'null': 'True', 'blank': 'True'}),
'spectrum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'})
},
u'starcatalog.starpossiblyhabitable': {
'HIP': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'Meta': {'object_name': 'StarPossiblyHabitable'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'starcatalog.startype': {
'Meta': {'object_name': 'StarType'},
'age': ('django.db.models.fields.CharField', [], {'default': "'5300'", 'max_length': '30', 'null': 'True', 'blank': 'True'}),
'base_color': ('django.db.models.fields.CharField', [], {'default': "'#ffddbe'", 'max_length': '8', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'luminosity_range': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'mass_range': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'radius_range': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'surface_temp_range': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'symbol': ('django.db.models.fields.CharField', [], {'default': "'K'", 'max_length': '2', 'db_index': 'True'})
}
}
complete_apps = ['starcatalog']
| 80.197605
| 146
| 0.595236
| 1,570
| 13,393
| 4.987261
| 0.071338
| 0.109323
| 0.189527
| 0.270754
| 0.907535
| 0.869732
| 0.856066
| 0.849681
| 0.847254
| 0.807024
| 0
| 0.006516
| 0.175017
| 13,393
| 167
| 147
| 80.197605
| 0.702145
| 0.017546
| 0
| 0.201389
| 0
| 0
| 0.474865
| 0.295764
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013889
| false
| 0
| 0.027778
| 0
| 0.0625
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
737c558fcaf51eb858b56852149accc579ceeb5c
| 2,645
|
py
|
Python
|
data_loader.py
|
HarryXD2018/deep_motion_mag
|
2ba6c33bb8f57c5f30bd2e53372137844c389af0
|
[
"MIT"
] | 316
|
2018-08-01T06:49:04.000Z
|
2022-03-18T07:55:43.000Z
|
data_loader.py
|
HarryXD2018/deep_motion_mag
|
2ba6c33bb8f57c5f30bd2e53372137844c389af0
|
[
"MIT"
] | 22
|
2018-10-16T18:16:00.000Z
|
2021-07-08T02:13:02.000Z
|
data_loader.py
|
HarryXD2018/deep_motion_mag
|
2ba6c33bb8f57c5f30bd2e53372137844c389af0
|
[
"MIT"
] | 99
|
2018-08-15T12:00:44.000Z
|
2022-03-03T13:36:16.000Z
|
import tensorflow as tf
def read_and_decode(filename_queue, im_size=(512, 512, 1)):
writeOpts = tf.python_io.TFRecordOptions(\
tf.python_io.TFRecordCompressionType.ZLIB)
reader = tf.TFRecordReader(options=writeOpts)
_, single_example = reader.read(filename_queue)
features = tf.parse_single_example(
single_example,
features={
'frameA': tf.FixedLenFeature([], tf.string),
'frameB': tf.FixedLenFeature([], tf.string),
'amplified': tf.FixedLenFeature([], tf.string),
'amplification_factor': tf.FixedLenFeature([], tf.float32),
})
frameA = tf.decode_raw(features['frameA'], tf.uint8)
frameB = tf.decode_raw(features['frameB'], tf.uint8)
frameAmp = tf.decode_raw(features['amplified'], tf.uint8)
amplification_factor = tf.cast(features['amplification_factor'], tf.float32)
frameA = tf.reshape(frameA, im_size)
frameB = tf.reshape(frameB, im_size)
frameAmp = tf.reshape(frameAmp, im_size)
# Normalize to -1 to +1
frameA = tf.to_float(frameA) / 127.5 - 1.0
frameB = tf.to_float(frameB) / 127.5 - 1.0
frameAmp = tf.to_float(frameAmp) / 127.5 - 1.0
return frameA, frameB, frameAmp, amplification_factor
def read_and_decode_3frames(filename_queue, im_size=(512, 512, 1)):
writeOpts = tf.python_io.TFRecordOptions(\
tf.python_io.TFRecordCompressionType.ZLIB)
reader = tf.TFRecordReader(options=writeOpts)
_, single_example = reader.read(filename_queue)
features = tf.parse_single_example(
single_example,
features={
'frameA': tf.FixedLenFeature([], tf.string),
'frameB': tf.FixedLenFeature([], tf.string),
'frameC': tf.FixedLenFeature([], tf.string),
'amplified': tf.FixedLenFeature([], tf.string),
'amplification_factor': tf.FixedLenFeature([], tf.float32),
})
frameA = tf.decode_raw(features['frameA'], tf.uint8)
frameB = tf.decode_raw(features['frameB'], tf.uint8)
frameC = tf.decode_raw(features['frameC'], tf.uint8)
frameAmp = tf.decode_raw(features['amplified'], tf.uint8)
amplification_factor = tf.cast(features['amplification_factor'], tf.float32)
frameA = tf.reshape(frameA, im_size)
frameB = tf.reshape(frameB, im_size)
frameC = tf.reshape(frameC, im_size)
frameAmp = tf.reshape(frameAmp, im_size)
# Normalize to -1 to +1
frameA = tf.to_float(frameA) / 127.5 - 1.0
frameB = tf.to_float(frameB) / 127.5 - 1.0
frameC = tf.to_float(frameC) / 127.5 - 1.0
frameAmp = tf.to_float(frameAmp) / 127.5 - 1.0
return frameA, frameB, frameC, frameAmp, amplification_factor
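# A minimal usage sketch wiring the decoder above into a TF1 queue-based
# input pipeline. The glob pattern and batch parameters are illustrative
# assumptions, not values taken from the repository.
if __name__ == "__main__":
    filenames = tf.gfile.Glob("train/*.tfrecords")  # hypothetical location
    filename_queue = tf.train.string_input_producer(filenames, shuffle=True)
    frameA, frameB, frameAmp, amp = read_and_decode(filename_queue)
    # Assemble shuffled training batches from the decoded, normalized frames
    batch = tf.train.shuffle_batch(
        [frameA, frameB, frameAmp, amp],
        batch_size=4, capacity=100, min_after_dequeue=10)
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        coord = tf.train.Coordinator()
        threads = tf.train.start_queue_runners(sess=sess, coord=coord)
        frames_a, frames_b, frames_amp, amps = sess.run(batch)
        coord.request_stop()
        coord.join(threads)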
| 40.692308
| 80
| 0.67448
| 334
| 2,645
| 5.179641
| 0.146707
| 0.046243
| 0.098844
| 0.101156
| 0.885549
| 0.885549
| 0.885549
| 0.885549
| 0.885549
| 0.885549
| 0
| 0.035365
| 0.187524
| 2,645
| 64
| 81
| 41.328125
| 0.76966
| 0.016257
| 0
| 0.830189
| 0
| 0
| 0.067718
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037736
| false
| 0
| 0.018868
| 0
| 0.09434
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
73efa7819178b09587cad5b25ee0cee422cb805c
| 23,815
|
py
|
Python
|
sdk/python/pulumi_keycloak/role.py
|
davide-talesco/pulumi-keycloak
|
08d66be6f2bf578d4292e29eb6181794375bc4e5
|
[
"ECL-2.0",
"Apache-2.0"
] | 13
|
2020-04-28T15:20:56.000Z
|
2022-03-24T18:00:17.000Z
|
sdk/python/pulumi_keycloak/role.py
|
davide-talesco/pulumi-keycloak
|
08d66be6f2bf578d4292e29eb6181794375bc4e5
|
[
"ECL-2.0",
"Apache-2.0"
] | 49
|
2020-02-06T17:53:35.000Z
|
2022-03-25T19:36:08.000Z
|
sdk/python/pulumi_keycloak/role.py
|
davide-talesco/pulumi-keycloak
|
08d66be6f2bf578d4292e29eb6181794375bc4e5
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2020-06-09T01:08:56.000Z
|
2021-12-07T15:30:37.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['RoleArgs', 'Role']
@pulumi.input_type
class RoleArgs:
def __init__(__self__, *,
realm_id: pulumi.Input[str],
attributes: Optional[pulumi.Input[Mapping[str, Any]]] = None,
client_id: Optional[pulumi.Input[str]] = None,
composite_roles: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a Role resource.
:param pulumi.Input[str] realm_id: The realm this role exists within.
:param pulumi.Input[Mapping[str, Any]] attributes: A map representing attributes for the role. In order to add multivalue attributes, use `##` to separate the values. Max length for each value is 255 chars
:param pulumi.Input[str] client_id: When specified, this role will be created as a client role attached to the client with the provided ID
:param pulumi.Input[Sequence[pulumi.Input[str]]] composite_roles: When specified, this role will be a composite role, composed of all roles that have an ID present within this list.
:param pulumi.Input[str] description: The description of the role
:param pulumi.Input[str] name: The name of the role
"""
pulumi.set(__self__, "realm_id", realm_id)
if attributes is not None:
pulumi.set(__self__, "attributes", attributes)
if client_id is not None:
pulumi.set(__self__, "client_id", client_id)
if composite_roles is not None:
pulumi.set(__self__, "composite_roles", composite_roles)
if description is not None:
pulumi.set(__self__, "description", description)
if name is not None:
pulumi.set(__self__, "name", name)
@property
@pulumi.getter(name="realmId")
def realm_id(self) -> pulumi.Input[str]:
"""
The realm this role exists within.
"""
return pulumi.get(self, "realm_id")
@realm_id.setter
def realm_id(self, value: pulumi.Input[str]):
pulumi.set(self, "realm_id", value)
@property
@pulumi.getter
def attributes(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
A map representing attributes for the role. In order to add multivalue attributes, use `##` to separate the values. Max length for each value is 255 chars
"""
return pulumi.get(self, "attributes")
@attributes.setter
def attributes(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "attributes", value)
@property
@pulumi.getter(name="clientId")
def client_id(self) -> Optional[pulumi.Input[str]]:
"""
When specified, this role will be created as a client role attached to the client with the provided ID
"""
return pulumi.get(self, "client_id")
@client_id.setter
def client_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "client_id", value)
@property
@pulumi.getter(name="compositeRoles")
def composite_roles(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
When specified, this role will be a composite role, composed of all roles that have an ID present within this list.
"""
return pulumi.get(self, "composite_roles")
@composite_roles.setter
def composite_roles(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "composite_roles", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
The description of the role
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the role
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@pulumi.input_type
class _RoleState:
def __init__(__self__, *,
attributes: Optional[pulumi.Input[Mapping[str, Any]]] = None,
client_id: Optional[pulumi.Input[str]] = None,
composite_roles: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
realm_id: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering Role resources.
:param pulumi.Input[Mapping[str, Any]] attributes: A map representing attributes for the role. In order to add multivalue attributes, use `##` to separate the values. Max length for each value is 255 chars
:param pulumi.Input[str] client_id: When specified, this role will be created as a client role attached to the client with the provided ID
:param pulumi.Input[Sequence[pulumi.Input[str]]] composite_roles: When specified, this role will be a composite role, composed of all roles that have an ID present within this list.
:param pulumi.Input[str] description: The description of the role
:param pulumi.Input[str] name: The name of the role
:param pulumi.Input[str] realm_id: The realm this role exists within.
"""
if attributes is not None:
pulumi.set(__self__, "attributes", attributes)
if client_id is not None:
pulumi.set(__self__, "client_id", client_id)
if composite_roles is not None:
pulumi.set(__self__, "composite_roles", composite_roles)
if description is not None:
pulumi.set(__self__, "description", description)
if name is not None:
pulumi.set(__self__, "name", name)
if realm_id is not None:
pulumi.set(__self__, "realm_id", realm_id)
@property
@pulumi.getter
def attributes(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
"""
A map representing attributes for the role. In order to add multivalue attributes, use `##` to separate the values. Max length for each value is 255 chars
"""
return pulumi.get(self, "attributes")
@attributes.setter
def attributes(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
pulumi.set(self, "attributes", value)
@property
@pulumi.getter(name="clientId")
def client_id(self) -> Optional[pulumi.Input[str]]:
"""
When specified, this role will be created as a client role attached to the client with the provided ID
"""
return pulumi.get(self, "client_id")
@client_id.setter
def client_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "client_id", value)
@property
@pulumi.getter(name="compositeRoles")
def composite_roles(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
When specified, this role will be a composite role, composed of all roles that have an ID present within this list.
"""
return pulumi.get(self, "composite_roles")
@composite_roles.setter
def composite_roles(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "composite_roles", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
The description of the role
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the role
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="realmId")
def realm_id(self) -> Optional[pulumi.Input[str]]:
"""
The realm this role exists within.
"""
return pulumi.get(self, "realm_id")
@realm_id.setter
def realm_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "realm_id", value)
class Role(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
attributes: Optional[pulumi.Input[Mapping[str, Any]]] = None,
client_id: Optional[pulumi.Input[str]] = None,
composite_roles: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
realm_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Allows for creating and managing roles within Keycloak.
Roles allow you to define privileges within Keycloak and map them to users and groups.
## Example Usage
### Realm Role
```python
import pulumi
import pulumi_keycloak as keycloak
realm = keycloak.Realm("realm",
realm="my-realm",
enabled=True)
realm_role = keycloak.Role("realmRole",
realm_id=realm.id,
description="My Realm Role",
attributes={
"key": "value",
"multivalue": "value1##value2",
})
```
### Client Role
```python
import pulumi
import pulumi_keycloak as keycloak
realm = keycloak.Realm("realm",
realm="my-realm",
enabled=True)
openid_client = keycloak.openid.Client("openidClient",
realm_id=realm.id,
client_id="client",
enabled=True,
access_type="CONFIDENTIAL",
valid_redirect_uris=["http://localhost:8080/openid-callback"])
client_role = keycloak.Role("clientRole",
realm_id=realm.id,
client_id=openid_client.id,
description="My Client Role",
attributes={
"key": "value",
})
```
### Composite Role
```python
import pulumi
import pulumi_keycloak as keycloak
realm = keycloak.Realm("realm",
realm="my-realm",
enabled=True)
# realm roles
create_role = keycloak.Role("createRole",
realm_id=realm.id,
attributes={
"key": "value",
})
read_role = keycloak.Role("readRole",
realm_id=realm.id,
attributes={
"key": "value",
})
update_role = keycloak.Role("updateRole",
realm_id=realm.id,
attributes={
"key": "value",
})
delete_role = keycloak.Role("deleteRole",
realm_id=realm.id,
attributes={
"key": "value",
})
# client role
openid_client = keycloak.openid.Client("openidClient",
realm_id=realm.id,
client_id="client",
enabled=True,
access_type="CONFIDENTIAL",
valid_redirect_uris=["http://localhost:8080/openid-callback"])
client_role = keycloak.Role("clientRole",
realm_id=realm.id,
client_id=openid_client.id,
description="My Client Role",
attributes={
"key": "value",
})
admin_role = keycloak.Role("adminRole",
realm_id=realm.id,
composite_roles=[
create_role.id,
read_role.id,
update_role.id,
delete_role.id,
client_role.id,
],
attributes={
"key": "value",
})
```
## Import
Roles can be imported using the format `{{realm_id}}/{{role_id}}`, where `role_id` is the unique ID that Keycloak assigns to the role. The ID is not easy to find in the GUI, but it appears in the URL when editing the role. Example:
```sh
$ pulumi import keycloak:index/role:Role role my-realm/7e8cf32a-8acb-4d34-89c4-04fb1d10ccad
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Mapping[str, Any]] attributes: A map representing attributes for the role. In order to add multivalue attributes, use `##` to separate the values. Max length for each value is 255 chars
:param pulumi.Input[str] client_id: When specified, this role will be created as a client role attached to the client with the provided ID
:param pulumi.Input[Sequence[pulumi.Input[str]]] composite_roles: When specified, this role will be a composite role, composed of all roles that have an ID present within this list.
:param pulumi.Input[str] description: The description of the role
:param pulumi.Input[str] name: The name of the role
:param pulumi.Input[str] realm_id: The realm this role exists within.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: RoleArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Allows for creating and managing roles within Keycloak.
Roles allow you to define privileges within Keycloak and map them to users and groups.
## Example Usage
### Realm Role
```python
import pulumi
import pulumi_keycloak as keycloak
realm = keycloak.Realm("realm",
realm="my-realm",
enabled=True)
realm_role = keycloak.Role("realmRole",
realm_id=realm.id,
description="My Realm Role",
attributes={
"key": "value",
"multivalue": "value1##value2",
})
```
### Client Role
```python
import pulumi
import pulumi_keycloak as keycloak
realm = keycloak.Realm("realm",
realm="my-realm",
enabled=True)
openid_client = keycloak.openid.Client("openidClient",
realm_id=realm.id,
client_id="client",
enabled=True,
access_type="CONFIDENTIAL",
valid_redirect_uris=["http://localhost:8080/openid-callback"])
client_role = keycloak.Role("clientRole",
realm_id=realm.id,
client_id=openid_client.id,
description="My Client Role",
attributes={
"key": "value",
})
```
### Composite Role
```python
import pulumi
import pulumi_keycloak as keycloak
realm = keycloak.Realm("realm",
realm="my-realm",
enabled=True)
# realm roles
create_role = keycloak.Role("createRole",
realm_id=realm.id,
attributes={
"key": "value",
})
read_role = keycloak.Role("readRole",
realm_id=realm.id,
attributes={
"key": "value",
})
update_role = keycloak.Role("updateRole",
realm_id=realm.id,
attributes={
"key": "value",
})
delete_role = keycloak.Role("deleteRole",
realm_id=realm.id,
attributes={
"key": "value",
})
# client role
openid_client = keycloak.openid.Client("openidClient",
realm_id=realm.id,
client_id="client",
enabled=True,
access_type="CONFIDENTIAL",
valid_redirect_uris=["http://localhost:8080/openid-callback"])
client_role = keycloak.Role("clientRole",
realm_id=realm.id,
client_id=openid_client.id,
description="My Client Role",
attributes={
"key": "value",
})
admin_role = keycloak.Role("adminRole",
realm_id=realm.id,
composite_roles=[
create_role.id,
read_role.id,
update_role.id,
delete_role.id,
client_role.id,
],
attributes={
"key": "value",
})
```
## Import
Roles can be imported using the format `{{realm_id}}/{{role_id}}`, where `role_id` is the unique ID that Keycloak assigns to the role. The ID is not easy to find in the GUI, but it appears in the URL when editing the role. Example:
```sh
$ pulumi import keycloak:index/role:Role role my-realm/7e8cf32a-8acb-4d34-89c4-04fb1d10ccad
```
:param str resource_name: The name of the resource.
:param RoleArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(RoleArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
attributes: Optional[pulumi.Input[Mapping[str, Any]]] = None,
client_id: Optional[pulumi.Input[str]] = None,
composite_roles: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
realm_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = RoleArgs.__new__(RoleArgs)
__props__.__dict__["attributes"] = attributes
__props__.__dict__["client_id"] = client_id
__props__.__dict__["composite_roles"] = composite_roles
__props__.__dict__["description"] = description
__props__.__dict__["name"] = name
if realm_id is None and not opts.urn:
raise TypeError("Missing required property 'realm_id'")
__props__.__dict__["realm_id"] = realm_id
super(Role, __self__).__init__(
'keycloak:index/role:Role',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
attributes: Optional[pulumi.Input[Mapping[str, Any]]] = None,
client_id: Optional[pulumi.Input[str]] = None,
composite_roles: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
description: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
realm_id: Optional[pulumi.Input[str]] = None) -> 'Role':
"""
Get an existing Role resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Mapping[str, Any]] attributes: A map representing attributes for the role. In order to add multivalue attributes, use `##` to separate the values. Max length for each value is 255 chars
:param pulumi.Input[str] client_id: When specified, this role will be created as a client role attached to the client with the provided ID
:param pulumi.Input[Sequence[pulumi.Input[str]]] composite_roles: When specified, this role will be a composite role, composed of all roles that have an ID present within this list.
:param pulumi.Input[str] description: The description of the role
:param pulumi.Input[str] name: The name of the role
:param pulumi.Input[str] realm_id: The realm this role exists within.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _RoleState.__new__(_RoleState)
__props__.__dict__["attributes"] = attributes
__props__.__dict__["client_id"] = client_id
__props__.__dict__["composite_roles"] = composite_roles
__props__.__dict__["description"] = description
__props__.__dict__["name"] = name
__props__.__dict__["realm_id"] = realm_id
return Role(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def attributes(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:
"""
A map representing attributes for the role. In order to add multivalue attributes, use `##` to separate the values. Max length for each value is 255 chars
"""
return pulumi.get(self, "attributes")
@property
@pulumi.getter(name="clientId")
def client_id(self) -> pulumi.Output[Optional[str]]:
"""
When specified, this role will be created as a client role attached to the client with the provided ID
"""
return pulumi.get(self, "client_id")
@property
@pulumi.getter(name="compositeRoles")
def composite_roles(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
When specified, this role will be a composite role, composed of all roles that have an ID present within this list.
"""
return pulumi.get(self, "composite_roles")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
The description of the role
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the role
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="realmId")
def realm_id(self) -> pulumi.Output[str]:
"""
The realm this role exists within.
"""
return pulumi.get(self, "realm_id")
| 39.559801
| 242
| 0.603527
| 2,749
| 23,815
| 5.055657
| 0.079665
| 0.075191
| 0.067492
| 0.052238
| 0.888257
| 0.873507
| 0.860699
| 0.850338
| 0.846669
| 0.832134
| 0
| 0.004386
| 0.291539
| 23,815
| 601
| 243
| 39.625624
| 0.819346
| 0.446903
| 0
| 0.758621
| 1
| 0
| 0.07708
| 0.002221
| 0
| 0
| 0
| 0
| 0
| 1
| 0.159483
| false
| 0.00431
| 0.021552
| 0
| 0.275862
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
fb69b7a117c5f9cb06f9dab7d261ff3c459c7524
| 8,156
|
py
|
Python
|
defences/FashionMNIST/regularization.py
|
calinbiberea/imperial-individual-project
|
86f224f183b8348d21b4c7a4aed408cd1ca41df1
|
[
"MIT"
] | null | null | null |
defences/FashionMNIST/regularization.py
|
calinbiberea/imperial-individual-project
|
86f224f183b8348d21b4c7a4aed408cd1ca41df1
|
[
"MIT"
] | null | null | null |
defences/FashionMNIST/regularization.py
|
calinbiberea/imperial-individual-project
|
86f224f183b8348d21b4c7a4aed408cd1ca41df1
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
from tqdm.notebook import tnrange, tqdm
# For loading model sanely
import os.path
import sys
# For Jacobian Regularization
from jacobian import JacobianReg
# Add the project root to the import path
sys.path.append("../../")
import models.lenet as lenet
# Define the `device` PyTorch will be running on, please hope it is CUDA
device = "cuda" if torch.cuda.is_available() else "cpu"
print("Notebook will use PyTorch Device: " + device.upper())
def jacobian_training(
trainSetLoader,
load_if_available=False,
load_path="../models_data/FashionMNIST/fashion_mnist_jacobian",
**kwargs
):
# Various training parameters
epochs = 20
learning_rate = 0.01
# Network parameters
loss_function = nn.CrossEntropyLoss()
model = lenet.LeNet5().to(device)
model.train()
jacobian_reg = JacobianReg()
jacobian_reg_lambda = 0.01
# Consider using ADAM here as another gradient descent algorithm
optimizer = torch.optim.SGD(
model.parameters(), lr=learning_rate, momentum=0.9, weight_decay=5e-4
)
# If a trained model already exists, give up the training part
if load_if_available and os.path.isfile(load_path):
print("Found already trained model...")
model = torch.load(load_path)
print("... loaded!")
else:
print("Training the model...")
# Use a pretty progress bar to show updates
for epoch in tnrange(epochs, desc="Jacobian Regularization Training Progress"):
for _, (images, labels) in enumerate(tqdm(trainSetLoader, desc="Batches")):
# Cast to proper tensors
images, labels = images.to(device), labels.to(device)
# Require gradients for Jacobian regularization
images.requires_grad = True
# Predict and optimise
optimizer.zero_grad()
# Predict
logits = model(images)
# Calculate loss
loss = loss_function(logits, labels)
# Introduce Jacobian regularization
jacobian_reg_loss = jacobian_reg(images, logits)
# Total loss
loss = loss + jacobian_reg_lambda * jacobian_reg_loss
# Gradient descent
loss.backward()
optimizer.step()
print("... done!")
# Make sure the model is in eval mode before returning
model.eval()
return model
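# A minimal call sketch, assuming a FashionMNIST DataLoader (the name
# `train_loader` is illustrative):
#     model = jacobian_training(train_loader, load_if_available=True)
# Each optimizer step above minimizes
#     loss = CE(logits, labels) + jacobian_reg_lambda * ||d logits / d images||_F^2
# with the squared Frobenius norm of the input-output Jacobian estimated
# stochastically by JacobianReg.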
def ALP_training(
trainSetLoader,
attack_name,
attack_function,
load_if_available=False,
load_path="../models_data/FashionMNIST/fashion_mnist_alp",
**kwargs
):
# Various training parameters
epochs = 20
learning_rate = 0.01
# ALP factor
alp_loss_function = nn.MSELoss()
alp_lambda = 0.2
# Network parameters
loss_function = nn.CrossEntropyLoss()
model = lenet.LeNet5().to(device)
model.train()
# Consider using ADAM here as another gradient descent algorithm
optimizer = torch.optim.SGD(
model.parameters(), lr=learning_rate, momentum=0.9, weight_decay=5e-4
)
# If a trained model already exists, give up the training part
if load_if_available and os.path.isfile(load_path):
print("Found already trained model...")
model = torch.load(load_path)
print("... loaded!")
else:
print("Training the model...")
# Check if using epsilon
if "epsilon" in kwargs:
epsilon = kwargs["epsilon"]
else:
epsilon = None
# Check if using alpha
if "alpha" in kwargs:
alpha = kwargs["alpha"]
else:
alpha = None
# Get iterations
if "iterations" in kwargs:
iterations = kwargs["iterations"]
else:
iterations = None
# Use a pretty progress bar to show updates
for epoch in tnrange(epochs, desc="Adversarial Training Progress"):
for _, (images, labels) in enumerate(tqdm(trainSetLoader, desc="Batches")):
# Cast to proper tensors
images, labels = images.to(device), labels.to(device)
# Run the attack
model.eval()
perturbed_images = attack_function(
images,
labels,
model,
loss_function,
epsilon=epsilon,
alpha=alpha,
scale=True,
iterations=iterations,
)
model.train()
# Predict and optimise
optimizer.zero_grad()
logits = model(images)
loss = loss_function(logits, labels) + alp_lambda * alp_loss_function(
model(images), model(perturbed_images)
)
# Gradient descent
loss.backward()
optimizer.step()
print("... done!")
# Make sure the model is in eval mode before returning
model.eval()
return model
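# The MSE term above implements Adversarial Logit Pairing (Kannan et al.,
# 2018): it pulls the logits of clean and attacked images together, so each
# batch roughly minimizes
#     loss = CE(f(x), y) + alp_lambda * MSE(f(x), f(x_adv))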
def jacobian_ALP_training(
trainSetLoader,
attack_name,
attack_function,
load_if_available=False,
load_path="../models_data/FashionMNIST/fashion_mnist_jacobian_alp",
**kwargs
):
# Various training parameters
epochs = 20
learning_rate = 0.01
# Jacobian Factor
jacobian_reg = JacobianReg()
jacobian_reg_lambda = 0.01
# ALP factor
alp_loss_function = nn.MSELoss()
alp_lambda = 0.2
# Network parameters
loss_function = nn.CrossEntropyLoss()
model = lenet.LeNet5().to(device)
model.train()
# Consider using ADAM here as another gradient descent algorithm
optimizer = torch.optim.SGD(
model.parameters(), lr=learning_rate, momentum=0.9, weight_decay=5e-4
)
# If a trained model already exists, give up the training part
if load_if_available and os.path.isfile(load_path):
print("Found already trained model...")
model = torch.load(load_path)
print("... loaded!")
else:
print("Training the model...")
# Check if using epsilon
if "epsilon" in kwargs:
epsilon = kwargs["epsilon"]
else:
epsilon = None
# Check if using alpha
if "alpha" in kwargs:
alpha = kwargs["alpha"]
else:
alpha = None
# Get iterations
if "iterations" in kwargs:
iterations = kwargs["iterations"]
else:
iterations = None
# Use a pretty progress bar to show updates
for epoch in tnrange(epochs, desc="Adversarial Training Progress"):
for _, (images, labels) in enumerate(tqdm(trainSetLoader, desc="Batches")):
# Cast to proper tensors
images, labels = images.to(device), labels.to(device)
# Require gradients for Jacobian regularization
images.requires_grad = True
# Run the attack
model.eval()
perturbed_images = attack_function(
images,
labels,
model,
loss_function,
epsilon=epsilon,
alpha=alpha,
scale=True,
iterations=iterations,
)
model.train()
# Predict and optimise
optimizer.zero_grad()
logits = model(images)
loss = loss_function(logits, labels) + alp_lambda * alp_loss_function(
model(images), model(perturbed_images)
)
# Introduce Jacobian regularization
jacobian_reg_loss = jacobian_reg(images, logits)
# Total loss
loss = loss + jacobian_reg_lambda * jacobian_reg_loss
# Gradient descent
loss.backward()
optimizer.step()
print("... done!")
# Make sure the model is in eval mode before returning
model.eval()
return model
| 28.027491
| 87
| 0.576753
| 861
| 8,156
| 5.343786
| 0.178862
| 0.031298
| 0.019561
| 0.013041
| 0.897196
| 0.891111
| 0.883504
| 0.883504
| 0.865899
| 0.865899
| 0
| 0.007445
| 0.341221
| 8,156
| 290
| 88
| 28.124138
| 0.848874
| 0.187347
| 0
| 0.855491
| 0
| 0
| 0.093812
| 0.022655
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017341
| false
| 0
| 0.040462
| 0
| 0.075145
| 0.075145
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fb82e21654f94caf74e378984dd91c4d538c388d
| 7,779
|
py
|
Python
|
tests/test_observable/test_slice.py
|
christiansandberg/RxPY
|
036027d2858ea6c9d45839c863bd791e5bb50c36
|
[
"MIT"
] | null | null | null |
tests/test_observable/test_slice.py
|
christiansandberg/RxPY
|
036027d2858ea6c9d45839c863bd791e5bb50c36
|
[
"MIT"
] | null | null | null |
tests/test_observable/test_slice.py
|
christiansandberg/RxPY
|
036027d2858ea6c9d45839c863bd791e5bb50c36
|
[
"MIT"
] | null | null | null |
import unittest
from reactivex.observable.observable import Observable
from reactivex.testing import ReactiveTest, TestScheduler
on_next = ReactiveTest.on_next
on_completed = ReactiveTest.on_completed
on_error = ReactiveTest.on_error
subscribe = ReactiveTest.subscribe
subscribed = ReactiveTest.subscribed
disposed = ReactiveTest.disposed
created = ReactiveTest.created
class TestSlice(unittest.TestCase):
def test_slice_empty(self):
scheduler = TestScheduler()
msgs = [on_next(150, 1), on_completed(250)]
xs = scheduler.create_hot_observable(msgs)
def create() -> Observable[int]:
return xs[1:42]
res = scheduler.start(create=create).messages
assert res == [on_completed(250)]
def test_slice_same(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(70, -2),
on_next(150, -1),
on_next(210, 0),
on_next(230, 1),
on_next(270, 2),
on_next(280, 3),
on_next(300, 4),
on_next(310, 5),
on_next(340, 6),
on_next(370, 7),
on_next(410, 8),
on_next(415, 9),
on_completed(690),
)
def create():
return xs[0:10]
results = scheduler.start(create)
assert results.messages == [
on_next(210, 0),
on_next(230, 1),
on_next(270, 2),
on_next(280, 3),
on_next(300, 4),
on_next(310, 5),
on_next(340, 6),
on_next(370, 7),
on_next(410, 8),
on_next(415, 9),
on_completed(415),
]
assert xs.subscriptions == [subscribe(200, 415)]
def test_slice_same_noop(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(70, -2),
on_next(150, -1),
on_next(210, 0),
on_next(230, 1),
on_next(270, 2),
on_next(280, 3),
on_next(300, 4),
on_next(310, 5),
on_next(340, 6),
on_next(370, 7),
on_next(410, 8),
on_next(415, 9),
on_completed(690),
)
def create():
return xs[:]
results = scheduler.start(create)
assert results.messages == [
on_next(210, 0),
on_next(230, 1),
on_next(270, 2),
on_next(280, 3),
on_next(300, 4),
on_next(310, 5),
on_next(340, 6),
on_next(370, 7),
on_next(410, 8),
on_next(415, 9),
on_completed(690),
]
assert xs.subscriptions == [subscribe(200, 690)]
def test_slice_skip_first(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(70, -2),
on_next(150, -1),
on_next(210, 0),
on_next(230, 1),
on_next(270, 2),
on_next(280, 3),
on_next(300, 4),
on_next(310, 5),
on_next(340, 6),
on_next(370, 7),
on_next(410, 8),
on_next(415, 9),
on_completed(690),
)
def create():
return xs[2:]
results = scheduler.start(create)
assert results.messages == [
on_next(270, 2),
on_next(280, 3),
on_next(300, 4),
on_next(310, 5),
on_next(340, 6),
on_next(370, 7),
on_next(410, 8),
on_next(415, 9),
on_completed(690),
]
assert xs.subscriptions == [subscribe(200, 690)]
def test_slice_skip_last(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(70, -2),
on_next(150, -1),
on_next(210, 0),
on_next(230, 1),
on_next(270, 2),
on_next(280, 3),
on_next(300, 4),
on_next(310, 5),
on_next(340, 6),
on_next(370, 7),
on_next(410, 8),
on_next(415, 9),
on_completed(690),
)
def create():
return xs[:-2]
results = scheduler.start(create)
assert results.messages == [
on_next(270, 0),
on_next(280, 1),
on_next(300, 2),
on_next(310, 3),
on_next(340, 4),
on_next(370, 5),
on_next(410, 6),
on_next(415, 7),
on_completed(690),
]
assert xs.subscriptions == [subscribe(200, 690)]
def test_slice_take_last(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(70, -2),
on_next(150, -1),
on_next(210, 0),
on_next(230, 1),
on_next(270, 2),
on_next(280, 3),
on_next(300, 4),
on_next(310, 5),
on_next(340, 6),
on_next(370, 7),
on_next(410, 8),
on_next(415, 9),
on_completed(690),
)
def create():
return xs[-2:]
results = scheduler.start(create)
assert results.messages == [on_next(690, 8), on_next(690, 9), on_completed(690)]
assert xs.subscriptions == [subscribe(200, 690)]
def test_slice_take_first(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(70, -2),
on_next(150, -1),
on_next(210, 0),
on_next(230, 1),
on_next(270, 2),
on_next(280, 3),
on_next(300, 4),
on_next(310, 5),
on_next(340, 6),
on_next(370, 7),
on_next(410, 8),
on_next(415, 9),
on_completed(690),
)
def create():
return xs[:2]
results = scheduler.start(create)
assert results.messages == [on_next(210, 0), on_next(230, 1), on_completed(230)]
assert xs.subscriptions == [subscribe(200, 230)]
def test_slice_take_last_skip_all(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(70, -2),
on_next(150, -1),
on_next(210, 0),
on_next(230, 1),
on_next(270, 2),
on_next(280, 3),
on_next(300, 4),
on_next(310, 5),
on_next(340, 6),
on_next(370, 7),
on_next(410, 8),
on_next(415, 9),
on_completed(690),
)
def create():
return xs[-2:0]
results = scheduler.start(create)
assert results.messages == [on_completed(200)]
def test_slice_step_2(self):
scheduler = TestScheduler()
xs = scheduler.create_hot_observable(
on_next(70, -2),
on_next(150, -1),
on_next(210, 0),
on_next(230, 1),
on_next(270, 2),
on_next(280, 3),
on_next(300, 4),
on_next(310, 5),
on_next(340, 6),
on_next(370, 7),
on_next(410, 8),
on_next(415, 9),
on_completed(690),
)
def create():
return xs[0:10:2]
results = scheduler.start(create)
assert results.messages == [
on_next(210, 0),
on_next(270, 2),
on_next(300, 4),
on_next(340, 6),
on_next(410, 8),
on_completed(415),
]
assert xs.subscriptions == [subscribe(200, 415)]
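# For context: Observable.__getitem__ maps these slices onto Rx operators,
# roughly skip/take for non-negative bounds and skip_last/take_last for
# negative ones. skip_last(n) buffers n items and re-emits each value only
# when the item n notifications later arrives, which is why xs[:-2] in
# test_slice_skip_last emits value 0 at time 270 rather than 210.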
| 28.184783
| 88
| 0.485152
| 943
| 7,779
| 3.778367
| 0.076352
| 0.242492
| 0.041257
| 0.033679
| 0.811395
| 0.783609
| 0.775189
| 0.775189
| 0.761156
| 0.736177
| 0
| 0.142524
| 0.392981
| 7,779
| 275
| 89
| 28.287273
| 0.612029
| 0
| 0
| 0.759184
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065306
| 1
| 0.073469
| false
| 0
| 0.012245
| 0.036735
| 0.126531
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
83a023d5a0d5d241473fe2f61dc7c8ec3cb69169
| 5,463
|
py
|
Python
|
lib/turkish_nltk/trnltk/treebank/explorer.py
|
myasiny/wordembed
|
d4df516a4ac6eed71d1cc6e085638e895c525de6
|
[
"MIT"
] | null | null | null |
lib/turkish_nltk/trnltk/treebank/explorer.py
|
myasiny/wordembed
|
d4df516a4ac6eed71d1cc6e085638e895c525de6
|
[
"MIT"
] | null | null | null |
lib/turkish_nltk/trnltk/treebank/explorer.py
|
myasiny/wordembed
|
d4df516a4ac6eed71d1cc6e085638e895c525de6
|
[
"MIT"
] | null | null | null |
# coding=utf-8
"""
Copyright 2012 Ali Ok (aliokATapacheDOTorg)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from trnltk.parseset.xmlbindings import UnparsableWordBinding, DerivationalSuffixBinding
from trnltk.treebank.model import HierarchicalIndex
class ConcordanceIndex(object):
def offsets(self, sth, syntactic_category, secondary_syntactic_category):
raise NotImplementedError()
class CompleteWordConcordanceIndex(ConcordanceIndex):
def __init__(self, word_list):
self._offsets = HierarchicalIndex(3)
for index, word in enumerate(word_list):
if isinstance(word, UnparsableWordBinding):
continue
self._offsets.insert(index, word.str, word.syntactic_category, word.secondary_syntactic_category)
def offsets(self, word_str, syntactic_category=None, secondary_syntactic_category=None):
assert word_str is not None
if secondary_syntactic_category:
assert syntactic_category is not None
args = [word_str, syntactic_category, secondary_syntactic_category]
args = filter(lambda x : x is not None, args)
return self._offsets.get(*args)
class RootConcordanceIndex(ConcordanceIndex):
def __init__(self, word_list):
self._offsets = HierarchicalIndex(3)
for index, word in enumerate(word_list):
if isinstance(word, UnparsableWordBinding):
continue
self._offsets.insert(index, word.root.str, word.root.syntactic_category, word.root.secondary_syntactic_category)
def offsets(self, word_str, syntactic_category=None, secondary_syntactic_category=None):
assert word_str is not None
if secondary_syntactic_category:
assert syntactic_category is not None
args = [word_str, syntactic_category, secondary_syntactic_category]
args = filter(lambda x : x is not None, args)
return self._offsets.get(*args)
class DictionaryItemConcordanceIndex(ConcordanceIndex):
def __init__(self, word_list):
self._offsets = HierarchicalIndex(3)
for index, word in enumerate(word_list):
if isinstance(word, UnparsableWordBinding):
continue
self._offsets.insert(index, word.root.lemma_root, word.root.syntactic_category, word.root.secondary_syntactic_category)
def offsets(self, word_str, syntactic_category=None, secondary_syntactic_category=None):
assert word_str is not None
if secondary_syntactic_category:
assert syntactic_category is not None
args = [word_str, syntactic_category, secondary_syntactic_category]
args = filter(lambda x : x is not None, args)
return self._offsets.get(*args)
class TransitionWordConcordanceIndex(ConcordanceIndex):
def __init__(self, word_list):
self._offsets = HierarchicalIndex(3)
for index, word in enumerate(word_list):
if isinstance(word, UnparsableWordBinding):
continue
secondary_syntactic_category = word.root.secondary_syntactic_category
for suffix in word.suffixes:
syntactic_category = suffix.to_syntactic_category
if isinstance(suffix, DerivationalSuffixBinding):
secondary_syntactic_category = None
self._offsets.insert(index, suffix.word, syntactic_category, secondary_syntactic_category)
def offsets(self, word_str, syntactic_category=None, secondary_syntactic_category=None):
assert word_str is not None
if secondary_syntactic_category:
assert syntactic_category is not None
args = [word_str, syntactic_category, secondary_syntactic_category]
args = filter(lambda x : x is not None, args)
return self._offsets.get(*args)
class TransitionMatchedWordConcordanceIndex(ConcordanceIndex):
def __init__(self, word_list):
self._offsets = HierarchicalIndex(3)
for index, word in enumerate(word_list):
if isinstance(word, UnparsableWordBinding):
continue
syntactic_category = word.root.syntactic_category
secondary_syntactic_category = word.root.secondary_syntactic_category
for suffix in word.suffixes:
syntactic_category = suffix.to_syntactic_category
if isinstance(suffix, DerivationalSuffixBinding):
secondary_syntactic_category = None
self._offsets.insert(index, suffix.matched_word, syntactic_category, secondary_syntactic_category)
def offsets(self, word_str, syntactic_category=None, secondary_syntactic_category=None):
assert word_str is not None
if secondary_syntactic_category:
assert syntactic_category is not None
args = [word_str, syntactic_category, secondary_syntactic_category]
args = filter(lambda x : x is not None, args)
return self._offsets.get(*args)
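# The offsets() implementations above are identical across the four concrete
# indexes; a hypothetical refactoring (not code from the library) could lift
# the shared lookup into the base class:
#
#     class ConcordanceIndex(object):
#         def offsets(self, word_str, syntactic_category=None,
#                     secondary_syntactic_category=None):
#             assert word_str is not None
#             if secondary_syntactic_category:
#                 assert syntactic_category is not None
#             args = [word_str, syntactic_category,
#                     secondary_syntactic_category]
#             args = filter(lambda x: x is not None, args)
#             return self._offsets.get(*args)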
| 39.875912
| 131
| 0.713161
| 620
| 5,463
| 6.05
| 0.190323
| 0.244735
| 0.18715
| 0.063983
| 0.765396
| 0.7494
| 0.7494
| 0.7494
| 0.7494
| 0.7494
| 0
| 0.003301
| 0.223687
| 5,463
| 136
| 132
| 40.169118
| 0.88116
| 0.106718
| 0
| 0.813953
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116279
| 1
| 0.127907
| false
| 0
| 0.023256
| 0
| 0.27907
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
83bd222dea7863fec989a6804684d9360261104a
| 8,970
|
py
|
Python
|
tests/unittest/benchmark_runner/common/clouds/shared/s3/test_s3_operations.py
|
kpouget/benchmark-runner
|
eecdb57d12f8c17268800632722af8fe8046185a
|
[
"Apache-2.0"
] | 10
|
2021-07-21T21:44:20.000Z
|
2022-02-24T22:01:13.000Z
|
tests/unittest/benchmark_runner/common/clouds/shared/s3/test_s3_operations.py
|
kpouget/benchmark-runner
|
eecdb57d12f8c17268800632722af8fe8046185a
|
[
"Apache-2.0"
] | 83
|
2021-07-20T14:37:44.000Z
|
2022-03-24T13:48:04.000Z
|
tests/unittest/benchmark_runner/common/clouds/shared/s3/test_s3_operations.py
|
kpouget/benchmark-runner
|
eecdb57d12f8c17268800632722af8fe8046185a
|
[
"Apache-2.0"
] | 6
|
2021-07-14T21:12:48.000Z
|
2022-02-15T12:48:27.000Z
|
import boto3
import tempfile
import os
from os import listdir
from os.path import isfile, join
from benchmark_runner.common.clouds.shared.s3.s3_operations import S3Operations
# workaround for moto DeprecationWarning
import warnings
with warnings.catch_warnings():
warnings.filterwarnings("ignore", category=DeprecationWarning)
from moto import mock_s3
@mock_s3
def test_upload_file():
""" This test for testing upload data into s3 bucket"""
expected_file_name = 'file.txt'
with tempfile.TemporaryDirectory() as temp_local_directory:
with open(os.path.join(temp_local_directory, expected_file_name), 'w') as f:
f.write('test')
s3_resource = boto3.resource('s3', region_name='us-east-1')
s3_resource.create_bucket(Bucket='benchmark')
s3operations = S3Operations(region_name='us-east-1')
s3operations.upload_file(file_name_path=os.path.join(temp_local_directory, expected_file_name),
bucket='benchmark',
key='test-data',
upload_file=expected_file_name)
assert s3operations.file_exist(bucket='benchmark', key='test-data', file_name=expected_file_name)
@mock_s3
def test_download_file():
""" This test for testing upload data into s3 bucket"""
expected_file_name = 'file.txt'
with tempfile.TemporaryDirectory() as temp_local_directory1:
with open(os.path.join(temp_local_directory1, expected_file_name), 'w') as f:
f.write('test')
s3_resource = boto3.resource('s3', region_name='us-east-1')
s3_resource.create_bucket(Bucket='benchmark')
s3operations = S3Operations(region_name='us-east-1')
s3operations.upload_file(file_name_path=os.path.join(temp_local_directory1, expected_file_name),
bucket='benchmark', key='test-data', upload_file=expected_file_name)
with tempfile.TemporaryDirectory() as temp_local_directory2:
s3operations.download_file(bucket='benchmark', key='test-data', download_file=expected_file_name,
file_name_path=os.path.join(temp_local_directory2, expected_file_name))
assert os.path.exists(os.path.join(temp_local_directory2, expected_file_name))
@mock_s3
def test_upload_objects():
""" This test for testing upload data into s3 bucket"""
expected_files_list = ['file1.txt', 'file2.txt']
actual_files_list = []
with tempfile.TemporaryDirectory() as temp_local_directory:
for file_name in expected_files_list:
with open(os.path.join(temp_local_directory, file_name), 'w') as f:
f.write('test')
s3_resource = boto3.resource('s3', region_name='us-east-1')
data_bucket = s3_resource.create_bucket(Bucket='benchmark')
s3operations = S3Operations(region_name='us-east-1')
s3operations.upload_objects(local_source=temp_local_directory, s3_target='benchmark/test-data')
for obj in data_bucket.objects.all():
for file in expected_files_list:
if file in obj.key:
actual_files_list.append(file)
assert sorted(actual_files_list) == sorted(expected_files_list)
@mock_s3
def test_upload_objects_no_key():
""" This test for testing upload data into s3 bucket"""
expected_files_list = ['file1.txt', 'file2.txt']
actual_files_list = []
with tempfile.TemporaryDirectory() as temp_local_directory:
for file_name in expected_files_list:
with open(os.path.join(temp_local_directory, file_name), 'w') as f:
f.write('test')
s3_resource = boto3.resource('s3', region_name='us-east-1')
data_bucket = s3_resource.create_bucket(Bucket='benchmark')
s3operations = S3Operations(region_name='us-east-1')
s3operations.upload_objects(local_source=temp_local_directory, s3_target='benchmark')
for obj in data_bucket.objects.all():
for file in expected_files_list:
if file in obj.key:
actual_files_list.append(file)
assert sorted(actual_files_list) == sorted(expected_files_list)
@mock_s3
def test_download_objects():
""" This test for testing upload data into s3 bucket"""
expected_files_list = ['file1.txt', 'file2.txt']
with tempfile.TemporaryDirectory() as temp_local_directory1:
for file_name in expected_files_list:
with open(os.path.join(temp_local_directory1, file_name), 'w') as f:
f.write('test')
s3_resource = boto3.resource('s3', region_name='us-east-1')
s3_resource.create_bucket(Bucket='benchmark')
s3operations = S3Operations(region_name='us-east-1')
s3operations.upload_objects(local_source=temp_local_directory1, s3_target='benchmark/test-data')
with tempfile.TemporaryDirectory() as temp_local_directory2:
s3operations.download_objects(s3_target='benchmark/test-data', local_source=temp_local_directory2)
actual_files_list = [f for f in listdir(temp_local_directory2) if isfile(join(temp_local_directory2, f))]
assert sorted(actual_files_list) == sorted(expected_files_list)
@mock_s3
def test_download_objects_no_key():
""" This test for testing upload data into s3 bucket"""
expected_files_list = ['file1.txt', 'file2.txt']
actual_files_list = []
with tempfile.TemporaryDirectory() as temp_local_directory1:
for file_name in expected_files_list:
with open(os.path.join(temp_local_directory1, file_name), 'w') as f:
f.write('test')
s3_resource = boto3.resource('s3', region_name='us-east-1')
s3_resource.create_bucket(Bucket='benchmark')
s3operations = S3Operations(region_name='us-east-1')
s3operations.upload_objects(local_source=temp_local_directory1, s3_target='benchmark')
with tempfile.TemporaryDirectory() as temp_local_directory2:
s3operations.download_objects(s3_target='benchmark', local_source=temp_local_directory2)
actual_files_list = [f for f in listdir(temp_local_directory2) if isfile(join(temp_local_directory2, f))]
assert sorted(actual_files_list) == sorted(expected_files_list)
@mock_s3
def test_file_exist():
""" This test for testing upload data into s3 bucket"""
expected_file_name = 'file.txt'
with tempfile.TemporaryDirectory() as temp_local_directory1:
with open(os.path.join(temp_local_directory1, expected_file_name), 'w') as f:
f.write('test')
s3_resource = boto3.resource('s3', region_name='us-east-1')
s3_resource.create_bucket(Bucket='benchmark')
s3operations = S3Operations(region_name='us-east-1')
s3operations.upload_file(file_name_path=os.path.join(temp_local_directory1, expected_file_name),
bucket='benchmark', key='test-data', upload_file=expected_file_name)
assert s3operations.file_exist(bucket='benchmark', key='test-data', file_name=expected_file_name)
@mock_s3
def test_file_delete():
""" This test for testing upload data into s3 bucket"""
expected_file_name = 'file.txt'
with tempfile.TemporaryDirectory() as temp_local_directory1:
with open(os.path.join(temp_local_directory1, expected_file_name), 'w') as f:
f.write('test')
s3_resource = boto3.resource('s3', region_name='us-east-1')
s3_resource.create_bucket(Bucket='benchmark')
s3operations = S3Operations(region_name='us-east-1')
s3operations.upload_file(file_name_path=os.path.join(temp_local_directory1, expected_file_name),
bucket='benchmark', key='test-data', upload_file=expected_file_name)
s3operations.delete_file(bucket='benchmark', key='test-data', file_name=expected_file_name)
assert not s3operations.file_exist(bucket='benchmark', key='test-data', file_name=expected_file_name)
@mock_s3
def test_folder_delete():
""" This test for testing upload data into s3 bucket"""
expected_files_list = ['file1.txt', 'file2.txt']
with tempfile.TemporaryDirectory() as temp_local_directory1:
for file_name in expected_files_list:
with open(os.path.join(temp_local_directory1, file_name), 'w') as f:
f.write('test')
s3_resource = boto3.resource('s3', region_name='us-east-1')
s3_resource.create_bucket(Bucket='benchmark')
s3operations = S3Operations(region_name='us-east-1')
s3operations.upload_objects(local_source=temp_local_directory1, s3_target='benchmark/test-data')
s3operations.delete_folder(bucket='benchmark', key='test-data')
assert not s3operations.file_exist(bucket='benchmark', key='test-data', file_name=expected_files_list[0])
assert not s3operations.file_exist(bucket='benchmark', key='test-data', file_name=expected_files_list[1])
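# Note: every test above runs under moto's @mock_s3 decorator, which patches
# boto3 in-process so the bucket operations hit an in-memory S3 fake; no AWS
# credentials or network access are required.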
| 50.677966
| 117
| 0.698439
| 1,166
| 8,970
| 5.091767
| 0.070326
| 0.059289
| 0.061984
| 0.048509
| 0.9333
| 0.92033
| 0.912582
| 0.908877
| 0.905508
| 0.872158
| 0
| 0.022456
| 0.195764
| 8,970
| 176
| 118
| 50.965909
| 0.800527
| 0.053623
| 0
| 0.741259
| 0
| 0
| 0.089292
| 0
| 0
| 0
| 0
| 0
| 0.06993
| 1
| 0.062937
| false
| 0
| 0.055944
| 0
| 0.118881
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
83d17aaafb8e16da16c24d1be6fabc590f92e323
| 217
|
py
|
Python
|
utils/response/response.py
|
Dimas4/Web-Crawler-API-Flask
|
9b6a787be9abdb4391a2c1be81268ddec79a58dd
|
[
"MIT"
] | null | null | null |
utils/response/response.py
|
Dimas4/Web-Crawler-API-Flask
|
9b6a787be9abdb4391a2c1be81268ddec79a58dd
|
[
"MIT"
] | null | null | null |
utils/response/response.py
|
Dimas4/Web-Crawler-API-Flask
|
9b6a787be9abdb4391a2c1be81268ddec79a58dd
|
[
"MIT"
] | null | null | null |
from flask import Response as Response_Flask
class Response:
def response_400(self):
return Response_Flask(status=400)
def response_404(self):
return Response_Flask("Not found", status=404)
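# A minimal usage sketch inside a Flask view (the app, route, and lookup
# helper are illustrative assumptions):
#
#     from flask import Flask
#     app = Flask(__name__)
#     response = Response()
#
#     @app.route('/item/<item_id>')
#     def get_item(item_id):
#         item = find_item(item_id)  # hypothetical helper
#         if item is None:
#             return response.response_404()
#         return item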
| 21.7
| 54
| 0.718894
| 29
| 217
| 5.206897
| 0.482759
| 0.258278
| 0.238411
| 0.304636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.069767
| 0.207373
| 217
| 9
| 55
| 24.111111
| 0.80814
| 0
| 0
| 0
| 0
| 0
| 0.041475
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.166667
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
83f8d5d3756df83951a251c8c608267a81ac0805
| 44,764
|
py
|
Python
|
model/plot_utils.py
|
patriacaelum/HydraDX-simulations
|
57948099c924309636647ccd0768afc60a28f705
|
[
"Apache-2.0"
] | null | null | null |
model/plot_utils.py
|
patriacaelum/HydraDX-simulations
|
57948099c924309636647ccd0768afc60a28f705
|
[
"Apache-2.0"
] | 4
|
2021-10-14T15:51:46.000Z
|
2021-10-17T18:44:03.000Z
|
model/plot_utils.py
|
patriacaelum/HydraDX-simulations
|
57948099c924309636647ccd0768afc60a28f705
|
[
"Apache-2.0"
] | 1
|
2021-10-14T05:45:47.000Z
|
2021-10-14T05:45:47.000Z
|
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
def impermanent_loss_plot(rdf):
pool = rdf.pool
oracles = rdf[['oracle_price_i', 'oracle_price_j']]
def get_price(pool, key):
return pool.get_price(key)
price_i = pool.apply(get_price, key='i')
price_j = pool.apply(get_price, key='j')
impermanent_loss_i = (price_i - oracles['oracle_price_i']) / oracles['oracle_price_i']
impermanent_loss_j = (price_j - oracles['oracle_price_j']) / oracles['oracle_price_j']
plt.figure(figsize=(20, 6))
plt.subplot(121)
plt.plot(impermanent_loss_i)
plt.xlabel("Timestep")
plt.ylabel("Impermanent Loss")
plt.subplot(122)
plt.plot(impermanent_loss_j)
plt.xlabel("Timestep")
plt.ylabel("Impermanent Loss")
def hydra_pool_plot(experiments,test_title,T, asset_id):
"""
For any asset on the risk side of the Hydra Omnipool this function plots quantities of:
- its reserve
- its shares
- its price
- its coefficient
"""
asset_R = []
asset_S = []
asset_P = []
asset_C = []
df = experiments
df = df[df['substep'] == df.substep.max()]
df.fillna(0,inplace=True)
# print(df.index)
df.reset_index()
for i in range (df.substep.max(),T+df.substep.max(),df.substep.max()):
asset_R_list = []
asset_R_list.append(df.pool[i].pool[asset_id]['R'])
asset_R.append(asset_R_list)
asset_S_list = []
asset_S_list.append(df.pool[i].pool[asset_id]['S'])
asset_S.append(asset_S_list)
asset_P_list = []
asset_P_list.append(df.pool[i].pool[asset_id]['P'])
# agent_h.append(np.mean(agent_h_list))
asset_P.append(asset_P_list)
asset_C_list = []
asset_C_list.append(df.pool[i].pool[asset_id]['C'])
# agent_h.append(np.mean(agent_h_list))
asset_C.append(asset_C_list)
plt.figure(figsize=(20,6))
plt.subplot(141)
plt.plot(df.timestep,asset_R,label='Asset Reserve', marker='o')
plt.xlabel('Timestep')
plt.ylabel('Reserves')
plt.legend()
plt.title('Reserve' + ' for Asset ' + str(asset_id))
plt.subplot(142)
# plt.plot(range(df.substep.max(),T+df.substep.max(),df.substep.max()),asset_S,label='Asset Shares', marker='o')
plt.plot(df.timestep,asset_S,label='Asset Shares', marker='o') # asset_S
plt.xlabel('Timestep')
plt.ylabel('Shares')
plt.legend()
plt.title('Shares' + ' for Asset ' + str(asset_id))
plt.subplot(143)
plt.plot(df.timestep,asset_P,label='Asset '+ asset_id + ' Price', marker='o')
plt.xlabel('Timestep')
plt.ylabel('Price')
plt.legend()
plt.title('Price' + ' for Asset ' + str(asset_id))
plt.subplot(144)
plt.plot(df.timestep,asset_C,label='Asset '+ asset_id + ' Price', marker='o')
plt.xlabel('Timestep')
plt.ylabel('Coefficient')
plt.legend()
plt.title('Coefficient' + ' for Asset ' + str(asset_id))
plt.show()
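# An illustrative call (argument values are assumptions; `experiments` is the
# simulation results DataFrame used throughout this module):
#     hydra_pool_plot(experiments, test_title='Pool state', T=500, asset_id='i')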
def hydra_pool_price_plot(experiments,test_title,T, asset_id_list):
"""
For any selection of assets on the risk side of the Hydra omnipool this function plots their prices.
"""
asset_P = {k:[] for k in asset_id_list}
df = experiments
df = df[df['substep'] == df.substep.max()]
df.fillna(0,inplace=True)
    for asset_id in asset_P:
        for i in range(df.substep.max(), T, df.substep.max()):
            asset_P[str(asset_id)].append(df.pool[i].pool[asset_id]['P'])
plt.figure(figsize=(12, 8))
for asset_id in asset_id_list:
plt.plot(range(df.substep.max(),T,df.substep.max()),asset_P[asset_id],label='Asset '+ asset_id + ' Price in H', marker='o')
plt.legend()
plt.title(test_title + ' for Asset ' + str(asset_id_list))
plt.xlabel('Timestep')
plt.ylabel('Asset Base Price')
plt.show()
def hydra_agent_value_plot_rev(experiments,test_title,T): #, agent_index, asset_id):
"""
This function plots agent values for each agent that went through the Hydra World.
Values are token holdings multiplied by prices.
    Tokens here are virtual, so they should be valued as shares; this is
    intended to fix the apparent negative token gain.
"""
agent_h = []
agent_r_i_out = []
agent_s_i = []
agent_r_j_out = []
agent_s_j = []
df = experiments
df = df[df['substep'] == df.substep.max()]
df.fillna(0,inplace=True)
number_of_agents = 8
for agent_index in range(number_of_agents):
agent_h = []
agent_r_i_out = []
agent_s_i = []
agent_r_j_out = []
agent_s_j = []
for i in range(df.substep.max(),T, df.substep.max()):
agent_h_list = []
agent_h_list.append(df.hydra_agents.values[i]['h'][agent_index])
# agent_h.append(np.mean(agent_h_list))
agent_h.append(agent_h_list)
asset_id = 'i'
agent_r_i_out_list= []
agent_r_i_out_list.append(df.hydra_agents.values[i]['r_' + asset_id + '_out'][agent_index])
p_rq_list = []
p_rq_list.append(float(df.pool[i].pool[asset_id]['P']))
agent_r_i_out.append(np.divide(agent_r_i_out_list,p_rq_list))
agent_s_i_list= []
s_i_pool = []
q_reserve = []
agent_s_i_list.append(int(df.hydra_agents.values[i]['s_' + asset_id][agent_index]))
s_i_pool.append(int(df.Sq.values[i]))
q_reserve.append(int(df.Q.values[i]))
agent_s_i.append(np.multiply(np.divide(agent_s_i_list,s_i_pool),q_reserve))
sub_total_i = np.add(agent_r_i_out,agent_s_i)
agent_total = np.add(sub_total_i,agent_h)
plt.figure(figsize=(10, 5))
plt.plot(range(df.substep.max(),T, df.substep.max()),agent_h,label='agent_h', marker='o')
asset_id = 'i'
plt.plot(range(df.substep.max(),T, df.substep.max()),agent_r_i_out,label='agent_r_' + asset_id + '_out',marker='o')
plt.plot(range(df.substep.max(),T, df.substep.max()),agent_s_i,label='agent_s_' + asset_id,marker='o')
plt.plot(range(df.substep.max(),T, df.substep.max()),agent_total,label='agent_total',marker='o')
plt.legend()
plt.title(test_title + ' for Agent ' + str(agent_index))
plt.xlabel('Timestep')
plt.ylabel('Agent Holdings Value')
plt.show()
def hydra_agent_value_plot(experiments,test_title,T): #, agent_index, asset_id):
"""
This function plots agent values for each agent that went through the Hydra World.
Values are token holdings multiplied by prices.
"""
agent_h = []
agent_r_i_out = []
agent_s_i = []
agent_r_j_out = []
agent_s_j = []
df = experiments
df = df[df['substep'] == df.substep.max()]
df.fillna(0,inplace=True)
number_of_agents = 8
for agent_index in range(number_of_agents):
agent_h = []
agent_r_i_out = []
agent_s_i = []
agent_r_j_out = []
agent_s_j = []
for i in range(df.substep.max(),T, df.substep.max()):
agent_h_list = []
agent_h_list.append(df.hydra_agents.values[i]['h'][agent_index])
# agent_h.append(np.mean(agent_h_list))
agent_h.append(agent_h_list)
asset_id = 'i'
agent_r_i_out_list= []
agent_r_i_out_list.append(df.hydra_agents.values[i]['r_' + asset_id + '_out'][agent_index])
p_rq_list = []
p_rq_list.append(float(df.pool[i].pool[asset_id]['P']))
agent_r_i_out.append(np.divide(agent_r_i_out_list,p_rq_list))
agent_s_i_list= []
s_i_pool = []
q_reserve = []
agent_s_i_list.append(int(df.hydra_agents.values[i]['s_' + asset_id][agent_index]))
s_i_pool.append(int(df.Sq.values[i]))
q_reserve.append(int(df.Q.values[i]))
agent_s_i.append(np.multiply(np.divide(agent_s_i_list,s_i_pool),q_reserve))
asset_id = 'j'
agent_r_j_out_list= []
agent_r_j_out_list.append(df.hydra_agents.values[i]['r_' + asset_id + '_out'][agent_index])
p_rq_list = []
p_rq_list.append(float(df.pool[i].pool[asset_id]['P']))
agent_r_j_out.append(np.divide(agent_r_j_out_list,p_rq_list))
agent_s_j_list= []
s_j_pool = []
q_reserve = []
agent_s_j_list.append(int(df.hydra_agents.values[i]['s_' + asset_id][agent_index]))
s_j_pool.append(int(df.Sq.values[i]))
q_reserve.append(int(df.Q.values[i]))
agent_s_j.append(np.multiply(np.divide(agent_s_j_list,s_j_pool),q_reserve))
sub_total_i = np.add(agent_r_i_out,agent_s_i)
sub_total_j = np.add(agent_r_j_out,agent_s_j)
agent_total = np.add(np.add(sub_total_i,sub_total_j),agent_h)
plt.figure(figsize=(10, 5))
plt.plot(range(df.substep.max(),T, df.substep.max()),agent_h,label='agent_h', marker='o')
asset_id = 'i'
plt.plot(range(df.substep.max(),T, df.substep.max()),agent_r_i_out,label='agent_r_' + asset_id + '_out',marker='o')
plt.plot(range(df.substep.max(),T, df.substep.max()),agent_s_i,label='agent_s_' + asset_id,marker='o')
asset_id = 'j'
plt.plot(range(df.substep.max(),T, df.substep.max()),agent_r_j_out,label='agent_r_' + asset_id + '_out',marker='o')
plt.plot(range(df.substep.max(),T, df.substep.max()),agent_s_j,label='agent_s_' + asset_id,marker='o')
plt.plot(range(df.substep.max(),T, df.substep.max()),agent_total,label='agent_total',marker='o')
plt.legend()
plt.title(test_title + ' for Agent ' + str(agent_index))
plt.xlabel('Timestep')
plt.ylabel('Agent Holdings Value')
plt.show()
def agent_value_plot(experiments,test_title,T): #, agent_index, asset_id):
"""
This function plots agent values for each agent that went through the Uniswap World.
Values are token holdings multiplied by prices.
"""
agent_h = []
agent_r_i_out = []
agent_s_i = []
agent_r_j_out = []
agent_s_j = []
df = experiments
df = df[df['substep'] == df.substep.max()]
df.fillna(0,inplace=True)
number_of_agents = 8
for agent_index in range(number_of_agents):
agent_h = []
agent_r_i_out = []
agent_s_i = []
agent_r_j_out = []
agent_s_j = []
for i in range (0,T):
agent_h_list = []
agent_h_list.append(df.uni_agents.values[i]['h'][agent_index])
# agent_h.append(np.mean(agent_h_list))
agent_h.append(agent_h_list)
asset_id = 'i'
agent_r_i_out_list= []
agent_r_i_out_list.append(df.uni_agents.values[i]['r_' + asset_id + '_out'][agent_index])
p_rq_list = []
p_rq_list.append(df['UNI_P_RQ' + asset_id].values[i])
agent_r_i_out.append(np.divide(agent_r_i_out_list,p_rq_list))
agent_s_i_list= []
s_i_pool = []
q_reserve = []
agent_s_i_list.append(df.uni_agents.values[i]['s_' + asset_id][agent_index])
s_i_pool.append(df['UNI_S' + asset_id].values[i])
q_reserve.append(df['UNI_S' + asset_id].values[i])
agent_s_i.append(np.multiply(np.divide(agent_s_i_list,s_i_pool),q_reserve))
asset_id = 'j'
agent_r_j_out_list= []
agent_r_j_out_list.append(df.uni_agents.values[i]['r_' + asset_id + '_out'][agent_index])
p_rq_list = []
p_rq_list.append(df['UNI_P_RQ' + asset_id].values[i])
agent_r_j_out.append(np.divide(agent_r_j_out_list,p_rq_list))
agent_s_j_list= []
s_j_pool = []
q_reserve = []
agent_s_j_list.append(df.uni_agents.values[i]['s_' + asset_id][agent_index])
s_j_pool.append(df['UNI_S' + asset_id].values[i])
q_reserve.append(df['UNI_S' + asset_id].values[i])
agent_s_j.append(np.multiply(np.divide(agent_s_j_list,s_j_pool),q_reserve))
sub_total_i = np.add(agent_r_i_out,agent_s_i)
sub_total_j = np.add(agent_r_j_out,agent_s_j)
agent_total = np.add(np.add(sub_total_i,sub_total_j),agent_h)
# print(agent_s_i)
fig = plt.figure(figsize=(10, 5))
plt.plot(range(0,T),agent_h,label='agent_h', marker='o')
asset_id = 'i'
plt.plot(range(0,T),agent_r_i_out,label='agent_r_' + asset_id + '_out',marker='o')
plt.plot(range(0,T),agent_s_i,label='agent_s_' + asset_id,marker='o')
asset_id = 'j'
plt.plot(range(0,T),agent_r_j_out,label='agent_r_' + asset_id + '_out',marker='o')
plt.plot(range(0,T),agent_s_j,label='agent_s_' + asset_id,marker='o')
plt.plot(range(0,T),agent_total,label='agent_total',marker='o')
plt.legend()
plt.title(test_title + ' for Agent ' + str(agent_index))
plt.xlabel('Timestep')
plt.ylabel('Agent Holdings Value')
plt.show()
def hydra_agent_plot(experiments,test_title,T): #, agent_index):
"""
This function plots asset holdings for each agent that went through the Hydra World.
Asset holdings are token quantities held by the agent.
"""
agent_h = []
agent_r_i_out = []
agent_r_i_in = []
agent_r_j_out = []
agent_r_j_in = []
df = experiments
df = df[df['substep'] == df.substep.max()]
df.fillna(0,inplace=True)
number_of_agents = 8
for agent_index in range(number_of_agents):
agent_h = []
agent_r_i_out = []
agent_r_i_in = []
agent_r_j_out = []
agent_r_j_in = []
for i in range (0,T):
agent_h_list = []
agent_h_list.append(df.hydra_agents.values[i]['h'][agent_index])
agent_h.append(np.mean(agent_h_list))
agent_r_i_out_list= []
agent_r_i_out_list.append(df.hydra_agents.values[i]['r_i_out'][agent_index])
agent_r_i_out.append(np.mean(agent_r_i_out_list))
agent_r_i_in_list= []
agent_r_i_in_list.append(df.hydra_agents.values[i]['r_i_in'][agent_index])
agent_r_i_in.append(np.mean(agent_r_i_in_list))
agent_r_j_out_list= []
agent_r_j_out_list.append(df.hydra_agents.values[i]['r_j_out'][agent_index])
agent_r_j_out.append(np.mean(agent_r_j_out_list))
agent_r_j_in_list= []
agent_r_j_in_list.append(df.hydra_agents.values[i]['r_j_in'][agent_index])
agent_r_j_in.append(np.mean(agent_r_j_in_list))
plt.figure(figsize=(10, 5))
plt.plot(range(0,T),agent_h,label='agent_h', marker='o')
plt.plot(range(0,T),agent_r_i_out,label='agent_r_i_out',marker='o')
plt.plot(range(0,T),agent_r_i_in,label='agent_r_i_in',marker='o')
plt.plot(range(0,T),agent_r_j_out,label='agent_r_j_out',marker='o')
plt.plot(range(0,T),agent_r_j_in,label='agent_r_j_in',marker='o')
plt.legend()
plt.title(test_title + str(agent_index))
plt.xlabel('Timestep')
plt.ylabel('Tokens')
plt.show()
def agent_plot(experiments,test_title,T): #, agent_index, asset_id):
"""
This function plots asset holdings for each agent that went through the Uniswap World.
Asset holdings are token quantities held by the agent.
"""
agent_h = []
agent_r_i_out = []
agent_r_i_in = []
agent_s_i = []
# asset_P = {k:[] for k in asset_id_list}
df = experiments
df = df[df['substep'] == df.substep.max()]
df.fillna(0,inplace=True)
number_of_agents = 8
for agent_index in range(number_of_agents):
agent_h = []
agent_r_i_out = []
agent_r_i_in = []
agent_r_j_out = []
agent_r_j_in = []
for i in range (0,T):
agent_h_list = []
agent_h_list.append(df.uni_agents.values[i]['h'][agent_index])
agent_h.append(np.mean(agent_h_list))
asset_id = 'i'
agent_r_i_out_list= []
agent_r_i_out_list.append(df.uni_agents.values[i]['r_' + asset_id + '_out'][agent_index])
agent_r_i_out.append(np.mean(agent_r_i_out_list))
agent_r_i_in_list= []
agent_r_i_in_list.append(df.uni_agents.values[i]['r_' + asset_id + '_in'][agent_index])
agent_r_i_in.append(np.mean(agent_r_i_in_list))
asset_id = 'j'
agent_r_j_out_list= []
agent_r_j_out_list.append(df.uni_agents.values[i]['r_' + asset_id + '_out'][agent_index])
agent_r_j_out.append(np.mean(agent_r_j_out_list))
agent_r_j_in_list= []
agent_r_j_in_list.append(df.uni_agents.values[i]['r_' + asset_id + '_in'][agent_index])
agent_r_j_in.append(np.mean(agent_r_j_in_list))
plt.figure(figsize=(10, 5))
# plt.subplot(121)
plt.plot(range(0,T),agent_h,label='agent_h', marker='o')
asset_id = 'i'
plt.plot(range(0,T),agent_r_i_out,label='agent_r_' + asset_id + '_out',marker='o')
plt.plot(range(0,T),agent_r_i_in,label='agent_r_' + asset_id + '_in',marker='o')
asset_id = 'j'
plt.plot(range(0,T),agent_r_j_out,label='agent_r_' + asset_id + '_out',marker='o')
plt.plot(range(0,T),agent_r_j_in,label='agent_r_' + asset_id + '_in',marker='o')
plt.legend()
plt.title(test_title + str(agent_index))
plt.xlabel('Timestep')
plt.ylabel('Tokens')
plt.show()
def mean_agent_plot(experiments,test_title,T):
"""
This function shows mean agent holdings in the Uniswap World.
"""
agent_h = []
agent_r_i_out = []
df = experiments
df = df[df['substep'] == df.substep.max()]
df.fillna(0,inplace=True)
for i in range(df.substep.max(),T, df.substep.max()):
agent_h_list = []
agent_h_list.append(df.uni_agents.values[i]['h'])
agent_h.append(np.mean(agent_h_list))
agent_r_i_out_list= []
agent_r_i_out_list.append(df.uni_agents.values[i]['r_i_out'])
agent_r_i_out.append(np.mean(agent_r_i_out_list))
fig = plt.figure(figsize=(15, 10))
plt.plot(range(df.substep.max(),T, df.substep.max()),agent_h,label='agent_h', marker='o')
plt.plot(range(df.substep.max(),T, df.substep.max()),agent_r_i_out,label='agent_r_i_out',marker='o')
plt.legend()
plt.title(test_title)
plt.xlabel('Timestep')
plt.ylabel('Tokens')
plt.show()
def price_plot(experiments,test_title, price_swap, numerator, denominator):
"""
This function shows two plots of swap prices of two assets in the Uniswap World.
Once where fees are included and once without fees.
"""
df = experiments
df = df[df['substep'] == df.substep.max()]
df.fillna(0,inplace=True)
plt.figure(figsize=(12, 8))
token_ratio = df[denominator]/ df[numerator]
plt.plot(df[price_swap],label='Swap Price', marker='o')
plt.plot(token_ratio,label='Pool Ratio Price',marker='o')
plt.legend()
plt.title(test_title)
plt.xlabel('Timestep')
plt.ylabel('Price')
plt.show()
def IL_plot(experiments, test_title, periods):
    """
    Plot the closed-form constant-product impermanent loss of assets i and j
    over a rolling window of `periods` timesteps.
    """
    df = experiments
    df = df[df['substep'] == df.substep.max()]
    df.fillna(0,inplace=True)
    plt.figure(figsize=(12, 8))
    # note: the IL curve takes the price ratio r itself, not pct_change
    # (which is r - 1 and goes negative, breaking the square root)
    ratio_i = 1 + df.UNI_P_RQi.pct_change(periods)  # r = P_t / P_{t-periods}
    ratio_j = 1 + df.UNI_P_RQj.pct_change(periods)
    UNI_IL_i = 2 * np.sqrt(ratio_i) / (1 + ratio_i) - 1
    UNI_IL_j = 2 * np.sqrt(ratio_j) / (1 + ratio_j) - 1
plt.plot(UNI_IL_i,label='Asset i', marker='o')
plt.plot(UNI_IL_j,label='Asset j',marker='o')
plt.legend()
plt.title(test_title + str(periods))
plt.xlabel('Timestep')
    plt.ylabel('Impermanent Loss')
plt.show()
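# For reference, IL_plot applies the closed-form constant-product impermanent
# loss curve IL(r) = 2*sqrt(r)/(1+r) - 1, where r is the price ratio over the
# look-back window. A standalone version of the curve (illustrative sketch):
def _constant_product_il(r):
    """Impermanent loss vs. simply holding, for a price ratio r; IL(1) == 0."""
    return 2 * np.sqrt(r) / (1 + r) - 1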
def trade_liq_plot(experiments,test_title,T, asset_id_list):
"""
Plot share to reserve ratio - S/R for each asset
"""
asset_R = {k:[] for k in asset_id_list}
df = experiments
df = df[df['substep'] == df.substep.max()]
df.fillna(0,inplace=True)
plt.figure(figsize=(12, 8))
# plt.subplot(121)
    for asset_id in asset_R:
        for i in range(df.substep.max(), T, df.substep.max()):
            R = df.pool[i].pool[asset_id]['R']
            S = df.pool[i].pool[asset_id]['S']
            asset_R[str(asset_id)].append(S/R)
for asset_id in asset_id_list:
plt.plot(range(df.substep.max(),T,df.substep.max()),asset_R[asset_id],label='Asset '+ asset_id, marker='o')
plt.legend()
plt.title(test_title)
plt.xlabel('Timestep')
plt.ylabel('Share to Reserve Ratio')
plt.show()
def rel_price_plot(experiments,test_title,T, asset_id_list):
"""
    asset_id_list must be an asset pair; plots the relative price of each
    asset in terms of the other.
"""
asset_R = {k:[] for k in asset_id_list}
df = experiments
df = df[df['substep'] == df.substep.max()]
df.fillna(0,inplace=True)
plt.figure(figsize=(12, 8))
# plt.subplot(121)
    for asset_id in asset_R:
        for i in range(df.substep.max(), T, df.substep.max()):
            R = df.pool[i].pool[asset_id]['R']
            S = df.pool[i].pool[asset_id]['S']
            asset_R[str(asset_id)].append(R/S)
i_in_j = [j / i for i,j in zip(*asset_R.values())]
j_in_i = [i / j for i,j in zip(*asset_R.values())]
    for count, asset_id in enumerate(asset_id_list):
if count == 0:
plt.plot(range(df.substep.max(),T,df.substep.max()),i_in_j,label='Asset Price '+ asset_id + ',' +asset_id_list[count-1], marker='o')
else:
plt.plot(range(df.substep.max(),T,df.substep.max()),j_in_i,label='Asset Price '+ asset_id + ',' +asset_id_list[count-1], marker='o')
plt.legend()
plt.title(test_title)
plt.xlabel('Timestep')
plt.ylabel('Price')
plt.show()
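# Note on the pairing trick above: zip(*asset_R.values()) walks the two R/S
# series timestep-by-timestep, so i_in_j[t] = (R_j/S_j)_t / (R_i/S_i)_t and
# j_in_i is its reciprocal. Tiny illustration:
#   list(zip(*{'i': [1, 2], 'j': [3, 8]}.values()))  ->  [(1, 3), (2, 8)]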
def relative_value_plot(experiments,test_title, T, asset_id_list):
"""
Plot relative value change- delta R*P for each asset
"""
asset_R = {k:[] for k in asset_id_list}
df = experiments
df = df[df['substep'] == df.substep.max()]
df.fillna(0,inplace=True)
    for asset_id in asset_R:
        for i in range(df.substep.max(), T, df.substep.max()):
            R = df.pool[i].pool[asset_id]['R']
            P = df.pool[i].pool[asset_id]['P']
            asset_R[str(asset_id)].append(R*P)
plt.figure(figsize=(12, 8))
for asset_id in asset_id_list:
value_df = pd.DataFrame(asset_R[asset_id])
value_df = value_df.pct_change()
value_df.iloc[0] = 0
# print(value_df)
plt.plot(value_df,label='Asset '+ asset_id, marker='o')
plt.legend()
plt.title(test_title + ' for Asset ' + str(asset_id_list))
plt.xlabel('Timestep')
plt.ylabel('Asset Relative Value Change')
plt.show()
def relative_liq_plot(experiments,test_title, T, asset_id_list):
"""
Plot relative liquidity change- delta S for each asset
"""
asset_S = {k:[] for k in asset_id_list}
df = experiments
df = df[df['substep'] == df.substep.max()]
df.fillna(0,inplace=True)
for asset_id in asset_S:
for i in range(df.substep.max(),T, df.substep.max()):
S = df.pool[i].pool[asset_id]['S']
asset_S[str(asset_id)].append(S)
# print(asset_P)
plt.figure(figsize=(12, 8))
for asset_id in asset_id_list:
value_df = pd.DataFrame(asset_S[asset_id])
value_df = value_df.pct_change()
value_df.iloc[0] = 0
# print(value_df)
plt.plot(value_df,label='Asset '+ asset_id, marker='o')
plt.legend()
plt.title(test_title + ' for Asset ' + str(asset_id_list))
plt.xlabel('Timestep')
plt.ylabel('Asset Liquidity Change')
plt.show()
def slippage_plot(experiments,test_title, T, asset_id_list):
"""
    Plot a slippage proxy for an asset pair: the relative difference of their
    share-to-reserve ratios.
"""
asset_S = {k:[] for k in asset_id_list}
df = experiments
df = df[df['substep'] == df.substep.max()]
df.fillna(0,inplace=True)
    for asset_id in asset_S:
        for i in range(df.substep.max(), T, df.substep.max()):
            S = df.pool[i].pool[asset_id]['S']
            R = df.pool[i].pool[asset_id]['R']
            asset_S[str(asset_id)].append(S/R)
i_in_j = [j / i - 1 for i,j in zip(*asset_S.values())]
plt.figure(figsize=(12, 8))
plt.plot(i_in_j,label='Slippage '+ str(asset_id_list), marker='o')
plt.legend()
plt.title(test_title + ' for Asset ' + str(asset_id_list))
plt.xlabel('Timestep')
    plt.ylabel('Slippage')
plt.show()
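# The quantity plotted above is a liquidity-based slippage proxy between the
# asset pair: i_in_j[t] = (S_j/R_j)_t / (S_i/R_i)_t - 1, i.e. the relative
# difference of the two share-to-reserve ratios, not per-trade slippage.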
def param_test_plot(experiments, config_ids, swept_variable, y_variable, *args):
"""
experiments is the simulation result dataframe.
config_ids is the list configs executed upon in the simulation.
swept_variable is the key (string) in config_ids that was being tested against.
y_variable is the state_variable (string) to be plotted against default timestep.
*args for plotting more state_variables (string).
"""
experiments = experiments.sort_values(by =['subset']).reset_index(drop=True)
cols = 1
rows = 1
cc_idx = 0
while cc_idx<len(experiments):
cc = experiments.iloc[cc_idx]['subset']
cc_label = experiments.iloc[cc_idx]['subset']
secondary_label = [item['M'][swept_variable] for item in config_ids if item["subset_id"]== cc_label]
sub_experiments = experiments[experiments['subset']==cc]
cc_idx += len(sub_experiments)
fig, axs = plt.subplots(ncols=cols, nrows=rows, figsize=(15*cols,7*rows))
df = sub_experiments.copy()
colors = ['orange', 'g', 'magenta', 'r', 'k' ]
ax = axs
title = swept_variable + ' Effect on ' + y_variable + '\n' + 'Scenario: ' + str(secondary_label[0]) + ' ' + swept_variable
# + 'Scenario: ' + str(cc_label) + ' rules_price'
ax.set_title(title)
ax.set_ylabel('Funds')
df.plot(x='timestep', y=y_variable, label=y_variable, ax=ax, legend=True, kind ='scatter')
for count, arg in enumerate(args):
df.plot(x='timestep', y=arg, label=arg, ax=ax, legend=True, color = colors[count], kind ='scatter')
ax.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
ax.set_xlabel('Timesteps')
ax.grid(color='0.9', linestyle='-', linewidth=1)
plt.tight_layout()
fig.tight_layout(rect=[0, 0, 1, .97])
fig.patch.set_alpha(1)
plt.close()
return display(fig)
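# Minimal synthetic demo of param_test_plot's input contract (hypothetical
# column names and sweep values; real inputs come from a cadCAD parameter
# sweep and its config_ids list):
def _param_test_plot_demo():
    demo = pd.DataFrame({'subset': 0,
                         'timestep': range(10),
                         'Q': np.linspace(100.0, 120.0, 10)})
    demo_configs = [{'subset_id': 0, 'M': {'fee': 0.003}}]
    param_test_plot(demo, demo_configs, 'fee', 'Q')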
def param_fan_plot3(experiments, config_ids, swept_variable, y_variable, *args):
"""
experiments is the simulation result dataframe.
config_ids is the list configs executed upon in the simulation.
swept_variable is the key (string) in config_ids that was being tested against.
y_variable is the state_variable (string) to be plotted against default timestep.
*args for plotting more state_variables (string).
"""
experiments = experiments.sort_values(by =['subset']).reset_index(drop=True)
cols = 1
rows = 1
cc_idx = 0
while cc_idx<len(experiments):
cc = experiments.iloc[cc_idx]['subset']
cc_label = experiments.iloc[cc_idx]['subset']
secondary_label = [item['M'][swept_variable] for item in config_ids if item["subset_id"]== cc_label]
sub_experiments = experiments[experiments['subset']==cc]
cc_idx += len(sub_experiments)
fig, axs = plt.subplots(ncols=cols, nrows=rows, figsize=(15*cols,7*rows))
df = sub_experiments.copy()
df = df.groupby('timestep').agg({y_variable: ['min', 'mean', 'max']}).reset_index()
colors = ['orange', 'g', 'magenta', 'r', 'k' ]
ax = axs
title = swept_variable + ' Effect on ' + y_variable + '\n' + 'Scenario: ' + str(secondary_label[0]) + ' ' + swept_variable
# + 'Scenario: ' + str(cc_label) + ' rules_price'
ax.set_title(title)
ax.set_ylabel('Funds')
df.plot(x='timestep', y=(y_variable,'mean'),label = y_variable, ax=ax, legend=True)
ax.fill_between(df.timestep, df[(y_variable,'min')], df[(y_variable,'max')], alpha=0.5)
ax.set_xlabel('Blocks')
ax.grid(color='0.9', linestyle='-', linewidth=1)
plt.tight_layout()
fig.tight_layout(rect=[0, 0, 1, .97])
fig.patch.set_alpha(1)
plt.close()
return display(fig)
def param_fan_plot(experiments, config_ids, swept_variable, y_variable, x_var, *args):
"""
experiments is the simulation result dataframe.
config_ids is the list configs executed upon in the simulation.
swept_variable is the key (string) in config_ids that was being tested against.
y_variable is the state_variable (string) to be plotted against default timestep.
*args for plotting more state_variables (string).
"""
experiments = experiments.sort_values(by =['subset']).reset_index(drop=True)
cols = 1
rows = 1
cc_idx = 0
while cc_idx<len(experiments):
cc = experiments.iloc[cc_idx]['subset']
cc_label = experiments.iloc[cc_idx]['subset']
secondary_label = [item['M'][swept_variable] for item in config_ids if item["subset_id"]== cc_label]
sub_experiments = experiments[experiments['subset']==cc]
cc_idx += len(sub_experiments)
fig, axs = plt.subplots(ncols=cols, nrows=rows, figsize=(15*cols,7*rows))
df = sub_experiments.copy()
df = df.groupby('timestep').agg({x_var: ['min', 'mean', 'max']}).reset_index()
colors = ['orange', 'g', 'magenta', 'r', 'k' ]
ax = axs
title = swept_variable + ' Effect on ' + y_variable + '\n' + 'Scenario: ' + str(secondary_label[0]) + ' ' + swept_variable
# + 'Scenario: ' + str(cc_label) + ' rules_price'
ax.set_title(title)
ax.set_ylabel('Funds')
df.plot(x='timestep', y=(x_var,'mean'),label = y_variable, ax=ax, legend=True)
ax.fill_between(df.timestep, df[(x_var,'min')], df[(x_var,'max')], alpha=0.3)
ax.set_xlabel('Blocks')
ax.grid(color='0.9', linestyle='-', linewidth=1)
plt.tight_layout()
fig.tight_layout(rect=[0, 0, 1, .97])
fig.patch.set_alpha(1)
plt.close()
return display(fig)
def param_fan_plot2(experiments, config_ids, swept_variable, y_variable, *args):
"""
experiments is the simulation result dataframe.
config_ids is the list configs executed upon in the simulation.
swept_variable is the key (string) in config_ids that was being tested against.
y_variable is the state_variable (string) to be plotted against default timestep.
*args for plotting more state_variables (string).
"""
experiments = experiments.sort_values(by =['subset']).reset_index(drop=True)
cols = 1
rows = 1
cc_idx = 0
while cc_idx<len(experiments):
cc = experiments.iloc[cc_idx]['subset']
cc_label = experiments.iloc[cc_idx]['subset']
secondary_label = [item['M'][swept_variable] for item in config_ids if item["subset_id"]== cc_label]
sub_experiments = experiments[experiments['subset']==cc]
cc_idx += len(sub_experiments)
fig, axs = plt.subplots(ncols=cols, nrows=rows, figsize=(15*cols,7*rows))
df = sub_experiments.copy()
df = df.groupby('timestep').agg({'UNI_Ri': ['min', 'mean', 'max']}).reset_index()
colors = ['orange', 'g', 'magenta', 'r', 'k' ]
ax = axs
title = swept_variable + ' Effect on ' + y_variable + '\n' + 'Scenario: ' + str(secondary_label[0]) + ' ' + swept_variable
# + 'Scenario: ' + str(cc_label) + ' rules_price'
ax.set_title(title)
ax.set_ylabel('Funds')
df.plot(x='timestep', y=('UNI_Ri','mean'),label = y_variable, ax=ax, legend=True)
ax.fill_between(df.timestep, df[('UNI_Ri','min')], df[('UNI_Ri','max')], alpha=0.3)
ax.set_xlabel('Blocks')
ax.grid(color='0.9', linestyle='-', linewidth=1)
plt.tight_layout()
fig.tight_layout(rect=[0, 0, 1, .97])
fig.patch.set_alpha(1)
plt.close()
return display(fig)
def param_pool_plot(experiments, config_ids, swept_variable, asset_id, y_variable, *args):
"""
experiments is the simulation result dataframe.
config_ids is the list configs executed upon in the simulation.
swept_variable is the key (string) in config_ids that was being tested against.
asset_id is the asset identifier in the pool (string) e.g i,j,k
y_variable is the state_variable (string) to be plotted against default timestep.
*args for plotting more state_variables (string).
"""
experiments = experiments.sort_values(by =['subset']).reset_index(drop=True)
cols = 1
rows = 1
cc_idx = 0
while cc_idx<len(experiments):
cc = experiments.iloc[cc_idx]['subset']
cc_label = experiments.iloc[cc_idx]['subset']
secondary_label = [item['M'][swept_variable] for item in config_ids if item["subset_id"]== cc_label]
sub_experiments = experiments[experiments['subset']==cc]
cc_idx += len(sub_experiments)
fig, axs = plt.subplots(ncols=cols, nrows=rows, figsize=(15*cols,7*rows))
df = sub_experiments.copy()
df_label = y_variable + asset_id
df[df_label] = df.pool.apply(lambda x: np.array(x.pool[asset_id][y_variable]))
colors = ['orange', 'g', 'magenta', 'r', 'k' ]
ax = axs
title = swept_variable + ' Effect on Pool Asset ' + asset_id + '\n' + 'Scenario: ' + str(secondary_label[0]) + ' ' + swept_variable
# + 'Scenario: ' + str(cc_label) + ' rules_price'
ax.set_title(title)
ax.set_ylabel('Funds')
df.plot(x='timestep', y=df_label, label=df_label, ax=ax, legend=True, kind ='scatter')
for count, arg in enumerate(args):
df_arg_label = arg + asset_id
df[df_arg_label] = df.pool.apply(lambda x: np.array(x.pool[asset_id][arg]))
df.plot(x='timestep', y=df_arg_label, label=df_arg_label, ax=ax, legend=True, color = colors[count], kind ='scatter')
ax.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
ax.set_xlabel('Timesteps')
ax.grid(color='0.9', linestyle='-', linewidth=1)
plt.tight_layout()
fig.tight_layout(rect=[0, 0, 1, .97])
fig.patch.set_alpha(1)
plt.close()
return display(fig)
def param_pool_fan_plot(experiments, config_ids, swept_variable, asset_id, y_variable, *args):
"""
experiments is the simulation result dataframe.
config_ids is the list configs executed upon in the simulation.
swept_variable is the key (string) in config_ids that was being tested against.
asset_id is the asset identifier in the pool (string) e.g i,j,k
y_variable is the state_variable (string) to be plotted against default timestep.
*args for plotting more state_variables (string).
"""
experiments = experiments.sort_values(by =['subset']).reset_index(drop=True)
cols = 1
rows = 1
cc_idx = 0
while cc_idx<len(experiments):
cc = experiments.iloc[cc_idx]['subset']
cc_label = experiments.iloc[cc_idx]['subset']
secondary_label = [item['M'][swept_variable] for item in config_ids if item["subset_id"]== cc_label]
sub_experiments = experiments[experiments['subset']==cc]
cc_idx += len(sub_experiments)
fig, axs = plt.subplots(ncols=cols, nrows=rows, figsize=(15*cols,7*rows))
        df = sub_experiments.copy()
        df_label = y_variable + asset_id
        df[df_label] = df.pool.apply(lambda x: np.array(x.pool[asset_id][y_variable]))
        agg = df.groupby('timestep').agg({df_label: ['min', 'mean', 'max']}).reset_index()
colors = ['orange', 'g', 'magenta', 'r', 'k' ]
ax = axs
title = swept_variable + ' Effect on Pool Asset ' + asset_id + '\n' + 'Scenario: ' + str(secondary_label[0]) + ' ' + swept_variable
# + 'Scenario: ' + str(cc_label) + ' rules_price'
ax.set_title(title)
ax.set_ylabel('Funds')
        agg.plot(x='timestep', y=(df_label,'mean'), label=df_label, ax=ax, legend=True, kind ='scatter')
        ax.fill_between(agg.timestep, agg[(df_label,'min')], agg[(df_label,'max')], alpha=0.5)
        for count, arg in enumerate(args):
            df_arg_label = arg + asset_id
            df[df_arg_label] = df.pool.apply(lambda x: np.array(x.pool[asset_id][arg]))
            agg_arg = df.groupby('timestep').agg({df_arg_label: ['min', 'mean', 'max']}).reset_index()
            agg_arg.plot(x='timestep', y=(df_arg_label,'mean'), label=df_arg_label, ax=ax, legend=True, color = colors[count], kind ='scatter')
            ax.fill_between(agg_arg.timestep, agg_arg[(df_arg_label,'min')], agg_arg[(df_arg_label,'max')], alpha=0.3)
ax.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
ax.set_xlabel('Timesteps')
ax.grid(color='0.9', linestyle='-', linewidth=1)
plt.tight_layout()
fig.tight_layout(rect=[0, 0, 1, .97])
fig.patch.set_alpha(1)
plt.close()
return display(fig)
def slippage_fan_plot(swept_var, sweep_dict, sl_kpis, market_information):
colors = ['green', 'blue']
subset_array = list(sl_kpis.keys())
MC_simulation_array = list(sl_kpis[subset_array[0]][list(market_information)[0]].keys())
markets = market_information.keys()
ncols = len(markets)
for measure in ['slippage', 'elasticity']:
for subset in subset_array:
for asset in ['i', 'j', 'all']:
fig, axs = plt.subplots(ncols=ncols, nrows=1, figsize=(15,7))
for i, market in enumerate(markets):
title_fig = f"{market} {measure}, sweep value {sweep_dict[subset]} of parameter '{swept_var}' for asset '{asset}'"
axs[i].set_title(title_fig)
axs[i].set_xlabel('Trade Sequence')
p = pd.concat([sl_kpis[subset][market][x][measure][asset] for x in MC_simulation_array], axis = 1)
axs[i].plot(p.mean(axis=1))
p10 = p.quantile(0.10, axis = 1)
p25 = p.quantile(0.25, axis = 1)
p75 = p.quantile(0.75, axis = 1)
p90 = p.quantile(0.90, axis = 1)
axs[i].fill_between(p.index, p25, p75, alpha = 0.5)
axs[i].fill_between(p.index, p10, p90, alpha = 0.25, color=colors[i])
plt.close()
display(fig)
def impermanent_loss_fan_plot(swept_var, sweep_dict, il_kpis, market_information):
colors = ['green', 'blue']
subset_array = list(il_kpis.keys())
MC_simulation_array = list(il_kpis[subset_array[0]][list(market_information)[0]].keys())
markets = market_information.keys()
ncols = len(markets)
for subset in subset_array:
fig, axs = plt.subplots(ncols=ncols, nrows=1, figsize=(15,7))
for i, market in enumerate(markets):
title_fig = f"{market} IL, sweep value {sweep_dict[subset]} of parameter '{swept_var}'"
axs[i].set_title(title_fig)
axs[i].set_xlabel('Trade Sequence')
p = pd.concat([il_kpis[subset][market][x]['impermanent_loss'] for x in MC_simulation_array], axis = 1)
axs[i].plot(p.mean(axis=1))
p10 = p.quantile(0.10, axis = 1)
p25 = p.quantile(0.25, axis = 1)
p75 = p.quantile(0.75, axis = 1)
p90 = p.quantile(0.90, axis = 1)
axs[i].fill_between(p.index, p25, p75, alpha = 0.5)
axs[i].fill_between(p.index, p10, p90, alpha = 0.25, color=colors[i])
plt.close()
display(fig)
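# Note on the fan charts above: the shaded bands are Monte Carlo quantile
# envelopes across simulation runs (25-75% inner band, 10-90% outer band).
# A minimal standalone reproduction with synthetic paths (illustrative only):
def _fan_band_demo():
    rng = np.random.default_rng(0)
    p = pd.DataFrame(rng.normal(size=(50, 20)).cumsum(axis=0))  # 20 fake runs
    fig, ax = plt.subplots(figsize=(8, 4))
    ax.plot(p.mean(axis=1))
    ax.fill_between(p.index, p.quantile(0.25, axis=1), p.quantile(0.75, axis=1), alpha=0.5)
    ax.fill_between(p.index, p.quantile(0.10, axis=1), p.quantile(0.90, axis=1), alpha=0.25)
    return fig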
def param_pool_simulation_plot(experiments, config_ids, swept_variable, asset_id, y_variable, *args):
"""
experiments is the simulation result dataframe.
config_ids is the list configs executed upon in the simulation.
swept_variable is the key (string) in config_ids that was being tested against.
asset_id is the asset identifier in the pool (string) e.g i,j,k
y_variable is the state_variable (string) to be plotted against default timestep.
*args for plotting more state_variables (string).
"""
experiments = experiments.sort_values(by =['subset']).reset_index(drop=True)
cols = 1
rows = 1
cc_idx = 0
while cc_idx<len(experiments):
cc = experiments.iloc[cc_idx]['subset']
cc_label = experiments.iloc[cc_idx]['subset']
secondary_label = [item['M'][swept_variable] for item in config_ids if item["subset_id"]== cc_label]
sub_experiments = experiments[experiments['subset']==cc]
cc_idx += len(sub_experiments)
fig, axs = plt.subplots(ncols=cols, nrows=rows, figsize=(15*cols,7*rows))
df = sub_experiments.copy()
df_label = y_variable + asset_id
df[df_label] = df.pool.apply(lambda x: np.array(x.pool[asset_id][y_variable]))
colors = ['orange', 'g', 'magenta', 'r', 'k' ]
df = df.groupby('timestep').agg({df_label: ['min', 'mean', 'max']}).reset_index()
ax = axs
title = swept_variable + ' Effect on Pool Asset ' + asset_id + '\n' + 'Scenario: ' + str(secondary_label[0]) + ' ' + swept_variable
# + 'Scenario: ' + str(cc_label) + ' rules_price'
ax.set_title(title)
ax.set_ylabel('Funds')
df.plot(x='timestep', y=(df_label,'mean'), label=df_label, ax=ax, legend=True, kind ='scatter')
ax.fill_between(df.timestep, df[(df_label,'min')], df[(df_label,'max')], alpha=0.3)
for count, arg in enumerate(args):
df = sub_experiments.copy()
df_arg_label = arg + asset_id
df[df_arg_label] = df.pool.apply(lambda x: np.array(x.pool[asset_id][arg]))
df = df.groupby('timestep').agg({df_arg_label: ['min', 'mean', 'max']}).reset_index()
df.plot(x='timestep', y=(df_arg_label,'mean'), label=df_arg_label, ax=ax, legend=True, color = colors[count], kind ='scatter')
ax.fill_between(df.timestep, df[(df_arg_label,'min')], df[(df_arg_label,'max')], alpha=0.3)
ax.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
ax.set_xlabel('Timesteps')
ax.grid(color='0.9', linestyle='-', linewidth=1)
plt.tight_layout()
fig.tight_layout(rect=[0, 0, 1, .97])
fig.patch.set_alpha(1)
plt.close()
return display(fig)
| 38.790295
| 144
| 0.61507
| 6,755
| 44,764
| 3.820725
| 0.045448
| 0.049363
| 0.038591
| 0.017823
| 0.924484
| 0.9079
| 0.891201
| 0.869038
| 0.85261
| 0.830369
| 0
| 0.010956
| 0.235368
| 44,764
| 1,153
| 145
| 38.823938
| 0.743076
| 0.148646
| 0
| 0.788312
| 0
| 0.001299
| 0.071021
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033766
| false
| 0
| 0.003896
| 0.001299
| 0.048052
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f7b2e1a6e1ff58d87aab862819540614ad10053e
| 4,003
|
py
|
Python
|
model/models_nn.py
|
Valdert-13/captcha
|
dd521e0184462ba80fc6f201a10ee9653c5724c8
|
[
"Apache-2.0"
] | 3
|
2020-07-16T22:09:47.000Z
|
2021-04-12T12:51:10.000Z
|
model/models_nn.py
|
Valdert-13/captcha
|
dd521e0184462ba80fc6f201a10ee9653c5724c8
|
[
"Apache-2.0"
] | null | null | null |
model/models_nn.py
|
Valdert-13/captcha
|
dd521e0184462ba80fc6f201a10ee9653c5724c8
|
[
"Apache-2.0"
] | null | null | null |
import tensorflow as tf
from config import *
def model_1():
    'Initialize the model structure.'
input_img = tf.keras.layers.Input(shape=IMG_SHAPE)
output_code = []
out = tf.keras.layers.Convolution2D(16, (3, 3), padding='same', activation='relu')(input_img)
out = tf.keras.layers.MaxPooling2D(padding='same')(out)
out = tf.keras.layers.Convolution2D(32, (3, 3), padding='same', activation='relu')(out)
out = tf.keras.layers.Convolution2D(32, (3, 3), padding='same', activation='relu')(out)
out = tf.keras.layers.MaxPooling2D(padding='same')(out)
out = tf.keras.layers.Convolution2D(64, (3, 3), padding='same', activation='relu')(out)
out = tf.keras.layers.Convolution2D(64, (3, 3), padding='same', activation='relu')(out)
out = tf.keras.layers.BatchNormalization()(out)
out = tf.keras.layers.MaxPooling2D(padding='same')(out)
out = tf.keras.layers.Flatten()(out)
for _ in range(NUM_CODE_CHARACTERS):
dense = tf.keras.layers.Dense(64, activation='relu')(out)
dropout = tf.keras.layers.Dropout(0.4)(dense)
prediction = tf.keras.layers.Dense(ALL_CHARS_LEN, activation='sigmoid')(dropout)
output_code.append(prediction)
model = tf.keras.Model(input_img, output_code)
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
return model
def model_2():
    'Initialize the model structure.'
input_img = tf.keras.layers.Input(shape=IMG_SHAPE)
output_code = []
out = tf.keras.layers.Convolution2D(16, (3, 3), padding='same', activation='relu')(input_img)
out = tf.keras.layers.MaxPooling2D(padding='same')(out)
out = tf.keras.layers.Convolution2D(32, (3, 3), padding='same', activation='relu')(out)
out = tf.keras.layers.Convolution2D(32, (3, 3), padding='same', activation='relu')(out)
out = tf.keras.layers.MaxPooling2D(padding='same')(out)
out = tf.keras.layers.Convolution2D(32, (3, 3), padding='same', activation='relu')(out)
out = tf.keras.layers.Convolution2D(32, (3, 3), padding='same', activation='relu')(out)
out = tf.keras.layers.MaxPooling2D(padding='same')(out)
out = tf.keras.layers.Convolution2D(64, (3, 3), padding='same', activation='relu')(out)
out = tf.keras.layers.Convolution2D(64, (3, 3), padding='same', activation='relu')(out)
out = tf.keras.layers.BatchNormalization()(out)
out = tf.keras.layers.MaxPooling2D(padding='same')(out)
out = tf.keras.layers.Flatten()(out)
for _ in range(NUM_CODE_CHARACTERS):
dense = tf.keras.layers.Dense(64, activation='relu')(out)
dropout = tf.keras.layers.Dropout(0.4)(dense)
prediction = tf.keras.layers.Dense(ALL_CHARS_LEN, activation='sigmoid')(dropout)
output_code.append(prediction)
model = tf.keras.Model(input_img, output_code)
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
return model
def model_3():
    'Initialize the model structure.'
input_img = tf.keras.layers.Input(shape=IMG_SHAPE)
output_code = []
out = tf.keras.layers.Convolution2D(16, (3, 3), padding='same', activation='relu')(input_img)
out = tf.keras.layers.MaxPooling2D(padding='same')(out)
out = tf.keras.layers.Convolution2D(64, (3, 3), padding='same', activation='relu')(out)
out = tf.keras.layers.Convolution2D(64, (3, 3), padding='same', activation='relu')(out)
out = tf.keras.layers.BatchNormalization()(out)
out = tf.keras.layers.MaxPooling2D(padding='same')(out)
out = tf.keras.layers.Flatten()(out)
for _ in range(NUM_CODE_CHARACTERS):
dense = tf.keras.layers.Dense(64, activation='relu')(out)
dropout = tf.keras.layers.Dropout(0.4)(dense)
prediction = tf.keras.layers.Dense(ALL_CHARS_LEN, activation='sigmoid')(dropout)
output_code.append(prediction)
model = tf.keras.Model(input_img, output_code)
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
return model
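# Hypothetical usage sketch (not from the original file): IMG_SHAPE,
# NUM_CODE_CHARACTERS and ALL_CHARS_LEN are expected to come from config.py;
# the three models differ only in the depth of their convolutional stacks.
if __name__ == '__main__':
    model = model_1()
    model.summary()  # one sigmoid head of ALL_CHARS_LEN units per captcha character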
| 47.094118
| 97
| 0.685736
| 535
| 4,003
| 5.052336
| 0.106542
| 0.116537
| 0.201998
| 0.17758
| 0.982612
| 0.982612
| 0.982612
| 0.982612
| 0.982612
| 0.982612
| 0
| 0.028939
| 0.145391
| 4,003
| 85
| 98
| 47.094118
| 0.761181
| 0.022233
| 0
| 0.926471
| 0
| 0
| 0.095904
| 0.017982
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044118
| false
| 0
| 0.029412
| 0
| 0.117647
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
f7cfb50c9fd85e1690e54690f7de01c2f993b958
| 6,046
|
py
|
Python
|
examples/Chess (currently extremely slow)/How to get list returned by all_actions().py
|
DenseLance/mcts-simple
|
451d1a7de5c01f6483d2c12d5036b89ac96636e2
|
[
"MIT"
] | 2
|
2022-02-21T05:42:48.000Z
|
2022-02-23T06:54:46.000Z
|
examples/Chess (currently extremely slow)/How to get list returned by all_actions().py
|
DenseLance/mcts-simple
|
451d1a7de5c01f6483d2c12d5036b89ac96636e2
|
[
"MIT"
] | null | null | null |
examples/Chess (currently extremely slow)/How to get list returned by all_actions().py
|
DenseLance/mcts-simple
|
451d1a7de5c01f6483d2c12d5036b89ac96636e2
|
[
"MIT"
] | null | null | null |
def chess_positions():
return [f"{char}{num + 1}" for num in range(8) for char in "abcdefgh"]
def knight_moves():
positions = chess_positions()
moves = []
for position in positions:
position_alpha_ord = ord(position[0])
position_digit = int(position[1])
moves.append(f"{position}{chr(position_alpha_ord + 2)}{min(max(position_digit + 1, 0), 9)}")
moves.append(f"{position}{chr(position_alpha_ord + 2)}{min(max(position_digit - 1, 0), 9)}")
moves.append(f"{position}{chr(position_alpha_ord - 2)}{min(max(position_digit + 1, 0), 9)}")
moves.append(f"{position}{chr(position_alpha_ord - 2)}{min(max(position_digit - 1, 0), 9)}")
moves.append(f"{position}{chr(position_alpha_ord + 1)}{min(max(position_digit + 2, 0), 9)}")
moves.append(f"{position}{chr(position_alpha_ord + 1)}{min(max(position_digit - 2, 0), 9)}")
moves.append(f"{position}{chr(position_alpha_ord - 1)}{min(max(position_digit + 2, 0), 9)}")
moves.append(f"{position}{chr(position_alpha_ord - 1)}{min(max(position_digit - 2, 0), 9)}")
return [move for move in moves if move[-2:] in positions]
def bishop_moves():
def helper(position, positions, x, y):
position_alpha_ord = ord(position[0])
position_digit = int(position[1])
next_pos = f"{chr(position_alpha_ord + x)}{min(max(position_digit + y, 0), 9)}"
if next_pos in positions:
return [next_pos] + helper(next_pos, positions, x, y)
else:
return []
positions = chess_positions()
moves = []
for position in positions:
moves += [position + next_pos for next_pos in helper(position, positions, 1, 1)]
moves += [position + next_pos for next_pos in helper(position, positions, 1, -1)]
moves += [position + next_pos for next_pos in helper(position, positions, -1, 1)]
moves += [position + next_pos for next_pos in helper(position, positions, -1, -1)]
return moves
def rook_moves():
def helper(position, positions, x, y):
position_alpha_ord = ord(position[0])
position_digit = int(position[1])
next_pos = f"{chr(position_alpha_ord + x)}{min(max(position_digit + y, 0), 9)}"
if next_pos in positions:
return [next_pos] + helper(next_pos, positions, x, y)
else:
return []
positions = chess_positions()
moves = []
for position in positions:
moves += [position + next_pos for next_pos in helper(position, positions, 1, 0)]
moves += [position + next_pos for next_pos in helper(position, positions, -1, 0)]
moves += [position + next_pos for next_pos in helper(position, positions, 0, 1)]
moves += [position + next_pos for next_pos in helper(position, positions, 0, -1)]
return moves
def queen_moves():
return rook_moves() + bishop_moves()
def pawn_promotion_moves():
def helper(position, positions, x, y):
position_alpha_ord = ord(position[0])
position_digit = int(position[1])
next_pos = f"{chr(position_alpha_ord + x)}{min(max(position_digit + y, 0), 9)}"
if next_pos in positions:
return [next_pos] + helper(next_pos, positions, x, y)
else:
return []
positions = chess_positions()
pawn_positions_white = [alpha + "7" for alpha in "abcdefgh"]
pawn_positions_black = [alpha + "2" for alpha in "abcdefgh"]
moves = []
for position in pawn_positions_white:
moves += [position + next_pos + "q" for next_pos in helper(position, positions, 0, 1)]
moves += [position + next_pos + "r" for next_pos in helper(position, positions, 0, 1)]
moves += [position + next_pos + "b" for next_pos in helper(position, positions, 0, 1)]
moves += [position + next_pos + "n" for next_pos in helper(position, positions, 0, 1)]
moves += [position + next_pos + "q" for next_pos in helper(position, positions, 1, 1)]
moves += [position + next_pos + "r" for next_pos in helper(position, positions, 1, 1)]
moves += [position + next_pos + "b" for next_pos in helper(position, positions, 1, 1)]
moves += [position + next_pos + "n" for next_pos in helper(position, positions, 1, 1)]
moves += [position + next_pos + "q" for next_pos in helper(position, positions, -1, 1)]
moves += [position + next_pos + "r" for next_pos in helper(position, positions, -1, 1)]
moves += [position + next_pos + "b" for next_pos in helper(position, positions, -1, 1)]
moves += [position + next_pos + "n" for next_pos in helper(position, positions, -1, 1)]
for position in pawn_positions_black:
moves += [position + next_pos + "q" for next_pos in helper(position, positions, 0, -1)]
moves += [position + next_pos + "r" for next_pos in helper(position, positions, 0, -1)]
moves += [position + next_pos + "b" for next_pos in helper(position, positions, 0, -1)]
moves += [position + next_pos + "n" for next_pos in helper(position, positions, 0, -1)]
moves += [position + next_pos + "q" for next_pos in helper(position, positions, 1, -1)]
moves += [position + next_pos + "r" for next_pos in helper(position, positions, 1, -1)]
moves += [position + next_pos + "b" for next_pos in helper(position, positions, 1, -1)]
moves += [position + next_pos + "n" for next_pos in helper(position, positions, 1, -1)]
moves += [position + next_pos + "q" for next_pos in helper(position, positions, -1, -1)]
moves += [position + next_pos + "r" for next_pos in helper(position, positions, -1, -1)]
moves += [position + next_pos + "b" for next_pos in helper(position, positions, -1, -1)]
moves += [position + next_pos + "n" for next_pos in helper(position, positions, -1, -1)]
return moves
def possible_moves():
# Naive way of determining action space, weeds out invalid actions
return knight_moves() + queen_moves() + pawn_promotion_moves()
moves = possible_moves()
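# Quick self-check (illustrative, not from the original example): every move
# should start and end on a real board square; promotion moves additionally
# carry a trailing piece letter (q/r/b/n).
if __name__ == "__main__":
    squares = set(chess_positions())
    assert all(m[:2] in squares and m[2:4] in squares for m in moves)
    print(f"{len(moves)} candidate moves in the naive action space")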
| 55.981481
| 100
| 0.634138
| 860
| 6,046
| 4.286047
| 0.067442
| 0.14433
| 0.218394
| 0.17363
| 0.884156
| 0.870049
| 0.870049
| 0.870049
| 0.856484
| 0.856484
| 0
| 0.024636
| 0.227919
| 6,046
| 107
| 101
| 56.504673
| 0.764996
| 0.010586
| 0
| 0.416667
| 0
| 0.083333
| 0.143813
| 0.103512
| 0
| 0
| 0
| 0
| 0
| 1
| 0.104167
| false
| 0
| 0
| 0.03125
| 0.239583
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f72665b02611c25f6224783cfd40d4362312e741
| 62,701
|
py
|
Python
|
tests/job_metadata.py
|
fossabot/DIRBS-Core-1
|
70bf72e2e6dda6e0d7a20cf744300930d88ee70c
|
[
"PostgreSQL",
"Unlicense"
] | null | null | null |
tests/job_metadata.py
|
fossabot/DIRBS-Core-1
|
70bf72e2e6dda6e0d7a20cf744300930d88ee70c
|
[
"PostgreSQL",
"Unlicense"
] | null | null | null |
tests/job_metadata.py
|
fossabot/DIRBS-Core-1
|
70bf72e2e6dda6e0d7a20cf744300930d88ee70c
|
[
"PostgreSQL",
"Unlicense"
] | 3
|
2019-10-24T11:40:06.000Z
|
2022-02-24T07:34:00.000Z
|
"""
job_metadata api data import unit tests.
Copyright (c) 2018-2019 Qualcomm Technologies, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted (subject to the
limitations in the disclaimer below) provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice, this list of conditions and the following
disclaimer.
- Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided with the distribution.
- Neither the name of Qualcomm Technologies, Inc. nor the names of its contributors may be used to endorse or promote
products derived from this software without specific prior written permission.
- The origin of this software must not be misrepresented; you must not claim that you wrote the original software.
If you use this software in a product, an acknowledgment is required by displaying the trademark/log as per the
details provided here: https://www.qualcomm.com/documents/dirbs-logo-and-brand-guidelines
- Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software.
- This notice may not be removed or altered from any source distribution.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS LICENSE. THIS SOFTWARE IS PROVIDED BY
THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""
import json
from flask import url_for
from _fixtures import * # noqa: F403, F401
from _helpers import job_metadata_importer
import dirbs.metadata as metadata
def test_classification_json_api(flask_app, db_conn, api_version):
"""Test Depot ID not known yet.
Verify that job_metadata returns a JSON containing job metadata
for classification job.
"""
extra_metadata = {'matched_imei_counts':
{'compound_dimension': 0,
'simple_dimension': 0},
'curr_date': None,
'conditions':
[{'dimensions':
[{'module': 'gsma_not_found'}],
'grace_period_days': 30,
'sticky': False,
'reason': 'Violated simple dimension',
'max_allowed_matching_ratio': 0.1,
'label': 'simple_dimension',
'blocking': True},
{'dimensions':
[{'module': 'stolen_list'},
{'invert': True,
'parameters':
{'threshold': 3.1,
'period_days': 30},
'module': 'duplicate_daily_avg'}],
'grace_period_days': 0,
'sticky': False,
'reason': 'Violated compound dimension',
'max_allowed_matching_ratio': 0.1,
'label': 'compound_dimension',
'blocking': True}]} # noqa E127
job_metadata_importer(db_conn=db_conn, command='dirbs-classify', run_id=1, subcommand='', status='success',
extra_metadata=extra_metadata)
if api_version == 'v1':
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version),
command='dirbs-classify',
run_id=1,
subcommand='',
status='success',
show_details=True))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))[0]
assert data['command'] == 'dirbs-classify'
assert data['run_id'] == 1
assert data['subcommand'] == ''
assert data['status'] == 'success'
assert data['extra_metadata'] == extra_metadata
else: # job_metadata api version 2.0
rv = flask_app.get(url_for('{0}.job_metadata_get_api'.format(api_version),
command='dirbs-classify',
run_id=1,
subcommand='',
status='success',
show_details=True))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))
assert data['_keys']['result_size'] == 1
assert data['_keys']['previous_key'] == ''
assert data['_keys']['next_key'] == ''
assert data['jobs'][0]['command'] == 'dirbs-classify'
assert data['jobs'][0]['run_id'] == 1
assert data['jobs'][0]['subcommand'] == ''
assert data['jobs'][0]['status'] == 'success'
assert data['jobs'][0]['extra_metadata'] == extra_metadata
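# Illustrative helper (not part of the original suite): the v2 API wraps the
# job list in a 'jobs' key plus a '_keys' pagination envelope, so a small
# extractor can keep v1/v2 assertions symmetric.
def _jobs_payload(rv, api_version):
    """Return the list of job dicts regardless of API version (sketch)."""
    payload = json.loads(rv.data.decode('utf-8'))
    return payload if api_version == 'v1' else payload['jobs']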
def test_prune_json_api(flask_app, db_conn, api_version):
"""Test Depot ID not known yet.
Verify that job_metadata returns a JSON containing job metadata
for pruning triplets and classification_state job.
"""
extra_metadata = {'rows_before': 0,
'retention_months': 6,
'curr_date': None,
'rows_after': 0}
job_metadata_importer(db_conn=db_conn, command='dirbs-prune',
run_id=9, subcommand='triplets', status='success',
extra_metadata=extra_metadata)
job_metadata_importer(db_conn=db_conn, command='dirbs-prune', run_id=10, subcommand='classification_state',
status='success', extra_metadata=extra_metadata)
if api_version == 'v1':
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version),
command='dirbs-prune',
status='success',
show_details=True))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))
triplets_data = data[0]
assert triplets_data['command'] == 'dirbs-prune'
assert triplets_data['run_id'] == 9
assert triplets_data['subcommand'] == 'triplets'
assert triplets_data['status'] == 'success'
assert triplets_data['extra_metadata'] == extra_metadata
class_data = data[1]
assert class_data['command'] == 'dirbs-prune'
assert class_data['run_id'] == 10
assert class_data['subcommand'] == 'classification_state'
assert class_data['status'] == 'success'
assert class_data['extra_metadata'] == extra_metadata
else: # job_metadata api version 2.0
rv = flask_app.get(url_for('{0}.job_metadata_get_api'.format(api_version),
command='dirbs-prune',
status='success',
show_details=True))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))
triplets_data = data['jobs'][0]
assert triplets_data['command'] == 'dirbs-prune'
assert triplets_data['run_id'] == 9
assert triplets_data['subcommand'] == 'triplets'
assert triplets_data['status'] == 'success'
assert triplets_data['extra_metadata'] == extra_metadata
class_data = data['jobs'][1]
assert class_data['command'] == 'dirbs-prune'
assert class_data['run_id'] == 10
assert class_data['subcommand'] == 'classification_state'
assert class_data['status'] == 'success'
assert class_data['extra_metadata'] == extra_metadata
assert data['_keys']['result_size'] == 2
assert data['_keys']['previous_key'] == ''
assert data['_keys']['next_key'] == ''
def test_operator_import_json_api(flask_app, db_conn, api_version):
"""Test Depot ID not known yet.
Verify that job_metadata returns a JSON containing job metadata
for importing operator job.
"""
extra_metadata = {'performance_timing':
{'init_staging_end': '2017-08-16T01:05:17.17081+00:00',
'init_staging_start': '2017-08-16T01:05:16.817426+00:00',
'extract_split_start': '2017-08-16T01:05:16.10788+00:00',
'prevalidate_upload_start': '2017-08-16T01:05:17.34236+00:00',
                          'analyze_staging_end': '2017-08-16T01:05:20.807413+00:00',
'validation_binary_checks_end': '2017-08-16T01:05:25.565519+00:00',
'prevalidate_upload_end': '2017-08-16T01:05:20.125746+00:00',
'analyze_staging_start': '2017-08-16T01:05:20.296765+00:00',
'preprocess_start': '2017-08-16T01:05:16.474489+00:00',
'extract_split_end': '2017-08-16T01:05:16.301238+00:00',
'preprocess_end': '2017-08-16T01:05:16.645968+00:00',
'postprocess_staging_end': '2017-08-16T01:05:24.531709+00:00',
'validation_threshold_checks_start': '2017-08-16T01:05:25.741384+00:00',
'validation_binary_checks_start': '2017-08-16T01:05:24.705607+00:00',
'postprocess_staging_start': '2017-08-16T01:05:20.978153+00:00'},
'home_threshold': 0.2,
'cc': ['22%'],
'clean_threshold': 0.05,
'null_msisdn_threshold': 0.05,
'perform_leading_zero_check': True,
'perform_file_daterange_check': True,
'perform_null_check': True,
'perform_clean_check': True,
'perform_historic_imsi_check': True,
'perform_null_imsi_check': True,
'perform_null_msisdn_check': True,
'perform_historic_msisdn_check': True,
                      'operator_id': 'operator1',
                      'input_file': '/workspace/data/operator1_home_check_exceeded_20160701_20160731.zip',
                      'batch_size': 1000000,
                      'mcc_mnc_pairs': [{'mnc': '01', 'mcc': '111'}],
'perform_historic_imei_check': True,
'null_imsi_threshold': 0.05,
'perform_rat_import': False,
'perform_null_imei_check': True,
'perform_home_check': True,
'null_imei_threshold': 0.05,
'region_threshold': 0.1,
'perform_region_check': False} # noqa E127
job_metadata_importer(db_conn=db_conn, command='dirbs-import', run_id=1, subcommand='operator',
status='error', extra_metadata=extra_metadata)
if api_version == 'v1':
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version), show_details=False))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))[0]
assert data['command'] == 'dirbs-import'
assert data['run_id'] == 1
assert data['subcommand'] == 'operator'
assert data['status'] == 'error'
else: # api version 2.0
rv = flask_app.get(url_for('{0}.job_metadata_get_api'.format(api_version), show_details=False))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))['jobs'][0]
assert data['command'] == 'dirbs-import'
assert data['run_id'] == 1
assert data['subcommand'] == 'operator'
assert data['status'] == 'error'
def test_stolen_import_json_api(flask_app, db_conn, api_version):
"""Test Depot ID not known yet.
Verify that job_metadata returns a JSON containing job metadata
for importing stolen_list job.
"""
extra_metadata = {'output_stats':
{'num_records_updated': 20,
'num_records': 20,
'num_records_inserted': 20},
'performance_timing':
{'init_staging_end': '2017-08-22T01:42:30.695313+00:00',
'analyze_staging_end': '2017-08-22T01:42:34.286028+00:00',
'validation_threshold_checks_end': '2017-08-22T01:42:36.380127+00:00',
'analyze_staging_start': '2017-08-22T01:42:33.78045+00:00',
'preprocess_start': '2017-08-22T01:42:30.023073+00:00',
'copy_from_staging_end': '2017-08-22T01:42:38.553902+00:00',
'validation_binary_checks_start': '2017-08-22T01:42:35.537445+00:00',
'validation_threshold_checks_start': '2017-08-22T01:42:36.208775+00:00',
'output_stats_start': '2017-08-22T01:42:38.721215+00:00',
'validation_historical_checks_end': '2017-08-22T01:42:37.049421+00:00',
'extract_split_end': '2017-08-22T01:42:29.855514+00:00',
'copy_from_staging_start': '2017-08-22T01:42:37.38383+00:00',
'extract_split_start': '2017-08-22T01:42:29.674068+00:00',
'validation_historical_checks_start': '2017-08-22T01:42:36.547579+00:00',
'preprocess_end': '2017-08-22T01:42:30.191182+00:00',
'postprocess_staging_end': '2017-08-22T01:42:35.370151+00:00',
'init_staging_start': '2017-08-22T01:42:30.358302+00:00',
'validation_binary_checks_end': '2017-08-22T01:42:36.041237+00:00',
'output_stats_end': '2017-08-22T01:42:39.225688+00:00',
'prevalidate_upload_end': '2017-08-22T01:42:33.612194+00:00',
'prevalidate_upload_start': '2017-08-22T01:42:30.862953+00:00',
'postprocess_staging_start': '2017-08-22T01:42:34.458834+00:00'},
'perform_historic_check': True,
'input_file':
'/workspace/data/sample_import_list.zip',
'batch_size': 1000000,
'input_stats':
{'num_records_valid': 20,
'num_records': 20,
'num_records_invalid': 0}} # noqa E127
job_metadata_importer(db_conn=db_conn, command='dirbs-import', run_id=1, subcommand='stolen_list',
status='success', extra_metadata=extra_metadata)
if api_version == 'v1':
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version), command='dirbs-import'))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))[0]
assert data['command'] == 'dirbs-import'
assert data['run_id'] == 1
assert data['subcommand'] == 'stolen_list'
assert data['status'] == 'success'
else: # api version 2.0
rv = flask_app.get(url_for('{0}.job_metadata_get_api'.format(api_version), command='dirbs-import'))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))['jobs'][0]
assert data['command'] == 'dirbs-import'
assert data['run_id'] == 1
assert data['subcommand'] == 'stolen_list'
assert data['status'] == 'success'
def test_pairing_import_json_api(flask_app, db_conn, api_version):
"""Test Depot ID not known yet.
Verify that job_metadata returns a JSON containing job metadata
for a pairing_list import job.
"""
extra_metadata = {'perform_duplicates_check': True,
'perform_historic_check': True,
'performance_timing':
{'init_staging_end': '2017-08-22T01:41:59.925562+00:00',
'init_staging_start': '2017-08-22T01:41:59.588253+00:00',
'extract_split_start': '2017-08-22T01:41:58.901343+00:00',
'prevalidate_upload_start': '2017-08-22T01:42:00.093237+00:00',
'analyze_staging_end': '2017-08-22T01:42:03.478264+00:00',
'prevalidate_upload_end': '2017-08-22T01:42:02.788264+00:00',
'analyze_staging_start': '2017-08-22T01:42:02.956404+00:00',
'preprocess_start': '2017-08-22T01:41:59.252764+00:00',
'extract_split_end': '2017-08-22T01:41:59.08492+00:00',
'preprocess_end': '2017-08-22T01:41:59.421052+00:00',
'postprocess_staging_end': '2017-08-22T01:42:04.520465+00:00',
'validation_binary_checks_start': '2017-08-22T01:42:04.68826+00:00',
'postprocess_staging_start': '2017-08-22T01:42:03.646232+00:00'},
'batch_size': 1000000,
'input_file':
'/workspace/data/duplicate.zip'} # noqa E127
job_metadata_importer(db_conn=db_conn, command='dirbs-import', run_id=1, subcommand='pairing_list',
status='error', extra_metadata=extra_metadata)
if api_version == 'v1':
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version)))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))[0]
assert data['command'] == 'dirbs-import'
assert data['run_id'] == 1
assert data['subcommand'] == 'pairing_list'
assert data['status'] == 'error'
assert data['extra_metadata'] == extra_metadata
else: # api version 2.0
rv = flask_app.get(url_for('{0}.job_metadata_get_api'.format(api_version)))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))['jobs'][0]
assert data['command'] == 'dirbs-import'
assert data['run_id'] == 1
assert data['subcommand'] == 'pairing_list'
assert data['status'] == 'error'
assert data['extra_metadata'] == extra_metadata
def test_gsma_import_json_api(flask_app, db_conn, api_version):
"""Test Depot ID not known yet.
Verify that job_metadata returns a JSON containing job metadata
for a GSMA TAC import job.
"""
extra_metadata = {'output_stats':
{'num_records_updated': 4,
'num_records': 4,
'num_records_inserted': 4},
'performance_timing':
{'init_staging_end': '2017-08-22T01:56:25.875908+00:00',
'analyze_staging_end': '2017-08-22T01:56:29.386537+00:00',
'validation_threshold_checks_end': '2017-08-22T01:56:31.231756+00:00',
'analyze_staging_start': '2017-08-22T01:56:28.886486+00:00',
'preprocess_start': '2017-08-22T01:56:25.192466+00:00',
'copy_from_staging_end': '2017-08-22T01:56:33.42097+00:00',
'validation_binary_checks_start': '2017-08-22T01:56:30.725186+00:00',
'validation_threshold_checks_start': '2017-08-22T01:56:31.063007+00:00',
'output_stats_start': '2017-08-22T01:56:33.589227+00:00',
'validation_historical_checks_end': '2017-08-22T01:56:31.915001+00:00',
'extract_split_end': '2017-08-22T01:56:25.023654+00:00',
'copy_from_staging_start': '2017-08-22T01:56:32.250857+00:00',
'extract_split_start': '2017-08-22T01:56:24.844737+00:00',
'validation_historical_checks_start': '2017-08-22T01:56:31.400242+00:00',
'preprocess_end': '2017-08-22T01:56:25.368138+00:00',
'postprocess_staging_end': '2017-08-22T01:56:30.557336+00:00',
'init_staging_start': '2017-08-22T01:56:25.536523+00:00',
'validation_binary_checks_end': '2017-08-22T01:56:30.895228+00:00',
'output_stats_end': '2017-08-22T01:56:34.097277+00:00',
'prevalidate_upload_end': '2017-08-22T01:56:28.718421+00:00',
'prevalidate_upload_start': '2017-08-22T01:56:26.043878+00:00',
'postprocess_staging_start': '2017-08-22T01:56:29.554878+00:00'},
'perform_historic_check': True,
'input_file':
'/workspace/data/duplicate_gsma.zip',
'batch_size': 1000000,
'input_stats':
{'num_records_valid': 4,
'num_records': 7,
'num_records_invalid': 3}} # noqa E127
job_metadata_importer(db_conn=db_conn, command='dirbs-import', run_id=1, subcommand='gsma_tac',
status='success', extra_metadata=extra_metadata)
if api_version == 'v1':
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version), show_details=False))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))[0]
assert data['command'] == 'dirbs-import'
assert data['run_id'] == 1
assert data['subcommand'] == 'gsma_tac'
assert data['status'] == 'success'
else: # api version 2.0
rv = flask_app.get(url_for('{0}.job_metadata_get_api'.format(api_version), show_details=False))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))['jobs'][0]
assert data['command'] == 'dirbs-import'
assert data['run_id'] == 1
assert data['subcommand'] == 'gsma_tac'
assert data['status'] == 'success'
def test_registration_import_json_api(flask_app, db_conn, api_version):
"""Test Depot ID not known yet.
Verify that job_metadata returns a JSON containing job metadata
for a registration_list import job.
"""
extra_metadata = {'perform_duplicates_check': True,
'perform_historic_check': True,
'performance_timing':
{'init_staging_end': '2017-08-22T01:43:21.386498+00:00',
'init_staging_start': '2017-08-22T01:43:21.035571+00:00',
'extract_split_start': '2017-08-22T01:43:20.35253+00:00',
'prevalidate_upload_start': '2017-08-22T01:43:21.554073+00:00',
'preprocess_start': '2017-08-22T01:43:20.699411+00:00',
'extract_split_end': '2017-08-22T01:43:20.531135+00:00',
'preprocess_end': '2017-08-22T01:43:20.867795+00:00'},
'batch_size': 1000000,
'input_file':
'/workspace/data/'
'sample_import_list.zip'} # noqa E127
job_metadata_importer(db_conn=db_conn, command='dirbs-import', run_id=1, subcommand='registration_list',
status='error', extra_metadata=extra_metadata)
if api_version == 'v1':
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version), command='dirbs-import'))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))[0]
assert data['command'] == 'dirbs-import'
assert data['run_id'] == 1
assert data['subcommand'] == 'registration_list'
assert data['status'] == 'error'
assert data['extra_metadata'] == extra_metadata
else: # api version 2.0
rv = flask_app.get(url_for('{0}.job_metadata_get_api'.format(api_version), command='dirbs-import'))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))['jobs'][0]
assert data['command'] == 'dirbs-import'
assert data['run_id'] == 1
assert data['subcommand'] == 'registration_list'
assert data['status'] == 'error'
assert data['extra_metadata'] == extra_metadata
def test_golden_import_json_api(flask_app, db_conn, api_version):
"""Test Depot ID not known yet.
Verify that job_metadata returns a JSON containing job metadata
for a golden_list import job.
"""
extra_metadata = {'performance_timing':
{'init_staging_end': '2017-08-22T01:43:05.017337+00:00',
'init_staging_start': '2017-08-22T01:43:04.681766+00:00',
'extract_split_start': '2017-08-22T01:43:03.993331+00:00',
'prevalidate_upload_start': '2017-08-22T01:43:05.18436+00:00',
'preprocess_start': '2017-08-22T01:43:04.337401+00:00',
'extract_split_end': '2017-08-22T01:43:04.17081+00:00',
'preprocess_end': '2017-08-22T01:43:04.504815+00:00'},
'perform_historic_check': True,
'pre_hashed': False,
'input_file':
'/workspace/data/sample_import_list.zip',
'batch_size': 1000000} # noqa E127
job_metadata_importer(db_conn=db_conn, command='dirbs-import', run_id=1, subcommand='golden_list',
status='error', extra_metadata=extra_metadata)
if api_version == 'v1':
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version), show_details=True))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))[0]
assert data['command'] == 'dirbs-import'
assert data['run_id'] == 1
assert data['subcommand'] == 'golden_list'
assert data['status'] == 'error'
assert data['extra_metadata'] == extra_metadata
else: # api version 2.0
rv = flask_app.get(url_for('{0}.job_metadata_get_api'.format(api_version), show_details=True))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))['jobs'][0]
assert data['command'] == 'dirbs-import'
assert data['run_id'] == 1
assert data['subcommand'] == 'golden_list'
assert data['status'] == 'error'
assert data['extra_metadata'] == extra_metadata
def test_db_schema_json_api(flask_app, db_conn, api_version):
"""Test Depot ID not known yet.
Verify that job_metadata returns a JSON containing job metadata
for a db schema upgrade job.
"""
job_metadata_importer(db_conn=db_conn, command='dirbs-db', run_id=1, subcommand='upgrade',
status='success')
if api_version == 'v1':
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version), show_details=True))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))[0]
assert data['command'] == 'dirbs-db'
assert data['run_id'] == 1
assert data['subcommand'] == 'upgrade'
assert data['status'] == 'success'
assert data['extra_metadata'] == {}
else: # api version 2.0
rv = flask_app.get(url_for('{0}.job_metadata_get_api'.format(api_version), show_details=True))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))['jobs'][0]
assert data['command'] == 'dirbs-db'
assert data['run_id'] == 1
assert data['subcommand'] == 'upgrade'
assert data['status'] == 'success'
assert data['extra_metadata'] == {}
def test_list_gen_schema_json_api(flask_app, db_conn, api_version):
"""Test Depot ID not known yet.
Verify that job_metadata returns a JSON containing list generation metadata.
"""
extra_metadata = {'blacklist':
{'file_size_bytes': 25,
'md5sum': 'd623e56b7c73d27fc7ce68e3dfc6e448',
'num_records': 0,
'filename': '/workspace/data/20170822_021142/blacklist.csv'},
'notification_lists':
[{'file_size_bytes': 37,
'md5sum': '3ac7b8ae8722e47e1ce4b0a01fe8b1e2',
'num_records': 0,
'filename': '/workspace/data/20170822_021142/notifications_operator1.csv'},
{'file_size_bytes': 37,
'md5sum': '3ac7b8ae8722e47e1ce4b0a01fe8b1e2',
'num_records': 0,
'filename': '/workspace/data/20170822_021142/notifications_operator2.csv'},
{'file_size_bytes': 37,
'md5sum': '3ac7b8ae8722e47e1ce4b0a01fe8b1e2',
'num_records': 0,
'filename': '/workspace/data/20170822_021142/notifications_operator3.csv'},
{'file_size_bytes': 37,
'md5sum': '3ac7b8ae8722e47e1ce4b0a01fe8b1e2',
'num_records': 0,
'filename': '/workspace/data/20170822_021142/notifications_operator4.csv'}],
'curr_date': None,
'exception_lists':
[{'file_size_bytes': 11,
'md5sum': 'b9a2f42722d13636dfb6c84e2ee765fe',
'num_records': 0,
'filename': '/workspace/data/20170822_021142/exceptions_operator1.csv'},
{'file_size_bytes': 11,
'md5sum': 'b9a2f42722d13636dfb6c84e2ee765fe',
'num_records': 0,
'filename': '/workspace/data/20170822_021142/exceptions_operator2.csv'},
{'file_size_bytes': 11,
'md5sum': 'b9a2f42722d13636dfb6c84e2ee765fe',
'num_records': 0,
'filename': '/workspace/data/20170822_021142/exceptions_operator3.csv'},
{'file_size_bytes': 11,
'md5sum': 'b9a2f42722d13636dfb6c84e2ee765fe',
'num_records': 0,
'filename': '/workspace/data/20170822_021142/exceptions_operator4.csv'}],
'blocking_conditions':
[{'dimensions':
[{'module': 'gsma_not_found'}],
'grace_period_days': 30,
'sticky': False,
'reason': 'Violated simple dimension',
'max_allowed_matching_ratio': 0.1,
'label': 'simple_dimension',
'blocking': True},
{'dimensions':
[{'module': 'stolen_list'},
{'invert': True,
'parameters':
{'threshold': 3.1,
'period_days': 30},
'module': 'duplicate_daily_avg'}],
'grace_period_days': 0,
'sticky': False,
'reason': 'Violated compound dimension',
'max_allowed_matching_ratio': 0.1,
'label': 'compound_dimension',
'blocking': True}]} # noqa E127
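# Each blocking condition above couples one or more dimension modules with its
# matching policy: grace_period_days, sticky, max_allowed_matching_ratio, a
# human-readable reason and a blocking flag.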
job_metadata_importer(db_conn=db_conn, command='dirbs-listgen', run_id=1, subcommand='',
status='success', extra_metadata=extra_metadata)
if api_version == 'v1':
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version),
show_details=False))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))[0]
assert data['command'] == 'dirbs-listgen'
assert data['run_id'] == 1
assert data['subcommand'] == ''
assert data['status'] == 'success'
else: # api version 2.0
rv = flask_app.get(url_for('{0}.job_metadata_get_api'.format(api_version),
show_details=False))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))['jobs'][0]
assert data['command'] == 'dirbs-listgen'
assert data['run_id'] == 1
assert data['subcommand'] == ''
assert data['status'] == 'success'
def test_report_schema_json_api(flask_app, db_conn, api_version):
"""Test Depot ID not known yet.
Verify that job_metadata returns a JSON containing report metadata.
"""
extra_metadata = {'refreshed_data': True,
'month': 2,
'output_dir': '/workspace/data',
'year': 2016}
job_metadata_importer(db_conn=db_conn, command='dirbs-report', run_id=1, subcommand='',
status='error', extra_metadata=extra_metadata)
if api_version == 'v1':
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version)))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))[0]
assert data['command'] == 'dirbs-report'
assert data['run_id'] == 1
assert data['subcommand'] == ''
assert data['status'] == 'error'
assert data['extra_metadata'] == extra_metadata
else: # api version 2.0
rv = flask_app.get(url_for('{0}.job_metadata_get_api'.format(api_version)))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))['jobs'][0]
assert data['command'] == 'dirbs-report'
assert data['run_id'] == 1
assert data['subcommand'] == ''
assert data['status'] == 'error'
assert data['extra_metadata'] == extra_metadata
def test_job_metadata_bad_pos_int_params(flask_app, db_conn, api_version):
"""Test Depot ID unknown yet.
Verify that job_metadata API returns a 400 status for not positive integer run_id or max_result,
"""
if api_version == 'v1':
# not numeric run_id
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version),
command='dirbs-classify',
run_id='aaa',
status='success',
show_details=False))
assert rv.status_code == 400
assert b'Bad \'run_id\':\'aaa\' argument format. Accepts only integer' in rv.data
# not positive run_id
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version),
command='dirbs-classify',
run_id=-1,
status='success',
show_details=False))
assert rv.status_code == 400
assert b'Param \'run_id\':\'-1\' must be greater than 0' in rv.data
# not numeric max_result
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version),
command='dirbs-classify',
run_id=1,
status='success',
max_results='a',
show_details=False))
assert rv.status_code == 400
assert b'Bad \'max_results\':\'a\' argument format. Accepts only integer' in rv.data
# not positive max_result
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version),
command='dirbs-classify',
run_id=1,
status='success',
max_results=0,
show_details=False))
assert rv.status_code == 400
assert b'Param \'max_results\':\'0\' must be greater than 0' in rv.data
# list of max_result (will take just the first elem of the list)
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version),
command='dirbs-classify',
run_id=1,
status='success',
max_results=[1, -2],
show_details=False))
assert rv.status_code == 200
# set max_result to 1 and check that only one record is returned
job_metadata_importer(db_conn=db_conn, command='dirbs-classify', run_id=1, subcommand='sub_one',
status='success')
job_metadata_importer(db_conn=db_conn, command='dirbs-classify', run_id=2, subcommand='sub_two',
status='success')
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version),
run_id=1,
db_user='test-user',
subcommand=['sub_one', 'sub_two'],
show_details=False,
max_results=1))
assert rv.status_code == 200
assert len(json.loads(rv.data.decode('utf-8'))) == 1
else: # api version 2.0
# not numeric run_id
rv = flask_app.get(url_for('{0}.job_metadata_get_api'.format(api_version),
command='dirbs-classify',
run_id='aaa',
status='success',
show_details=False))
assert rv.status_code == 400
assert b'Bad \'run_id\':\'aaa\' argument format. Accepts only integer' in rv.data
# not positive run_id
rv = flask_app.get(url_for('{0}.job_metadata_get_api'.format(api_version),
command='dirbs-classify',
run_id=-1,
status='success',
show_details=False))
assert rv.status_code == 400
assert b'Param \'run_id\':\'-1\' must be greater than 0' in rv.data
# set max_result to 1 and check that only one record is returned
job_metadata_importer(db_conn=db_conn, command='dirbs-classify', run_id=1, subcommand='sub_one',
status='success')
job_metadata_importer(db_conn=db_conn, command='dirbs-classify', run_id=2, subcommand='sub_two',
status='success')
rv = flask_app.get(url_for('{0}.job_metadata_get_api'.format(api_version),
run_id=1,
db_user='test-user',
subcommand=['sub_one', 'sub_two'],
show_details=False,
max_results=1))
assert rv.status_code == 200
assert len(json.loads(rv.data.decode('utf-8'))['jobs']) == 1
def test_job_metadata_bad_params(flask_app, api_version):
"""Test Depot ID unknown yet.
Verify that job_metadata API returns a 400 status for unknown status or not boolean show_details.
"""
if api_version == 'v1':
# unknown status
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version), status='unknown'))
assert rv.status_code == 400
assert b'Bad \'status\':\'unknown\' argument format. ' \
b'Accepts only one of [\'running\', \'success\', \'error\']' in rv.data
# list of status containing an unknown status
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version), status=['error', 'unknown']))
assert rv.status_code == 400
assert b'Bad \'status\':\'unknown\' argument format. ' \
b'Accepts only one of [\'running\', \'success\', \'error\']' in rv.data
# not boolean show_details
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version),
show_details='not_boolean'))
assert rv.status_code == 400
assert b'Bad \'show_details\':\'not_boolean\' argument format. ' \
b'Accepts only one of [\'0\', \'1\', \'true\', \'false\']' in rv.data
else: # api version 2.0
# unknown status
rv = flask_app.get(url_for('{0}.job_metadata_get_api'.format(api_version), status='unknown'))
assert rv.status_code == 400
assert b'Bad \'status\':\'unknown\' argument format. ' \
b'Accepts only one of [\'running\', \'success\', \'error\']' in rv.data
# list of status containing an unknown status
rv = flask_app.get(url_for('{0}.job_metadata_get_api'.format(api_version), status=['error', 'unknown']))
assert rv.status_code == 400
assert b'Bad \'status\':\'unknown\' argument format. ' \
b'Accepts only one of [\'running\', \'success\', \'error\']' in rv.data
# not boolean show_details
rv = flask_app.get(url_for('{0}.job_metadata_get_api'.format(api_version),
show_details='not_boolean'))
assert rv.status_code == 400
assert b'Bad \'show_details\':\'not_boolean\' argument format. ' \
b'Accepts only one of [\'0\', \'1\', \'true\', \'false\']' in rv.data
def test_json_show_details(flask_app, db_conn, api_version):
"""Test Depot ID not known yet.
Verify that job_metadata returns a JSON containing job metadata
with extra information if show_details is set to true.
"""
extra_metadata = {'matched_imei_counts':
{'compound_dimension': 0,
'simple_dimension': 0}, # noqa E127
'conditions':
[{'label': 'simple_dimension',
'blocking': True,
'sticky': False,
'reason': 'Violated simple dimension',
'max_allowed_matching_ratio': 0.1,
'dimensions':
[{'module': 'gsma_not_found'}],
'grace_period_days': 30},
{'label': 'compound_dimension',
'blocking': True,
'sticky': False,
'reason':
'Violated compound dimension',
'max_allowed_matching_ratio': 0.1,
'dimensions':
[{'module': 'stolen_list'},
{'invert': True,
'module': 'duplicate_daily_avg',
'parameters':
{'period_days': 30,
'threshold': 3.1}}],
'grace_period_days': 0}],
'curr_date': None}
job_metadata_importer(db_conn=db_conn, command='dirbs-classify', run_id=1, subcommand='',
status='success', extra_metadata=extra_metadata)
if api_version == 'v1':
# Step 1 show_details=True
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version),
command='dirbs-classify',
run_id=1,
status='success',
show_details=True))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))[0]
assert data['command'] == 'dirbs-classify'
assert data['run_id'] == 1
assert data['subcommand'] == ''
assert data['status'] == 'success'
assert data['extra_metadata'] == extra_metadata
# Step 2 show_details=False
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version),
command='dirbs-classify',
run_id=1,
status='success',
max_results=10,
show_details=False))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))[0]
assert data['command'] == 'dirbs-classify'
assert data['run_id'] == 1
assert data['subcommand'] == ''
assert data['status'] == 'success'
assert 'extra_metadata' not in data
else: # api version 2.0
# Step 1 show_details=True
rv = flask_app.get(url_for('{0}.job_metadata_get_api'.format(api_version),
command='dirbs-classify',
run_id=1,
status='success',
show_details=True))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))['jobs'][0]
assert data['command'] == 'dirbs-classify'
assert data['run_id'] == 1
assert data['subcommand'] == ''
assert data['status'] == 'success'
assert data['extra_metadata'] == extra_metadata
# Step 2 show_details=False
rv = flask_app.get(url_for('{0}.job_metadata_get_api'.format(api_version),
command='dirbs-classify',
run_id=1,
status='success',
max_results=10,
show_details=False))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))['jobs'][0]
assert data['command'] == 'dirbs-classify'
assert data['run_id'] == 1
assert data['subcommand'] == ''
assert data['status'] == 'success'
assert 'extra_metadata' not in data
def test_json_no_record_for_get_params(flask_app, db_conn, api_version):
"""Test Depot ID not known yet.
Verify that job_metadata returns an empty JSON if params are well formatted
but match no rows in the job_metadata table.
"""
job_metadata_importer(db_conn=db_conn, command='dirbs-classify', run_id=1, subcommand='',
status='success', extra_metadata={'metadata': 'metadata'})
if api_version == 'v1':
# Add row into job_metadata table with run_id=1 and get url for param run_id=2.
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version),
command='dirbs-classify',
run_id=2,
db_user='test-user',
status='success',
max_results=10,
show_details=True))
assert rv.status_code == 200
assert json.loads(rv.data.decode('utf-8')) == []
else: # api version 2.0
# Add row into job_metadata table with run_id=1 and get url for param run_id=2.
rv = flask_app.get(url_for('{0}.job_metadata_get_api'.format(api_version),
command='dirbs-classify',
run_id=2,
db_user='test-user',
status='success',
max_results=10,
show_details=True))
assert rv.status_code == 200
assert json.loads(rv.data.decode('utf-8'))['jobs'] == []
def test_json_unknown_command_param(flask_app, db_conn, api_version):
"""Test Depot ID not known yet.
Verify that job_metadata doesn't allow unknown command params.
"""
if api_version == 'v1':
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version),
command='dirbs-unknown',
run_id=2,
db_user='test-user',
status='success',
max_results=10,
show_details=True))
assert rv.status_code == 400
assert b'Bad \'command\':\'dirbs-unknown\' argument format. ' \
b'Accepts only one of [\'dirbs-catalog\', \'dirbs-classify\', ' \
b'\'dirbs-db\', \'dirbs-import\', \'dirbs-listgen\', \'dirbs-prune\', \'dirbs-report\']' in rv.data
else: # api version 2.0
rv = flask_app.get(url_for('{0}.job_metadata_get_api'.format(api_version),
command='dirbs-unknown',
run_id=2,
db_user='test-user',
status='success',
show_details=True))
assert rv.status_code == 400
assert b'Bad \'command\':\'dirbs-unknown\' argument format. ' \
b'Accepts only one of [\'dirbs-catalog\', \'dirbs-classify\', ' \
b'\'dirbs-db\', \'dirbs-import\', \'dirbs-listgen\', \'dirbs-prune\', \'dirbs-report\']' in rv.data
def test_json_multiple_values_same_param(flask_app, db_conn, api_version):
"""Test Depot ID not known yet.
Verify that job_metadata returns a JSON containing job metadata if GET params
consist of lists of values.
"""
# Step 1 list of valid params: run_id=[1,2]; subcommand=['sub_one', 'sub_two']
job_metadata_importer(db_conn=db_conn, command='dirbs-classify', run_id=1, subcommand='sub_one',
status='success')
job_metadata_importer(db_conn=db_conn, command='dirbs-classify', run_id=2, subcommand='sub_two',
status='success')
if api_version == 'v1':
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version),
run_id=[1, 2],
db_user='test-user',
subcommand=['sub_one', 'sub_two'],
show_details=False))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))[0]
assert data['subcommand'] == 'sub_one'
assert data['run_id'] == 1
data = json.loads(rv.data.decode('utf-8'))[1]
assert data['run_id'] == 2
assert data['subcommand'] == 'sub_two'
# Step 2 list with invalid params: run_id=[1,-2];
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version),
command='dirbs-classify',
run_id=[1, -2],
db_user='test-user',
subcommand=['sub_one', 'sub_two'],
status=['success', 'error'],
max_results=10,
show_details=False))
assert rv.status_code == 400
assert b'Param \'run_id\':\'-2\' must be greater than 0' in rv.data
else: # api version 2.0
rv = flask_app.get(url_for('{0}.job_metadata_get_api'.format(api_version),
run_id=[1, 2],
db_user='test-user',
subcommand=['sub_one', 'sub_two'],
show_details=False))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))['jobs'][0]
assert data['subcommand'] == 'sub_one'
assert data['run_id'] == 1
data = json.loads(rv.data.decode('utf-8'))['jobs'][1]
assert data['run_id'] == 2
assert data['subcommand'] == 'sub_two'
# Step 2 list with invalid params: run_id=[1,-2];
rv = flask_app.get(url_for('{0}.job_metadata_get_api'.format(api_version),
command='dirbs-classify',
run_id=[1, -2],
db_user='test-user',
subcommand=['sub_one', 'sub_two'],
status=['success', 'error'],
show_details=False))
assert rv.status_code == 400
assert b'Param \'run_id\':\'-2\' must be greater than 0' in rv.data
def test_json_no_run_id_param(flask_app, db_conn, api_version):
"""Test Depot ID not known yet.
Verify that if run_id is set to an empty list, it is not used to filter the results of the query.
"""
job_metadata_importer(db_conn=db_conn, command='dirbs-classify', run_id=1, subcommand='',
status='success')
if api_version == 'v1':
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version),
run_id=[],
show_details=False))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))[0]
assert data['command'] == 'dirbs-classify'
assert data['run_id'] == 1
assert data['subcommand'] == ''
assert data['status'] == 'success'
else: # api version 2.0
rv = flask_app.get(url_for('{0}.job_metadata_get_api'.format(api_version),
run_id=[],
show_details=False))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))['jobs'][0]
assert data['command'] == 'dirbs-classify'
assert data['run_id'] == 1
assert data['subcommand'] == ''
assert data['status'] == 'success'
def test_default_params(flask_app, db_conn, api_version):
"""Test Depot ID not known yet.
Verify that job_metadata returns a JSON containing all job metadata
if no request params are given.
"""
job_metadata_importer(db_conn=db_conn, command='dirbs-classify', run_id=1, subcommand='',
status='success')
if api_version == 'v1':
rv = flask_app.get(url_for('{0}.job_metadata_api'.format(api_version)))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))[0]
assert data['command'] == 'dirbs-classify'
assert data['run_id'] == 1
assert data['subcommand'] == ''
assert data['status'] == 'success'
assert data['extra_metadata'] == {}
else: # api version 2.0
rv = flask_app.get(url_for('{0}.job_metadata_get_api'.format(api_version)))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))['jobs'][0]
assert data['command'] == 'dirbs-classify'
assert data['run_id'] == 1
assert data['subcommand'] == ''
assert data['status'] == 'success'
assert data['extra_metadata'] == {}
def test_method_delete_not_allowed(flask_app, db_conn, api_version):
"""Test Depot ID not known yet.
Verify the job_metadata API does not support HTTP DELETE and returns HTTP 405 METHOD NOT ALLOWED.
"""
if api_version == 'v1':
rv = flask_app.delete(url_for('{0}.job_metadata_api'.format(api_version)))
assert rv.status_code == 405
assert b'Method Not Allowed' in rv.data
else: # api version 2.0
rv = flask_app.delete(url_for('{0}.job_metadata_get_api'.format(api_version)))
assert rv.status_code == 405
assert b'Method Not Allowed' in rv.data
def test_method_post_not_allowed(flask_app, db_conn, api_version):
"""Test Depot ID not known yet.
Verify the job_metadata API does not support HTTP POST and returns HTTP 405 METHOD NOT ALLOWED.
"""
if api_version == 'v1':
rv = flask_app.post(url_for('{0}.job_metadata_api'.format(api_version)))
assert rv.status_code == 405
assert b'Method Not Allowed' in rv.data
else: # api version 2.0
rv = flask_app.post(url_for('{0}.job_metadata_get_api'.format(api_version)))
assert rv.status_code == 405
assert b'Method Not Allowed' in rv.data
def test_method_put_not_allowed(flask_app, db_conn, api_version):
"""Test Depot ID not known yet.
Verify the job_metadata API does not support HTTP PUT and returns HTTP 405 METHOD NOT ALLOWED.
"""
if api_version == 'v1':
rv = flask_app.put(url_for('{0}.job_metadata_api'.format(api_version)))
assert rv.status_code == 405
assert b'Method Not Allowed' in rv.data
else: # api version 2.0
rv = flask_app.put(url_for('{0}.job_metadata_get_api'.format(api_version)))
assert rv.status_code == 405
assert b'Method Not Allowed' in rv.data
def test_job_metadata_most_recent_successful_job_start_time(db_conn):
"""Test Depot ID not known yet.
Verify the metadata.most_recent_job_start_time_by_command function for the most recent successful job.
"""
extra_metadata = {'perform_duplicates_check': True,
'perform_historic_check': True,
'performance_timing': {}} # noqa E127
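# Smoke check only: the call below should not raise; its return value is not asserted.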
job_metadata_importer(db_conn=db_conn, command='dirbs-import', run_id=1, subcommand='pairing-list',
status='success', extra_metadata=extra_metadata)
metadata.most_recent_job_start_time_by_command(db_conn, 'dirbs-import', subcommand='pairing-list',
successful_only=True)
def test_job_metadata_v2_pagination(flask_app, db_conn):
"""Test Depot ID not known yet.
Verify that results returned by the metadata API version 2.0 are paginated.
"""
# insert 20 records
for i in range(10):
job_metadata_importer(db_conn=db_conn, command='dirbs-classify', run_id=i, subcommand='',
status='success')
job_metadata_importer(db_conn=db_conn, command='dirbs-prune',
run_id=i, subcommand='triplets', status='success')
# test all records are fetched when no pagination params are given
rv = flask_app.get(url_for('v2.job_metadata_get_api'))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))
assert data['_keys']['result_size'] == 20
assert data['_keys']['previous_key'] == ''
assert data['_keys']['next_key'] == ''
assert len(data['jobs']) == 20
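# _keys carries relative pagination links; empty previous_key/next_key strings
# mean there is no page before/after the current result window.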
# test pagination, start from 1st record and 5 records per page
offset = 1
limit = 5
rv = flask_app.get(url_for('v2.job_metadata_get_api', offset=offset, limit=limit))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))
assert data['_keys']['result_size'] == 20
assert data['_keys']['previous_key'] == ''
assert data['_keys']['next_key'] == '?offset={0}&limit={1}'.format(offset + limit, limit)
assert len(data['jobs']) == 5
next_offset = offset + limit
rv = flask_app.get(url_for('v2.job_metadata_get_api', offset=next_offset, limit=limit))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))
assert data['_keys']['result_size'] == 20
assert data['_keys']['previous_key'] == '?offset={0}&limit={1}'.format(next_offset - limit, limit)
assert data['_keys']['next_key'] == '?offset={0}&limit={1}'.format(next_offset + limit, limit)
next_offset = next_offset + limit
rv = flask_app.get(url_for('v2.job_metadata_get_api', offset=next_offset, limit=limit))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))
assert data['_keys']['result_size'] == 20
assert data['_keys']['previous_key'] == '?offset={0}&limit={1}'.format(next_offset - limit, limit * 2)
assert data['_keys']['next_key'] == '?offset={0}&limit={1}'.format(next_offset + limit, limit)
# pagination with sorting order ascending based on run_id
offset = 1
limit = 5
order = 'Ascending'
rv = flask_app.get(url_for('v2.job_metadata_get_api', offset=offset, limit=limit, order=order))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))
assert data['_keys']['result_size'] == 20
assert data['_keys']['previous_key'] == ''
assert data['_keys']['next_key'] == '?offset={0}&limit={1}'.format(offset + limit, limit)
assert len(data['jobs']) == 5
assert data['jobs'][0]['run_id'] <= data['jobs'][1]['run_id']
assert data['jobs'][1]['run_id'] <= data['jobs'][2]['run_id']
assert data['jobs'][2]['run_id'] <= data['jobs'][3]['run_id']
assert data['jobs'][3]['run_id'] <= data['jobs'][4]['run_id']
# order Descending
order = 'Descending'
rv = flask_app.get(url_for('v2.job_metadata_get_api', offset=offset, limit=limit, order=order))
assert rv.status_code == 200
data = json.loads(rv.data.decode('utf-8'))
assert data['_keys']['result_size'] == 20
assert data['_keys']['previous_key'] == ''
assert data['_keys']['next_key'] == '?offset={0}&limit={1}'.format(offset + limit, limit)
assert len(data['jobs']) == 5
assert data['jobs'][0]['run_id'] >= data['jobs'][1]['run_id']
assert data['jobs'][1]['run_id'] >= data['jobs'][2]['run_id']
assert data['jobs'][2]['run_id'] >= data['jobs'][3]['run_id']
assert data['jobs'][3]['run_id'] >= data['jobs'][4]['run_id']
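# The tests above run once per API version via an `api_version` fixture supplied by
# the project's conftest. A minimal sketch of such a fixture (hypothetical; the real
# DIRBS fixture may be defined differently) that would drive both branches:
import pytest


@pytest.fixture(params=['v1', 'v2'])
def api_version(request):
    """Yield each supported API version so every test runs against both branches."""
    return request.param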
| … | f74b2331ae6f54a214f05ad65cf9982eab7c2c8c | 397 | py | Python | tests/internal/network_performance/test_network_performance_high_auto.py | frolovv/aws.ec2.compare | 582805823492f833d65c0441c4a14dce697c12aa | ["Apache-2.0"] | 1 | 2021-12-15T11:58:22.000Z | 2021-12-15T11:58:22.000Z |
# Testing module network_performance.high
import pytest
import ec2_compare.internal.network_performance.high
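# Non-emptiness smoke checks on the generated "high" network-performance dataset:
# both the instance list and the module-level `get` collection must contain entries.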
def test_get_internal_data_network_performance_high_get_instances_list():
assert len(ec2_compare.internal.network_performance.high.get_instances_list()) > 0
def test_get_internal_data_network_performance_high_get():
assert len(ec2_compare.internal.network_performance.high.get) > 0
| … | f769059a5e4c874ab99a34d9eb3174b001b9a691 | 185 | py | Python | readability/functions/__init__.py | russelljjarvis/readabilityinscience | 353d79f11f2380fd4872242397a255a4b1da675c | ["MIT"] | null | null | null |
import readability.functions.readabilityFunctions
import readability.functions.abstract_cleanup
import readability.functions.convert_id
import readability.functions.dataminingfunctions
| … | f76f9994cdf0906665df27cbd7589035059b3d2c | 92 | py | Python | pyogpclient/__init__.py | JanHolger/pyogpclient | 9482b62bfc615b13a60d116fc1b44b482040ee72 | ["Apache-2.0"] | 1 | 2021-04-09T00:47:02.000Z | 2021-04-09T00:47:02.000Z |
from pyogpclient.OGPClient import OGPClient
from pyogpclient.OGPClient import FULL_OGP_QUERY
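# Package re-exports from pyogpclient/OGPClient.py; judging by the names alone,
# OGPClient appears to be the client class and FULL_OGP_QUERY a predefined query
# constant (this reading is an assumption, not confirmed by the source).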
| … | f77742b1eb86f4be8120436f9142175da380d2d0 | 144 | py | Python | Curso Python Completo - Udemy/py/MeusModulos/ContaCorrente.py | Cauenumo/Python | 6414ee2013c651e9d45cd328a381a476c6c9073b | ["Apache-2.0"] | null | null | null |
def credit(valor):
    # Returns a confirmation string; "Valor creditado" = "Amount credited" (Portuguese)
    return 'Valor creditado R${:.2f}'.format(valor)


def debit(valor):
    # "Valor debitado" = "Amount debited"
    return 'Valor debitado R${:.2f}'.format(valor)
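# Usage: credit(150.5) -> 'Valor creditado R$150.50'; debit(20) -> 'Valor debitado R$20.00'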
| … | e3de44e75f51ae9a16633e9c0840f2ccee4b2f56 | 45,879 | py | Python | MISSIONS/air_fight/environment/render/render_pic.py | Harold0/hmp | 4745e1d3e56c7f08947c839526e6827daa3e6048 | ["MIT"] | null | null | null |
__pyarmor__(__name__, __file__, b'\xe7\x50\x8c\x64…')  # pyarmor-obfuscated bytecode payload; binary bytes elided
\x58\xd1\x83\x79\xf3\xdd\xd7\x81\x3b\xd9\x3f\xa4\x84\xf3\x7a\x3e\x84\xc5\x6d\xa1\x3f\x6b\x4a\x2d\x90\xe2\xdf\xae\x5b\x31\xf1\x01\x57\x11\x3c\x32\xe1\xc2\xe8\xf7\x19\xa1\x1f\x7f\xd1\x72\x95\x83\xc4\x75\xe4\x23\xaa\x7e\xf5\x77\x87\x54\x75\xe7\xbf\x00\xb9\x0c\x9d\x1c\x77\xb0\xac\x42\x12\x04\xb5\xe0\x3c\x3e\x56\x2b\xbf\x31\xd8\x30\x38\xdb\x29\xf3\x5f\x68\x46\xf7\x8e\xc9\x7a\x82\x59\x20\x32\x30\x53\xc4\x07\x75\x17\x6a\x90\x9d\x1f\xd5\xf5\x79\xe0\x75\xc9\x55\xee\xdd\xd9\x8e\xa2\x2d\x3b\xd5\xd0\xea\x8c\x05\x8c\x6d\xd4\xf7\xf7\xa5\x56\x7a\x79\x70\x3d\x02\x35\x3b\xe6\x26\x87\xd9\x45\x73\x98\xcc\xfb\x7f\x4d\x92\xf8\xb2\x4b\xee\x63\x3c\xbd\x38\xcc\xa8\xf3\x4a\x46\x2f\x5e\xd2\xb4\xbe\xcf\x49\x01\x17\x65\x24\xa9\x4f\xf9\x71\xef\xa5\xe4\xb6\x80\x59\xaf\x72\x7c\xd1\xea\x19\xb4\x58\x67\xef\x58\x38\x6c\x27\x66\xf4\x98\xce\xec\x0c\xd9\x27\x9c\x42\x76\xfd\xcb\xe3\x8d\xdf\xd5\xa9\xb5\xa0\xa9\x42\x6b\x55\x31\x90\xc8\x62\x64\xea\x07\xf2\x54\x09\x4f\x87\x36\xcf\x98\x61\xbe\x5d\x3c\xfb\x40\xe0\xe8\x3e\x9e\xa9\xa1\x21\x7b\x0e\xd5\x13\xbf\x4d\xbf\x6a\x9e\xfd\x81\x88\x96\x8a\x52\xe4\x94\x6b\x3a\x31\x67\xd7\x26\x5c\x72\x1e\x60\xcf\xf6\x44\x89\x80\xf3\x58\x71\x78\x27\xe6\x34\xac\x8a\x85\x1d\x85\x83\xb6\xb5\x78\xf3\xf8\x01\x14\x04\x00\x62\xf3\x34\x79\x99\x87\x27\x6c\xa8\x04\x15\xea\xec\x94\x42\x97\xb2\x4d\x65\x70\x00\x2b\x97\x99\xf9\x01\xc5\x5f\x34\x71\xbb\x5b\xe7\x8e\x18\x7c\xff\x21\xf6\x04\x9b\x29\x13\x7a\xe4\xdc\x7f\xf2\x53\x69\xff\x5d\xaa\xf5\x53\x9e\x13\xf0\x5a\x62\x17\x4b\x8c\xc9\x1f\x59\x30\x42\x56\xaf\x40\x71\x29\x4e\x84\xdf\x33\xd3\x81\x71\x46\x3e\xd9\x7d\x00\x9a\xfc\xf3\xda\xa2\x74\x34\xb8\x78\x47\xf2\xb8\xae\x8b\xf0\xfb\xf5\xd4\x7e\x50\x32\xd0\x5b\x41\xbe\xe5\x42\x0b\x4f\x6a\x21\x47\xdc\xd3\x3d\x68\x41\xca\x47\xdc\xa0\xf1\x61\x4e\x10\xa2\x09\xd8\x1c\xbc\x0c\xf4\x04\x49\xab\xb6\x4d\xc5\x33\xb3\x55\x39\xbc\xb7\x59\x1f\xea\x0f\x7a\x82\x57\x66\x0e\xd1\xe3\xec\xaf\x64\xf9\x85\xc8\xcc\xb9\xd5\x40\x03\xa8\xfe\x23\xd4\x02\x80\x46\x8c\xaa\x3e\x92\xe7\x92\x0f\x99\xe6\x4d\x6b\xd0\x1d\x9e\x09\x60\x68\x3c\xc4\x64\x7c\x81\xee\xab\x01\xe6\xb5\xde\x91\xeb\x11\x37\x91\x8a\x27\x82\x0c\xa9\xf7\x22\x49\xcc\xea\x87\x18\x91\xc6\xee\x5f\x26\xb0\xee\x06\x63\xe4\x13\x8c\x72\x1a\xe2\x1d\xa9\xe9\x0a\x0c\xd3\x6f\x7a\x52\xe2\xf2\x0d\xa4\xa0\x94\xdd\xfa\x03\x86\xc6\xa5\x82\xe6\x80\x2d\xd5\xbf\x08\xa9\xb2\x44\xdc\x56\x36\x48\xe8\xf2\xc6\x44\x11\x32\x22\x2c\x63\xfd\x5d\xab\xd1\x7f\x7e\xe9\x00\xae\x3d\x5b\x0c\xef\x84\xd6\x88\xa7\xb8\xb5\x7d\xa5\xbc\x35\x4c\x26\x22\x66\x35\x1f\xfb\xee\xf0\x5e\x34\xa1\x67\xb5\xb0\x5e\xd3\xe9\x13\x97\xdf\x47\x69\xa2\xf6\xed\x77\xf6\x6c\xec\x90\xe8\x1c\xbb\xeb\x08\x14\xfc\xc2\x85\x4b\x80\x62\x28\x12\x39\x4d\xd4\xa0\xab\xe1\x57\x80\x11\x72\xf9\x5c\xbc\xdb\x75\xc4\xaf\x9f\x31\x8e\xc0\x77\x8a\x6a\xe4\x0c\xd3\xf8\xcd\xd1\xbf\x80\xa6\x05\x3e\xbc\x7f\xe5\x74\xcb\x33\x6e\xaf\x34\x0b\xa5\x03\x6a\x6f\x49\x61\xe6\x67\x06\x9c\x09\x8c\x63\x36\xf2\x7b\x0e\xc2\xaa\xde\x8c\xcd\xcb\x1e\x65\xde\x75\xf2\xbd\x29\x22\xe2\x17\x68\xb9\xf8\x83\xba\x30\x05\x24\xa3\x3f\x33\xc0\x8b\x55\xe4\x8b\xcb\x30\x20\x4b\xc3\xc3\x6a\x5a\xde\xc3\x4a\xcf\x13\xa0\xcb\xcd\x21\xd2\xf0\xc8\x3c\xd6\x07\x3c\x93\x2f\xef\x8d\x79\xd4\xf9\x28\x64\x3e\x0b\x90\x6f\x3d\x3f\x7d\xac\x35\xcb\x07\xe5\x1b\x93\xd7\x33\xce\x12\x40\x27\x41\x49\xbb\xee\x18\x47\xf7\x31\xa9\xd8\x27\xeb\x24\xc6\x67\x15\xa9\x12\x96\x6c\x00\xb7\xf6\xfd\x1b\xc9\x3e\xb0\x5e\xa2\x4c\x6a\x6e\xea\xcd\xf4\xd3\xf5\x4e\x3d\x23\xcb\xcc\x15\xb1\x4e\x66\x8e\x0b\xc3\x7b\xa9\x3d\x23\x25\xb2\xac\xe9\xe6\x6b\xfe\x13\xa0\x1c\x5d\x32\xcb\xff\x70\xb1\x71\xb9\x80\xc0\xa4\xe3\x22\xfb\x21\xa0\xe8\xf7\x0a\x6d\xe9\x2
c\xab\xe8\x79\xf5\x56\xee\x80\x82\x65\xe2\x08\xf9\x4d\x85\xd7\x4c\x27\x33\x78\xb0\xa4\xdf\x7e\x2b\x12\x74\x91\x61\x6b\xce\x51\xdd\xac\xf1\x84\x05\x4f\x6a\x0f\xa0\x2c\x01\xfe\xf2\x97\x27\xc8\xdb\xca\xf9\x98\x4e\x68\x9e\x0a\x57\x74\x13\x78\x7b\x21\x6a\xd6\x16\x8d\xae\x43\xf5\x68\x1b\x82\xe1\x46\xec\x44\x73\x73\xf6\x2e\xb2\x57\xe8\x55\xb7\x9f\xba\xeb\x99\xb5\x0a\xeb\xb5\x4e\x61\x59\xd8\xfb\xf0\x5b\xe4\x77\xd7\xdd\x18\xc8\x70\x73\x53\xb1\xe4\x7a\x48\x0c\x6b\xa3\xb3\xe5\xcf\x76\x70\xb4\xb2\xea\x86\x5a\x4e\x83\x14\xea\xc8\x84\x44\xf4\xfc\x28\xd4\x1f\xe6\x23\x35\x2e\x00\xd0\x0e\x21\x20\x84\x7c\xdd\x07\x90\x74\x91\x2e\x7b\x74\x7b\xb3\xd4\x3f\xb8\x95\xb9\x1c\x96\x08\xdb\x93\xb4\x1b\x2a\x3f\x66\xa1\xb9\xd8\x3c\xee\xf6\x84\x59\x76\x82\x62\x3b\xfb\x05\x5a\x49\xb5\x98\x8d\x4c\x4b\xc8\x9f\x1c\xcb\x2c\x48\xa1\x55\x7a\x9e\x53\x54\x9f\xa8\xc8\x2d\x22\x69\x89\xbd\x38\xde\xa6\x72\x42\x34\x5a\x77\xb8\x6c\x73\x5e\xaa\xa2\x77\x07\xbc\x05\x73\xe6\x5b\x9a\x98\xd6\xe0\xdd\x38\x8a\x10\x4d\xd1\xa6\x2b\x28\xfd\x92\x27\xe8\x52\x24\xa6\x99\x2a\x08\xfc\xfa\xc9\x2b\x88\xba\x12\xa7\xe3\x58\xe6\x96\xb8\xdb\x70\xda\x8b\x9c\xfe\xca\x4a\x6f\x60\xa7\x72\x6d\x41\x42\x22\x81\x4c\x82\x65\xe4\x9b\x6d\x4c\xfc\xf0\x7d\x79\xbf\xcb\x1f\x67\xc2\x37\xdd\x83\xf2\xdb\x5c\x2a\xe7\x8a\xc1\x26\x80\x1b\x45\xa9\xea\x8b\xff\x06\x1a\x94\xc9\x7f\xbd\xa0\x33\x61\xa4\x66\x99\x12\x07\xe8\xe6\x4d\x64\x2a\x0c\x26\x32\x4b\xfd\x1d\x19\x2b\x0a\xd8\x30\x26\x33\xef\xd0\x69\xd8\x61\xf3\x7c\xdc\xbb\xa6\x22\x16\x1e\x31\xaa\x47\xe8\x7e\x7f\x76\xc5\xb2\x53\xc4\x70\xba\xf5\x10\x32\x10\x10\xda\xe3\xfa\x0f\xb8\x42\xf4\x06\xbb\x95\xff\x89\x73\x72\xaa\x69\xd8\x1b\x5e\x9e\xd7\xff\x3a\xdc\xe4\x0f\xa2\x19\x33\x05\x89\xfd\x98\xb9\x37\x31\xda\xef\xf6\x4d\xf0\xf7\x1e\x39\x66\x78\x7c\xae\x29\xdb\x82\x3b\xb9\xc2\xb4\x0c\x34\x40\xc6\x78\x0a\x58\xbf\x72\xd5\x7d\x15\x2c\x4a\xf9\x92\x03\xb3\x0d\x90\x08\xd1\xf0\x3a\x97\xd9\x16\x04\x59\x49\x40\x4f\x93\xb0\x7f\x9f\x18\x44\xed\x2d\xea\xd9\x03\x20\x9c\x73\xa8\x02\x2a\xcc\xc4\x2a\xfd\x50\xb3\x40\x44\x94\xf8\x7e\x80\xb0\xf4\x96\x0e\xd1\x5b\xe3\xfd\x8b\x10\x4c\x73\x0b\x92\xe5\x40\xec\x0e\x82\x39\x2b\xf8\xce\xae\x0c\xfe\xe0\x2f\x9f\x6a\x06\x21\x30\x6a\xbe\xba\x1a\x0b\x6b\xb4\xa3\xde\x47\xe8\x0f\x56\x0b\x34\x62\xff\xdb\xf4\xbd\x71\x40\x4d\x5c\x8a\x46\x23\x6a\x6c\x1d\xc0\x21\x1f\x3c\xcb\x65\x3a\x61\x9d\xf3\x01\x12\x84\x7a\x61\x9c\x5e\xa7\x2c\xa7\x10\x90\x88\xc7\x6c\xb8\xf8\x96\xc2\x52\x83\x41\x17\xb2\x4d\x03\x31\x04\x31\x3e\x1d\xfe\xfd\x90\x28\x7e\x63\xc5\xbd\xe4\x18\x87\x52\x51\xd9\xa8\xe5\x1f\xa8\x02\x02\xa4\x42\xd9\x58\x4a\x3e\x54\x8a\x70\xd8\x13\xbf\xad\x21\xd4\x44\xae\xe9\x5a\x7b\x9d\x35\x2f\x3d\xb2\xe3\x31\x00\x71\x10\x12\xc5\xcb\xcd\xe7\xfe\x47\x2b\x43\x05\x25\x0d\x7a\x4d\x05\x7a\x58\xa8\x33\x41\x83\x1d\xfd\x2a\xf5\xcf\x50\xfe\xfa\x31\x0a\x8b\x57\x6e\xac\x11\xa1\xe3\x48\xde\x7e\x6c\x45\x47\xa4\x9c\x2b\x1e\xa9\x6d\xea\x8e\xb8\xed\x6c\x48\xbe\x72\x2c\x8c\x7e\x1f\x9f\xb6\xfd\xb7\xa7\xbb\xea\x61\x39\x94\xc6\x36\xb5\x1c\x84\xd8\xf6\xa0\xf4\x86\xcf\x69\xcd\xda\xb3\x25\x8d\x2f\x64\x66\x66\xf5\xd4\x1a\xd4\xbc\x12\x25\x5d\x66\xd7\xbb\x12\x99\xf2\x47\x7f\xe7\x94\x91\xe2\xd9\x78\x3f\x68\xc0\x95\xf5\x41\x47\x99\x9d\x7a\x98\xaa\xe6\xa0\x63\xc4\x61\x34\x44\x74\x0f\x37\x54\x83\x6a\xba\xfb\xd4\x2c\x79\x69\xa5\xa0\x31\xe7\xfc\x5c\x81\x44\xfb\xc6\xca\xa6\x22\xa0\xb6\x74\x2a\x90\xd3\x9c\x5e\x07\xe8\xbb\x31\xdb\x02\x1d\x69\x4d\xba\xc2\x8e\xbb\x1d\x67\xbb\x7c\x7e\xc0\xf0\x8d\x0d\x61\xe1\x18\xec\x59\x9f\x38\xfc\x37\x77\x1d\xdb\xfa\x26\x58\xbb\xec\x87\x48\x9a\xa0\x75\xe4\x58\x38\xf0\xf8\x6e\x43\x0c\x4a\x87\x8b\x35\x68\x48\x4c\xa8\xa3\xcf\xd6\xdd\x8f\xb4\x
a0\x8d\x26\x13\xc7\xa4\x81\xa4\x01\xc5\xdb\xac\x56\xc3\x71\x78\xb6\x5f\x9a\xbb\x1f\x18\xc9\xf9\x34\xed\xe7\x8a\x20\x30\x77\x25\xf4\xfe\xd9\x22\xdb\x0d\x0e\xff\xe5\xe9\x94\x65\xda\x8b\x65\x74\x0f\x17\x3c\xab\x39\xa7\xc3\xd2\xd1\xb4\x2b\x09\xd8\xae\x0a\x69\x01\xad\x3d\x29\x80\xda\x32\x38\x9c\x8e\x7d\x0a\x67\xff\xdd\x42\xbf\xcf\x55\x37\xd8\xe4\xa2\x95\xab\x74\x7d\x5e\x69\xd2\x42\x9a\xfb\x3c\xfc\x91\x9f\x22\x01\x16\xfa\xf6\x90\x64\x5f\x90\x27\x2d\xf3\x1e\xaa\x02\x08\x6b\x88\xb3\x2d\x00\x71\x7b\xfd\x46\x25\xbb\xbd\x42\x54\x1e\xd3\xb3\x53\x48\x72\xfa\x0e\xea\x13\x0a\x29\xae\xc8\x43\xc6\xf2\x9d\xb3\x58\x41\x84\x3c\x6d\x7b\x4d\x28\x9d\x83\x53\x72\x09\x31\x6b\xf6\x86\xa5\x52\x5b\xa9\xde\xc8\x78\xaf\x5f\xe5\x37\x74\x77\x64\x38\xa7\x85\x07\x07\x2e\x7d\xa2\x96\x1f\xa3\xdb\x00\x15\xb6\x13\x41\x1b\xf5\xd9\xe7\xae\xc9\x3a\xdc\x8e\x02\x0a\x08\x9a\xb2\x81\xaf\x84\xe0\x36\x3d\x01\xf0\x0e\xb0\xc5\x48\xc5\xe3\x5b\x65\x3a\xbf\x1a\xf9\xc0\xbf\xf8\xee\x74\x82\x36\xc1\x88\xb8\x4d\x95\xba\x1a\x64\xd5\x23\x99\xfa\xe4\xda\xd8\x49\xf4\x67\x06\x9f\xf0\xc9\x34\xbb\x21\x63\x24\x12\xad\x92\x2b\x6c\xf4\xfb\x7f\x0f\x9e\xf7\x78\xc2\x84\x7c\xc2\x0a\x73\x63\x4d\xcb\x1d\x51\xd9\xd9\x65\x0a\xfa\x7c\x6d\xd7\x52\x27\x3e\x68\xfd\x07\x02\x99\x34\x58\x8a\xf7\x93\x2b\xb6\x87\x25\xad\xf4\x67\x8b\x54\x37\x41\x82\xa8\x66\x75\x97\x84\xe0\xa9\x2b\x85\xb2\xbd\x4a\x3e\x83\xe9\xee\xa7\x38\xf7\xec\x5b\x7f\x7b\x35\x6f\x5e\x76\x3e\x9d\x74\x6d\xe4\x70\x4f\x0c\x3e\x9d\x36\x70\x63\xc6\x66\xfd\xcf\x51\x48\x88\xb2\xdc\x22\xfe\xaa\x63\x33\xf1\x17\x64\x2c\x62\xee\x3c\xe4\x5f\x4e\x76\x79\xdc\x5b\x19\x5e\x98\x43\x15\x7e\xdf\x0b\x53\x38\xfd\x21\x11\x75\xd5\xeb\x68\x7e\x58\x9b\xbe\x96\x7e\x6d\xdf\x59\x59\xdf\x2d\x89\x98\x9a\x9d\x0d\x64\x20\x9d\x75\xcc\xd3\xdd\x8d\x05\x97\xb4\xe2\x2c\xed\x1f\xbe\x2d\x63\xbd\x08\x10\x90\xdb\xf2\x57\x76\xa5\x9a\x13\xc5\x8a\xc8\xba\x93\xd2\x5b\x97\x9c\x06\xec\x68\x1d\xaa\x05\x90\xb3\xf2\xa6\x7f\x26\x25\xa5\x6d\x2d\x8c\x3a\xfb\x41\x38\xba\xa4\xb4\xc8\xa5\xb0\x60\x87\x7f\x24\x11\x95\x2a\x89\x6d\xa8\x00\x8b\xb2\x53\x2d\x8d\xeb\xd6\xe3\x58\x91\x82\xc7\x87\xcb\x4f\x3b\xd2\x74\xc6\x3a\xe1\x6d\xa8\x0e\xf7\x8b\x66\x20\x36\x8f\x6e\x90\x27\x04\xcf\xc1\xe5\x45\x5c\x83\x54\xd5\x9a\x07\xa4\x57\xc6\xaf\xaf\xc7\xd4\xad\x42\xb6\xc5\xfa\xf6\x3d\x21\x88\x83\x49\x99\x47\x7e\x7f\xeb\x97\xc4\xca\x4b\xce\x2b\xaf\xc3\xf1\xf1\x20\x00\x8b\xa8\x90\xf2\x43\xd1\x9d\xcc\xfb\x88\x34\x53\x45\xe7\x4b\x1d\xeb\x6a\x38\xaa\x65\x6f\xc2\xc9\x0d\xb9\xd3\x0d\x0f\x43\x1a\x42\xa5\x0b\xd5\x62\xd1\xdf\x54\xbf\x08\x2e\x4b\xae\x95\x66\x34\x24\xef\x6c\x44\x7b\x8c\xe1\xb8\x54\x38\x42\xaa\x44\x42\x2b\x4e\xd0\xcb\x70\xc5\x14\x7a\xf1\xfc\xff\x8a\x6c\xab\x4b\xb4\xa5\x0d\x6b\x6c\x02\x21\x7c\x65\x6b\xea\x48\x31\xac\x61\x8b\x23\x17\x0a\x82\x54\x17\x12\x34\xcc\x7f\xb1\xe4\x15\xa3\xa2\xc6\x0d\x6e\x51\xe5\xba\xe1\x6c\x2c\x76\x44\x05\xf7\x44\x45\xf9\x1d\xc3\x8a\xca\xe4\x0e\xce\xff\xa2\x2d\xcd\xc1\xa0\xd8\xcd\x44\xfc\x74\xfd\x7a\x40\x85\xcf\x02\x92\xe8\xa3\x99\x02\x49\x0b\x8e\x2c\x88\xd4\x43\x6d\x88\x60\xd4\xe3\xf8\xaf\x88\xa2\x0c\x8f\xfa\x91\x58\x24\xc3\x2f\x9b\x40\x33\x1c\xa3\x2e\x46\x58\xa3\x92\xe1\x2b\xd0\x5d\x5a\xbb\x15\x08\xf2\xf6\xb5\x5e\xcb\x2d\xf1\xde\xf8\xf4\x53\x08\x99\x12\x27\x86\xa6\xd2\xd5\xc7\x8e\xe5\x53\x5b\xf7\xbd\xb7\xb6\x7d\x62\x8a\xd4\x92\xed\x29\x40\xa2\x1c\x1a\xc8\x25\x4c\x9b\xcd\x99\xed\xe7\x12\x08\xb0\xa3\x90\x1d\xed\xc5\x63\xb4\xee\x53\x6f\x18\xeb\x68\xa9\x67\xff\x95\x3e\xf3\x1d\x12\x02\x3b\xc3\xcc\x1b\x33\xd4\x84\xd0\x0c\xf0\x3b\xe7\x59\x9d\x68\x7d\x7e\xd1\x7c\x01\x9f\xe5\x9a\x68\xbd\xa7\xfd\xfd\xdf\x0c\xe7\x5c\xf6\x74\x28\xb0\x6a\xfe\x90\x0e\x6c\x43\x69\xaf\
xbf\xa4\x88\x99\x24\x17\x9e\xef\x8f\x74\x45\x4d\x57\xfc\xc0\xdb\x3a\x36\xbb\x22\xd8\x7c\x3c\x95\xeb\x7a\x0f\x76\x6e\xc5\x33\x64\x85\x4c\xd8\x4d\xc4\x31\x1d\xee\xb2\x66\xc5\xde\xaf\xa9\x65\x5f\x76\xae\xb2\xd7\xbf\x29\xf9\x7a\x2e\xfd\xed\x91\xa0\x58\x70\xc5\xf2\xfa\x48\xd7\xcd\x22\x50\xcc\x76\xd2\x4a\xc0\xc8\x1d\x4a\xad\x93\xe2\xbc\x59\x78\x60\xb8\xdf\x38\x4d\xce\x56\xb8\xdb\x71\x14\x84\x61\xaa\x3d\x2d\x42\x79\x24\xa4\x9a\x8f\x6a\x3b\x1e\x7e\x17\xe9\x73\x24\xc4\xda\x8a\x4a\xfe\x68\xad\xbf\x35\x17\xab\xbb\x72\xdf\x32\xea\x5b\x17\x5a\xd5\x98\xc1\x6c\x86\xcb\x1d\xd2\x0a\xfe\xf4\xa9\x7e\xd1\xa6\x8c\x23\x1b\xbd\x99\x72\x88\x47\x8f\x7c\xf4\x85\x1f\xe1\xff\x73\x79\x3e\x9e\xfe\x04\x29\x22\xc3\xe1\xb5\xa2\x4a\x88\x79\x68\x46\x1a\x8f\x38\x29\x33\x24\xc2\xf2\x49\x78\x1e\xc5\xf4\xc7\x4e\x67\x92\x9e\x11\x5c\xac\xa9\x2a\xbc\xdb\x27\xd5\xd2\xad\x16\x6f\xe4\x9a\x0d\x7c\xaa\xe3\x97\x4a\x18\x48\x13\xe9\x7c\x0c\x68\x8c\x8a\x94\x21\xef\xad\x3b\x1f\xfd\x5a\xeb\x70\xcf\x52\xed\xa5\xf7\x68\x85\x6f\x21\x72\xee\xc0\x09\xf4\x56\xd9\x0c\xb6\x26\x2b\xc6\x25\xe7\xd6\x81\xcf\x8d\x3b\x4e\x49\x8a\x3a\x61\xbd\xa3\xbb\xf8\x9f\xee\x86\x9b\x11\xf2\xf7\x66\xc0\x8a\x51\xfd\x14\xe2\x2f\xec\xf4\x51\xb0\xff\xdf\x50\xb3\x02\x87\xab\xdc\xdf\x37\xdf\xad\xe9\x32\x6f\xea\x8e\x37\x47\xbf\x5b\x42\xaf\x64\x71\x70\xa7\x6a\x2d\x9d\xd3\x53\xc1\x6b\x9f\x03\x30\x90\xaf\xd4\x02\x6d\xea\x5c\x1a\x0e\x2a\x69\x6c\xb3\x6a\xbe\x9e\xbf\x4d\x78\x6b\x05\xfd\xeb\x10\x5b\x21\xf2\xe7\xc7\x7c\xc0\x24\xec\x30\x39\x5d\x33\xcb\xf8\x36\xc7\xe8\x8f\x22\x65\x53\x57\xad\x6e\x2f\xbb\x0d\xfd\x2d\x26\xa6\xba\xea\x0a\xf2\x71\x9a\x1f\xd4\x91\xbc\xb9\xb0\x55\x2c\x46\x03\x20\xe4\x98\x54\xca\x88\x34\x94\xfb\x1d\xd5\x81\xee\x2e\x8e\xf0\xcd\x48\x9f\x48\xea\xd5\x55\xc7\x31\x33\xd7\x53\x3f\xde\xd1\xd6\x49\x89\x99\xcf\xcb\x15\x91\xa3\xd7\xa8\xbb\x87\xa1\x2a\xf8\xd7\x28\x24\x72\x8e\x0e\x3f\x89\x89\x42\x91\x1d\xbc\x8a\x25\xc5\x7e\xab\x1e\x0d\xb9\xc1\x0f\x63\x6e\xb4\x67\x59\xbf\x88\xe2\x1c\x25\xa3\x76\xd3\x04\x8c\xf6\x87\x3d\x61\x6a\x3d\x3a\x81\x06\x97\x9d\x06\xc1\x56\x02\x0c\xf3\xab\xa1\xfa\xe0\x0a\x5b\x1f\x72\x9e\xe2\x54\xda\x58\x85\xf3\x4a\x77\xe3\x29\xfc\x25\x07\x41\x4f\xfd\x4e\xc7\x9e\x9b\xf4\xdd\xac\x7a\x95\xfb\x25\x23\x6c\xf5\x59\x55\x9c\x1f\xca\x6e\x0b\x04\x5b\xbc\x5b\x68\xd5\x13\x40\xff\x28\x42\xff\xd2\xa8\x32\xb6\xc1\x5f\xa4\x36\x40\x6c\x76\xb1\x40\x9c\xc7\xf7\x3a\xf1\x72\xca\xf6\x06\x78\x45\xde\xfe\xc6\xa5\x36\xd8\x50\xbe\xb7\xa6\x4a\xf3\x17\x03\x8f\x88\x9f\x86\xee\xd6\x60\xcd\xee\x95\x1d\x64\xab\xb5\x0a\x26\x9a\x6d\x77\xa4\x85\xc4\x3c\xab\x34\xa4\x1b\x35\xf9\x16\x98\x9c\x85\x87\xc4\xa6\x83\x43\xaf\x02\xb2\xb5\xb7\x4c\xbc\x37\xa2\x13\x0d\x37\x77\xc3\x07\xa3\x47\x17\xfa\x01\xba\xf4\x35\x4e\xfd\xca\x20\x69\xdd\x8c\x34\xd4\x27\x86\xc2\xfd\x39\xda\x7e\x85\x53\x31\x56\xe7\x32\xe7\x34\x6d\xe6\x95\xa2\xc8\xed\x3a\x65\x1b\xc6\x3d\x65\x36\x22\xb5\x63\xd2\xbe\x6f\x12\x86\x8d\xbc\x67\x49\x32\x98\x32\x9b\xcb\xe8\xbf\x6c\x59\x4d\xa6\xbc\xb3\x27\xdd\xc9\xd0\xf5\x92\x6d\xa7\x1d\xb9\x05\x5a\xc0\xae\x64\x3c\x36\x5f\xb6\xba\x80\xec\x26\xf7\xdf\xeb\xa8\x36\xc1\x16\x1e\x89\xd9\x4f\x7e\x53\x3e\x5b\x80\x36\xad\x5b\x9a\xae\x6a\xee\x8d\x63\x11\xf0\x87\x42\xf2\x0f\xbf\xf8\x5f\xf7\x4e\x03\xaf\x1d\xc0\x1b\xa6\x65\xb9\xfa\xef\x41\x1d\x86\xea\xf5\xb8\xec\x17\x6e\xf0\xb2\xa5\x02\x3e\xb0\x5d\xe1\x85\x42\x32\xfc\x23\x44\x22\x74\xd0\x03\xcc\xd6\x0c\x92\x1e\x74\x76\xef\x87\x4e\x31\xcc\xfa\x79\xd5\x17\xf7\xaf\x38\xdd\x0d\x7b\x81\xdd\x17\x64\x82\x5a\x0c\x9d\xe5\xc3\x66\x69\xf3\x6b\x03\x2d\x99\xec\x9d\x22\x06\xb7\xff\x2d\xc6\x7c\x89\x6a\x90\x4e\x5b\xd6\xd8\x67\x41\xa4\x01\x1d\x6a\x99\xc5\xd9\x80\x54\x4a\xb3
\x50\x99\xca\x6e\xcd\xda\x1c\x6a\x19\x4f\x00\x34\xf2\x67\x04\x5f\x13\x20\x74\xc6\xcd\x2c\xe3\x18\x97\x73\x81\x74\x43\x67\x99\x8c\xcd\x98\x8a\x6e\xd3\x62\x6e\xf6\x9c\x04\xe4\x7f\x13\xa0\x15\xcc\x33\xfe\x6f\x00\x8b\x03\x6d\x5c\x14\x9a\xee\xff\x48\xed\x09\x39\x72\x48\x44\x27\xed\xfc\x1c\x6b\x13\xf7\x96\xd6\xe4\xbe\x20\x28\x4c\xd8\xac\x7d\x79\xdf\x65\x4c\xcf\x1e\xf9\x94\xdc\xa5\xbb\x70\xa6\x44\xfa\x3d\x5e\x9b\x8e\xe9\x57\x13\x97\x8e\x88\xa3\xe9\x26\xfb\xc9\x90\xd9\xba\x53\x5d\x23\x82\x59\xda\x1d\xb1\xac\x1e\x7d\xf7\x53\x13\x01\x74\xc2\x55\xdf\x47\x50\x27\x53\xc4\x32\xb9\xb8\x73\xbd\xb3\xa4\x5a\x51\x5c\xd4\x74\xa4\x38\x53\x84\x1a\x5d\x74\xad\x1a\x7d\x53\x68\xf8\xbe\x2c\x97\x2b\x0e\x40\x28\x2c\xa8\xdb\x29\xca\x9d\xef\x8a\xd4\xdc\xee\xf9\xe7\x5f\x6c\x0d\x51\x3c\xec\xb9\xfb\x01\xbd\x2a\xc9\x8c\xcc\x48\x98\x06\x4e\x96\x95\x60\x67\x1c\x0f\xe1\x4f\xec\x7f\x31\x1c\xad\x6d\x5e\x3c\x90\x9c\x58\x89\x85\xd4\x72\xa2\x83\x38\x1d\x87\xe1\x4e\xfa\x7b\x47\x74\x31\xa6\x17\x28\xf9\x1b\x7b\x7c\x67\xc3\x39\x93\x7d\xf0\xc4\x41\x4e\xfe\x4c\x66\x15\x97\x07\x2e\x4e\xb4\xe4\x05\x8c\x52\x75\x45\x77\xf7\x47\x7d\xe8\x21\x92\x46\xa3\xeb\xdd\xe5\x06\xcc\x5f\x92\x1d\xb9\x8a\xc3\x67\x13\xb2\x6b\x5e\xf4\x96\xbc\x8d\xad\x17\x56\x6f\x07\xf0\x66\xf9\xd7\x2e\xf9\x97\xbd\x01\x44\xdd\xa5\xdd\x8c\x15\xed\x8d\xb4\x5c\x85\x68\x48\x87\xfb\x5f\x94\x21\xb8\xc0\xf4\x7f\x3d\x45\x35\xa4\x48\xb7\xa8\x9c\x1a\x69\x51\x0e\xbc\xa7\x22\x69\x7d\xcb\xf1\xd7\x44\x05\xc5\xe6\xf8\xc6\xff\x38\x63\x8c\x82\x77\xa3\xf7\xe9\xbd\x35\x89\xa5\x4c\x38\xdf\xd1\xe3\x36\x83\xf3\x4a\xa3\xaa\xd9\x4f\xd0\x6c\xa5\xd5\x43\xe3\xfa\x56\xb4\xf8\x99\x5a\x59\x85\x03\xd4\x04\x00\xdb\xc5\x2c\xc8\x9b\x4b\x24\xb8\xc4\x7e\xd1\x0a\xf4\x52\x84\x1e\x30\x28\x9a\x74\x24\xa3\x97\xf9\x49\x13\x69\x95\x10\x92\x53\xd1\x9f\x03\x35\xf6\xd5\x89\x98\xdc\x6f\xc9\x73\x3c\x45\xcb\xc6\x6a\x0b\xea\x5d\x7c\x6e\x10\xc6\x9d\x39\x57\x10\x52\x88\xad\x2f\xde\x42\xf8\x0a\x80\x72\x9f\xcb\xfc\x33\x03\x22\x8a\xca\xdf\x06\x97\x3d\xf2\xe2\xce\x40\x35\x39\xf5\x68\x26\xdb\x81\x2f\x28\x64\x53\xa1\xa7\x97\x50\x01\xa6\xae\xaf\x3a\x06\x19\x50\x41\x02\x2d\xc5\xf7\x38\x93\x11\xff\xd0\x3b\x10\x47\x31\xda\xf0\x7e\x0a\x08\xb2\x2b\x1c\xc1\x21\xde\x56\x85\xa7\x5c\x62\xd6\xbd\xda\x5b\xed\xea\xc0\x9d\x36\x0d\x9a\x59\x60\x32\xc1\xe0\x76\x82\xab\xed\xa1\x6d\x4f\xd1\xc7\x59\x52\xca\x07\x25\xb4\x0f\x97\x51\x8d\x9d\x2c\x4f\x95\x7f\x99\xf1\x3b\x96\x8e\x99\x95\x1b\x67\xa3\xe8\x31\xb0\x2d\x5d\x74\xb7\xab\xe4\xe0\x0f\xd8\xfa\xa4\xd2\x9e\xd2\x22\xb2\x6d\xb1\x40\x02\x74\xd5\x9f\x83\x56\x16\x13\x1e\x0c\x89\xbd\x12\x6f\x4a\x2c\x04\x5c\x83\x35\xfa\x71\x35\x9e\xc8\xfe\xbd\x02\x12\xbb\xd3\x03\x84\xd5\xb2\x71\xc2\x3c\xef\x6a\xb0\x83\x8e\x10\x1a\x96\x1c\x04\xda\x62\x16\x70\xec\xcf\x22\x04\x44\xe1\xb1\xb3\xc4\x41\x0a\x12\x48\x01\x02\x93\x84\x55\x19\x9c\xf4\x59\x1c\x82\x33\x09\x8e\x7f\x44\x83\xee\x58\xe4\xb5\xf2\xd9\xa9\xda\x6f\xb6\x32\x85\x85\x12\xa5\x3c\x93\xee\xfc\xd1\x82\x24\xdf\xe4\x79\x75\x56\xaa\x5f\xf7\x9f\x90\xa0\xa4\xea\x03\xd9\x3e\xe4\x51\x2c\x62\x24\x14\xe8\xfd\xbd\xa0\x15\x89\xd7\x59\xe4\xd6\x75\x45\x68\xfd\x70\x69\xbd\x6f\xb9\x98\xbb\x48\xbb\x62\x77\xa6\xe5\xac\x95\x37\x61\xf0\xe3\x83\x1f\x9a\x1b\x4c\xac\x41\xcc\xce\xa5\x6a\x3f\x67\x3a\xa3\x96\xdc\x6b\x39\x21\x69\x16\x7f\x6d\x7a\x74\x13\x61\xb7\x51\x0d\x9c\xc2\x4f\xb8\x53\x2d\xe4\xa8\x51\x30\x76\x4b\x44\x8a\xf6\x5a\xe2\x66\xe6\x19\x41\xcd\x64\xf4\x9e\x15\x22\xca\xc9\x67\xc4\xf8\x95\x2f\x03\x6e\x73\x7a\x99\x7e\x57\xdf\x80\xbf\x66\xea\xda\xef\x90\x39\x0d\x8c\x0a\xf7\x36\x58\x72\x44\xf9\xd0\xe4\x8a\xcd\x08\x40\xc8\x80\xeb\x4e\xd1\x4f\x7f\xb8\xfd\x96\x99\xa6\x0a\xce\xc6\x03\x56\x46\xc
8\xf9\x63\x82\xe6\xb1\xfa\x66\x15\x18\x3f\x83\xc2\x52\x19\xde\xdf\xf4\x7a\xa1\xb3\x83\xfa\xfb\xcf\x4b\x30\x61\x29\x88\xb0\x14\x96\x2f\x74\x79\x99\x13\xaf\x03\x30\x60\xc9\x4d\xb2\x07\x25\x37\x84\x58\x67\x11\x73\x39\xaa\x8e\xab\x27\x20\x99\x74\xda\x2d\xfa\x79\x33\x5a\x24\xa2\xb8\x47\x44\xd4\x59\x2e\x6b\x47\xef\x36\x48\xaf\x5a\x2f\x32\x80\x00\xa9\xfe\x52\x69\xb8\xb1\xa1\x7d\xeb\x92\xda\x9e\xdf\x71\x86\xba\x2b\x89\xdb\x8e\x41\x67\x82\x94\x45\xaf\x6c\x08\xc4\xc2\x7c\xa9\x04\xc2\xad\x0f\xb0\x4b\xa1\xca\x88\x5d\x1f\x58\xeb\x67\xc5\x8b\x4a\x0c\xb1\xfb\x86\x24\x97\x51\xcb\x19\x8b\xe9\x5a\xfa\x3c\x3b\x9b\x21\xe1\xda\x41\x58\xd3\x1c\xf9\x6d\xda\xc4\xc0\x0a\x2d\x4f\xcd\x0a\xb6\x32\x85\x28\x3e\xf0\xf3\xae\x6a\x41\x66\x60\xf6\xc4\x86\x86\x5b\x06\x8e\x79\x55\xdd\x52\x32\xd8\x72\xde\xe6\x40\x9b\xf4\x74\xea\x5a\x8c\x01\x7d\xe1\x90\xe8\xcf\xd0\x04\x87\xa3\x64\xdd\x54\xa5\x27\x6d\x95\x14\x16\x62\xe6\x7a\x7d\x57\x92\x1b\x99\x26\xc8\x4e\x5f\xf8\x25\x01\xe0\x3c\xad\xe8\x56\x4c\x9f\x91\xf4\xe8\x74\x5f\x3a\xac\x25\xf5\x5a\x2e\x46\x65\xa3\xb0\xa9\xb0\xef\xd8\xcc\x46\x20\x61\xf8\x4f\x06\x59\xbe\x30\xe2\xdb\xde\x75\x8e\x24\x26\xb9\xa9\x0a\xd1\xbf\xcd\x5a\xbd\xe3\xbd\x72\xce\x34\x98\x04\xc0\x8f\x1f\xe7\xf4\x52\x6a\x42\xde\x7c\x82\xa5\x28\x3a\x58\x8e\x3c\xc9\xc5\xa8\x5b\xf1\x51\x24\xfd\x7a\xed\x99\xfb\x26\x23\xda\xa3\x41\x97\x1c\xf0\xa8\x14\x67\x17\x7a\xa1\x8c\x94\x20\x95\x72\xae\x32\x51\xf7\xf5\xe6\xd2\x66\xa7\xc2\x60\x78\xba\x9b\x82\xe8\x12\xfc\x0c\x69\xaa\x9c\x07\xfb\xf2\x9f\x15\xa8\x24\x99\x0f\x3f\xe0\xcd\x26\x3f\x7e\x82\x59\xcb\x97\x3b\x4f\xbf\xa4\xa9\x0e\xea\x94\xf1\x28\x49\x52\x2d\xa5\x04\x11\x41\xb6\x38\x43\x8d\x41\x16\x26\xbf\xea\xf2\x31\xab\x12\xcb\x32\xd1\xaa\xb8\x19\xcc\xaa\xdd\x8a\x9b\xb9\xc7\x28\x97\x0b\xdc\xb2\xd5\xfe\x9c\xe7\xf1\xaa\xc5\xdc\x1e\x7a\xa8\x50\xd7\xc9\xe4\x65\xf8\x79\x91\x46\xe9\x3b\x4e\x80\xd7\xc6\x50\x6d\xc0\x4a\x03\x32\x89\x1a\x2c\xb6\x55\x72\xe5\x2c\xf0\x1c\xe5\xa5\xd8\xf7\xfc\x1a\xd4\x61\x10\x0b\x1c\xbe\xa6\x56\xfc\x95\x71\x04\xcb\x82\x0a\xf6\x02\xdf\x0e\xe0\xaa\xb4\x74\x80\xa8\x11\xd4\x70\xc5\x68\x99\xc7\xc3\xbd\xb1\x86\x66\xa0\xa0\xc5\x08\x86\x1b\xb5\x93\x7c\x7b\xf5\xeb\x4a\xcc\x46\xa1\x02\xbe\x99\x4b\xf1\x44\x29\x23\xcf\xf5\x8f\xcb\x9d\x3d\x15\x91\x74\xea\xdc\xbc\x8c\x21\x61\x06\x90\x9f\x0c\xbc\x40\x8e\x1a\xda\xab\xa1\xe0\xe7\x90\xd2\x3c\x98\xf0\xae\x06\xc2\x72\x1f\x58\xfd\x5a\xbc\x82\x91\x43\x26\x2a\xb1\x1f\x56\x24\x60\x9c\xee\x3d\xfb\x18\x15\x83\x56\xd5\x31\x6a\x26\xdd\xfa\x36\x51\x9c\x0d\x88\x61\x9a\x09\xe4\x10\xfe\x85\xd8\xa4\xdd\x14\x86\x4b\x99\x13\x4a\x79\xb3\xf8\xfa\x88\xc5\x6d\xe9\xa2\xfb\x52\x13\xcc\x1b\x3d\xdb\x67\x9f\x18\x50\xdf\x86\x76\x60\x74\xaf\x69\x4d\x88\xde\x6e\x9a\xc5\x16\xd5\x7c\x01\x67\x94\x35\x92\xe6\x02\x43\x5d\x4a\xea\x24\xee\x62\x31\x83\xe7\x5e\x7e\x66\xb4\xa7\x43\x26\xe8\xe6\x43\x35\x03\x22\x32\xb1\x68\x80\xf3\x5d\x98\xb1\x5d\x2d\xb6\xfd\x8c\x1c\x82\x7d\x86\x2b\xd2\x3a\xc3\x6b\x34\x23\x77\x24\xb6\x7e\x7e\x0f\xee\x4d\xf3\x50\xe0\xc0\x42\x76\xce\x65\x86\xcc\xf9\x61\xed\x9a\x97\x02\xb7\x6e\x2e\x5f\xd4\x5c\x9b\x18\xe9\x54\x2a\x83\x9a\xc1\x87\xa5\x66\xb1\x39\x66\x52\x0e\xc4\x81\xa0\xe4\x5d\x80\xdc\xdd\x18\x93\xe6\x2c\x57\x58\x64\x5f\x65\x86\xc8\xb1\xeb\x72\x0c\x22\x46\x50\x92\xec\xfa\x71\x9b\xb6\x15\x85\x86\x92\x3a\x65\x5d\x78\x65\x4c\x30\xba\x75\x66\xe6\x9e\xbb\x48\xcf\xd5\x68\x91\xe2\xdd\x7c\xe4\xe3\xa7\xcd\x3a\xec\x3c\xfa\x79\xa7\x2a\xcb\x78\x3a\x8a\x96\xf0\x96\xf9\xa1\x3f\x91\x62\x3c\x5e\x9f\x3c\x6c\x98\x17\x24\x1a\xa5\x97\x28\x35\xa4\x41\x46\x56\xc6\x59\x0f\xe4\x61\x11\xc2\x09\x1e\x4a\xc5\x55\x8f\xe5\xe3\xd9\x06\x70\x7f\xde\xe8\xa1\xcd\x3c\x0c\x4c\xdf\x50\x
03\xb3\x17\x21\x77\x2f\x70\x86\x4d\x6a\x78\x19\x09\x2d\x6d\x0c\x8a\x86\x01\x88\xbe\x96\x13\x8a\x4c\x6d\x8f\x44\x91\x02\x9c\x14\xd2\x16\x90\x52\x8a\x4a\xb2\xfe\x9a\xbc\xff\x4c\x7d\xf4\x1e\xde\x23\xd1\xb2\x78\x81\xdd\xd7\x8e\x79\xe6\xe5\x54\xb1\x46\x79\x54\x6c\x76\x26\x32\x5f\xd6\x22\x54\x0b\xb0\xaa\x03\xb6\x1f\xa6\xdc\xa0\x30\x88\xe0\x08\x1f\x6b\x07\x2f\x31\xa3\x62\x8b\x14\xc7\x93\x92\x7c\x36\x48\x32\xf0\xb8\xa2\xac\xfb\x4b\x30\x0a\x55\xe4\x8c\x7a\xf4\x68\xfd\xe6\x76\x74\x86\xc4\x0f\x14\xfc\x28\x05\x1b\x40\xef\xb8\x53\x8a\xc8\x8b\xaa\x56\xdb\x54\x5b\x1e\xdd\x43\x46\x38\xd3\x33\x52\xb6\x8f\xa9\xd9\x84\xd5\x3b\x8e\x3a\x8f\x49\xdb\xc5\x4b\x78\xd0\x9d\x87\x4a\x04\x2b\x36\x0e\x26\x5b\xc1\x77\x46\x93\xed\xc3\xdd\x43\xe1\xeb\x7d\x6d\x7b\x03\xef\x66\x55\x15\x86\x35\x68\x84\x14\xd0\x1c\xf8\x63\x06\xf0\xec\x00\x8a\xc5\x99\xa1\x7a\x01\x04\xe1\xef\x3d\xc6\x90\x67\xe7\x49\xf2\x7b\xe3\x31\x56\x3b\xa7\xb6\x02\xfd\x8b\xdf\x9f\xcf\x78\xd3\x8c\x73\xa6\x13\xe7\xc9\x4a\xbc\x3b\x04\xaf\xc3\xef\x27\x4b\xa1\xc6\x86\x6c\xa3\x6d\x44\x98\x8c\xef\x5e\x58\x18\x3d\xff\x30\xb9\x7f\xa8\xd9\x10\xe1\x52\x11\x94\xae\x0e\x5b\xd7\x0e\xc2\x0c\xbf\x46\xf6\x37\x49\x93\xc3\x5d\x40\x73\x05\xd6\x6e\x4f\x61\x58\x9a\x83\xa5\xd1\x16\xf1\xc8\xbf\xc4\x8d\xd7\x21\xeb\x6d\x7b\x5f\xaa\xdb\xf4\x65\x99\xac\xb9\xdf\xed\x6b\x21\x68\x3c\x38\x44\x2b\x86\xc7\x54\x29\xd4\xc7\x26\x0a\xe0\x61\xce\xb8\x7b\x1b\xff\xf9\x20\x26\x7d\x5e\x56\x36\x48\xbe\x2c\x87\x05\x8a\x79\x98\x98\x13\x1c\x1a\x4e\x2a\xa8\x46\xf5\xe3\x8e\xcd\x89\x31\x13\xce\xdb\x79\x00\x3f\x4a\x20\xfd\x60\x64\x19\xcf\x51\x80\x7b\x3d\xdb\xa4\x2a\x89\xdd\xb4\x3e\x8b\xad\x42\x00\x17\x00\xde\xc6\x69\x3d\xcb\x48\xf4\x38\x1c\x1c\x37\xf0\x50\xa2\x99\xba\x15\xb8\x09\x87\x2d\x11\x96\x49\xc2\xe1\x70\x27\x6b\x3a\x35\x3a\x9c\xb9\x75\x25\x8d\x39\xaa\xc2\xdc\xa1\xe7\xd3\xab\xb7\x30\xee\x3d\xf6\x5e\x87\xf7\x34\x15\xfe\x46\xa7\x03\x03\x3e\xe2\x84\xbc\x23\x50\xf1\x0d\x43\x99\x8c\xed\x2c\x7b\x01\x47\x7b\x6b\xbd\x53\x7f\xc8\x1e\x52\x34\xed\x59\x19\x4e\x9d\xfa\x42\x2c\xf3\x92\x36\xb4\x24\x4b\xd7\xfc\x85\x2d\x19\xa9\x82\x8a\x1b\xb2\xa6\x07\xf4\x43\x24\x91\x31\x4d\xeb\x08\xb6\x7d\x29\x50\x56\x2c\x25\x09\x71\x3d\xfc\x1b\xdb\x4e\x47\xdb\x0c\xba\x99\xd1\xe6\x20\x55\x34\xe9\x41\x68\x3e\x34\xb6\x43\xbf\x58\x55\xf8\x85\xfa\x61\xd1\x76\x09\x7a\xd1\xff\x31\xf3\x31\xc3\xdd\x9e\xf9\x63\x07\x88\xdb\x06\xff\x69\xc3\xf5\xf7\xe9\x78\x48\x4f\x58\x5b\xdb\x7e\xcb\x62\xb1\x56\x52\x11\xe3\x73\x25\x47\x47\x70\x0f\x71\x92\x71\x45\x45\xcd\xbf\x3e\xae\x7a\x5e\x8d\x49\x47\x5e\x22\xf5\x3e\x2a\xdb\x3d\x35\xfb\x52\xea\x04\x27\x8e\xe7\x16\x21\x73\x3d\x47\xde\x37\xc2\xaf\x14\x15\xec\x84\x54\x87\x9d\x9f\x3f\x1a\x1e\x23\xa9\x2c\x58\x2c\xa4\x1f\x46\xbe\xd9\xe6\x83\xf5\xca\xb8\xa8\x77\xc8\x52\x02\x2e\xeb\xe5\x44\x39\x7a\x73\x3e\x26\x7f\x67\x05\xf8\xbf\xc0\x07\xeb\x5f\x38\xe8\x1c\x94\x58\xb3\xc4\xc1\xf0\x3e\x71\x29\x4a\x16\x89\x70\x1b\x3e\x03\x75\xa0\xdf\xe3\xdd\x4d\x97\x62\xba\x43\xe9\x48\xfa\x08\x84\xf9\x0f\x3a\x20\xda\xed\x23\x00\xf2\x96\xa7\x07\x87\x67\xff\xae\x01\xf3\x95\x5c\xe1\x35\x4c\x16\xeb\x98\x9d\xa5\x1a\x1d\x64\x4a\x80\x13\x89\x21\xca\xbb\x3a\x98\x27\x62\x06\x8f\x7d\xb4\x6f\x47\x85\x20\xcc\x94\x13\x57\x29\x95\xb8\x5e\x66\x35\x9b\xd7\x2f\xb6\xc5\x6a\x8c\x57\x3c\x0d\x28\x99\x0a\xa2\x2f\xeb\xb7\xdf\x57', 1)
| 45,879
| 45,879
| 0.749995
| 11,465
| 45,879
| 3.000174
| 0.022765
| 0.000698
| 0.000523
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.311688
| 0.000065
| 45,879
| 1
| 45,879
| 45,879
| 0.438094
| 0
| 0
| 0
| 0
| 1
| 0.999128
| 0.999128
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
581f3b5ce9f6369e45c63e05e51edc334e2df31e
| 403
|
py
|
Python
|
tigercontrol/utils/optimizers/__init__.py
|
MinRegret/TigerControl
|
b1ca0617cbb2198f9d5cb37f725f3d7accbab08f
|
[
"Apache-2.0"
] | 31
|
2019-11-08T06:01:54.000Z
|
2021-11-20T04:50:43.000Z
|
tigercontrol/utils/optimizers/__init__.py
|
johnhallman/ctsb
|
b1ca0617cbb2198f9d5cb37f725f3d7accbab08f
|
[
"Apache-2.0"
] | 32
|
2019-06-27T15:05:04.000Z
|
2019-08-07T04:23:47.000Z
|
tigercontrol/utils/optimizers/__init__.py
|
MinRegret/tigercontrol
|
b1ca0617cbb2198f9d5cb37f725f3d7accbab08f
|
[
"Apache-2.0"
] | 3
|
2020-09-30T17:06:50.000Z
|
2021-04-12T22:39:34.000Z
|
# controllers/optimizers init file
from tigercontrol.utils.optimizers.core import Optimizer
from tigercontrol.utils.optimizers.sgd import SGD
from tigercontrol.utils.optimizers.ogd import OGD
from tigercontrol.utils.optimizers.ons import ONS
from tigercontrol.utils.optimizers.adam import Adam
from tigercontrol.utils.optimizers.adagrad import Adagrad
from tigercontrol.utils.optimizers.losses import *
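A minimal usage sketch of the import surface this __init__.py exposes (an assumption: it presumes the tigercontrol package is installed; the file above does not show constructor signatures, so the optimizer is created with defaults only):

from tigercontrol.utils.optimizers import Optimizer, SGD, Adam

opt = SGD()  # hypothetical default construction; no arguments are assumed,
             # since the re-exporting file above does not show signatures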
| 44.777778
| 57
| 0.861042
| 52
| 403
| 6.673077
| 0.307692
| 0.322767
| 0.423631
| 0.62536
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.079404
| 403
| 9
| 58
| 44.777778
| 0.93531
| 0.079404
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
587174300d49a210bd451506082d9911142e73df
| 5,218
|
py
|
Python
|
client/verta/verta/_swagger/_public/uac/api/CollaboratorApi.py
|
CaptEmulation/modeldb
|
78b10aca553e386554f9740db63466b1cf013a1a
|
[
"Apache-2.0"
] | 835
|
2017-02-08T20:14:24.000Z
|
2020-03-12T17:37:49.000Z
|
client/verta/verta/_swagger/_public/uac/api/CollaboratorApi.py
|
CaptEmulation/modeldb
|
78b10aca553e386554f9740db63466b1cf013a1a
|
[
"Apache-2.0"
] | 651
|
2019-04-18T12:55:07.000Z
|
2022-03-31T23:45:09.000Z
|
client/verta/verta/_swagger/_public/uac/api/CollaboratorApi.py
|
CaptEmulation/modeldb
|
78b10aca553e386554f9740db63466b1cf013a1a
|
[
"Apache-2.0"
] | 170
|
2017-02-13T14:49:22.000Z
|
2020-02-19T17:59:12.000Z
|
# THIS FILE IS AUTO-GENERATED. DO NOT EDIT
class CollaboratorApi:
def __init__(self, client, base_path = "/v1"):
self.client = client
self.base_path = base_path
def addOrUpdateDatasetCollaborator(self, body=None):
__query = {
}
if body is None:
raise Exception("Missing required parameter \"body\"")
format_args = {}
path = "/collaborator/addOrUpdateDatasetCollaborator"
if "$body" in path:
path = path.replace("$body", "%(body)s")
format_args["body"] = body
ret = self.client.request("POST", self.base_path + path % format_args, __query, body)
if ret is not None:
from ..model.UacAddCollaboratorRequestResponse import UacAddCollaboratorRequestResponse
ret = UacAddCollaboratorRequestResponse.from_json(ret)
return ret
def addOrUpdateProjectCollaborator(self, body=None):
__query = {
}
if body is None:
raise Exception("Missing required parameter \"body\"")
format_args = {}
path = "/collaborator/addOrUpdateProjectCollaborator"
if "$body" in path:
path = path.replace("$body", "%(body)s")
format_args["body"] = body
ret = self.client.request("POST", self.base_path + path % format_args, __query, body)
if ret is not None:
from ..model.UacAddCollaboratorRequestResponse import UacAddCollaboratorRequestResponse
ret = UacAddCollaboratorRequestResponse.from_json(ret)
return ret
def getDatasetCollaborators(self, entity_id=None):
__query = {
"entity_id": client.to_query(entity_id)
}
body = None
format_args = {}
path = "/collaborator/getDatasetCollaborators"
if "$entity_id" in path:
path = path.replace("$entity_id", "%(entity_id)s")
format_args["entity_id"] = entity_id
ret = self.client.request("GET", self.base_path + path % format_args, __query, body)
if ret is not None:
from ..model.UacGetCollaboratorResponse import UacGetCollaboratorResponse
ret = UacGetCollaboratorResponse.from_json(ret)
return ret
def getProjectCollaborators(self, entity_id=None):
__query = {
"entity_id": client.to_query(entity_id)
}
body = None
format_args = {}
path = "/collaborator/getProjectCollaborators"
if "$entity_id" in path:
path = path.replace("$entity_id", "%(entity_id)s")
format_args["entity_id"] = entity_id
ret = self.client.request("GET", self.base_path + path % format_args, __query, body)
if ret is not None:
from ..model.UacGetCollaboratorResponse import UacGetCollaboratorResponse
ret = UacGetCollaboratorResponse.from_json(ret)
return ret
def removeDatasetCollaborator(self, entity_id=None, share_with=None, date_deleted=None, authz_entity_type=None):
__query = {
"entity_id": client.to_query(entity_id),
"share_with": client.to_query(share_with),
"date_deleted": client.to_query(date_deleted),
"authz_entity_type": client.to_query(authz_entity_type)
}
body = None
format_args = {}
path = "/collaborator/removeDatasetCollaborator"
if "$entity_id" in path:
path = path.replace("$entity_id", "%(entity_id)s")
format_args["entity_id"] = entity_id
if "$share_with" in path:
path = path.replace("$share_with", "%(share_with)s")
format_args["share_with"] = share_with
if "$date_deleted" in path:
path = path.replace("$date_deleted", "%(date_deleted)s")
format_args["date_deleted"] = date_deleted
if "$authz_entity_type" in path:
path = path.replace("$authz_entity_type", "%(authz_entity_type)s")
format_args["authz_entity_type"] = authz_entity_type
ret = self.client.request("DELETE", self.base_path + path % format_args, __query, body)
if ret is not None:
from ..model.UacRemoveCollaboratorResponse import UacRemoveCollaboratorResponse
ret = UacRemoveCollaboratorResponse.from_json(ret)
return ret
def removeProjectCollaborator(self, entity_id=None, share_with=None, date_deleted=None, authz_entity_type=None):
__query = {
"entity_id": client.to_query(entity_id),
"share_with": client.to_query(share_with),
"date_deleted": client.to_query(date_deleted),
"authz_entity_type": client.to_query(authz_entity_type)
}
body = None
format_args = {}
path = "/collaborator/removeProjectCollaborator"
if "$entity_id" in path:
path = path.replace("$entity_id", "%(entity_id)s")
format_args["entity_id"] = entity_id
if "$share_with" in path:
path = path.replace("$share_with", "%(share_with)s")
format_args["share_with"] = share_with
if "$date_deleted" in path:
path = path.replace("$date_deleted", "%(date_deleted)s")
format_args["date_deleted"] = date_deleted
if "$authz_entity_type" in path:
path = path.replace("$authz_entity_type", "%(authz_entity_type)s")
format_args["authz_entity_type"] = authz_entity_type
ret = self.client.request("DELETE", self.base_path + path % format_args, __query, body)
if ret is not None:
from ..model.UacRemoveCollaboratorResponse import UacRemoveCollaboratorResponse
ret = UacRemoveCollaboratorResponse.from_json(ret)
return ret
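A self-contained sketch of how this generated client can be exercised offline. The stub below is a hypothetical stand-in for verta's real HTTP client and implements only the two methods the generated code calls (to_query and request):

# Hypothetical stand-in for the real client object; implementing just
# to_query and request lets the generated methods run without a server.
class StubClient:
    def to_query(self, value):
        # The real client presumably serializes values for the query string.
        return value
    def request(self, method, path, query, body):
        # The real client performs an HTTP request; here we echo the call
        # and return None, so the generated method skips deserialization.
        print(method, path, query)
        return None

api = CollaboratorApi(StubClient())
api.getProjectCollaborators(entity_id="proj-123")
# prints: GET /v1/collaborator/getProjectCollaborators {'entity_id': 'proj-123'}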
| 37.539568
| 114
| 0.692603
| 625
| 5,218
| 5.5056
| 0.096
| 0.074397
| 0.069747
| 0.048823
| 0.875036
| 0.875036
| 0.874164
| 0.874164
| 0.874164
| 0.874164
| 0
| 0.000237
| 0.191836
| 5,218
| 138
| 115
| 37.811594
| 0.815746
| 0.007666
| 0
| 0.813559
| 1
| 0
| 0.193006
| 0.054482
| 0
| 0
| 0
| 0
| 0
| 1
| 0.059322
| false
| 0
| 0.050847
| 0
| 0.169492
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5879c70611b7b93a670b6478e181a6a55235753b
| 188
|
py
|
Python
|
src/get_response/__init__.py
|
skvozsneg/api-response
|
b81998da9dec08c7c55a0f7c952453695e135ce4
|
[
"MIT"
] | 1
|
2022-02-11T12:47:37.000Z
|
2022-02-11T12:47:37.000Z
|
src/get_response/__init__.py
|
skvozsneg/get-response
|
b81998da9dec08c7c55a0f7c952453695e135ce4
|
[
"MIT"
] | null | null | null |
src/get_response/__init__.py
|
skvozsneg/get-response
|
b81998da9dec08c7c55a0f7c952453695e135ce4
|
[
"MIT"
] | null | null | null |
from get_response.interface import get_response
from get_response.interface import get_json
from get_response.interface import get_soap
__all__ = ['get_response', 'get_json', 'get_soap']
| 31.333333
| 50
| 0.829787
| 28
| 188
| 5.107143
| 0.285714
| 0.384615
| 0.314685
| 0.503497
| 0.692308
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095745
| 188
| 5
| 51
| 37.6
| 0.841176
| 0
| 0
| 0
| 0
| 0
| 0.148936
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
544ab9c9b220c9c0e107a9497ada5a9088c8e612
| 1,896
|
py
|
Python
|
tests/test_range.py
|
sgrah-oss/haystackapi
|
dc6000120e5ef97b174bb1440460ce170f22026e
|
[
"BSD-2-Clause",
"Apache-2.0"
] | null | null | null |
tests/test_range.py
|
sgrah-oss/haystackapi
|
dc6000120e5ef97b174bb1440460ce170f22026e
|
[
"BSD-2-Clause",
"Apache-2.0"
] | null | null | null |
tests/test_range.py
|
sgrah-oss/haystackapi
|
dc6000120e5ef97b174bb1440460ce170f22026e
|
[
"BSD-2-Clause",
"Apache-2.0"
] | null | null | null |
from datetime import datetime, date, timedelta
import pytz
from haystackapi import parse_date_range
def test_date_range_empty():
date_min, date_max = parse_date_range("", pytz.UTC)
assert date_min == datetime.min.replace(tzinfo=pytz.UTC)
assert date_max == datetime.max.replace(tzinfo=pytz.UTC)
def test_date_range_today():
date_min, date_max = parse_date_range("today", pytz.UTC)
assert date_min == datetime.combine(date.today(), datetime.min.time()) \
.replace(tzinfo=pytz.UTC)
assert date_max == date_min + timedelta(days=1, milliseconds=-1)
def test_date_range_yesterday():
date_min, date_max = parse_date_range("yesterday", pytz.UTC)
assert date_min == datetime.combine(date.today() - timedelta(days=1), datetime.min.time()) \
.replace(tzinfo=pytz.UTC)
assert date_max == date_min + timedelta(days=1, milliseconds=-1)
def test_date_range_date():
date_min, date_max = parse_date_range("2020-12-24", pytz.UTC)
assert date_min == datetime(2020, 12, 24).replace(tzinfo=pytz.UTC)
assert date_max == date_min + timedelta(days=1, milliseconds=-1)
def test_date_range_date_date():
date_min, date_max = parse_date_range("2020-12-24,2020-12-25", pytz.UTC)
assert date_min == datetime(2020, 12, 24).replace(tzinfo=pytz.UTC)
assert date_max == datetime(2020, 12, 25).replace(tzinfo=pytz.UTC)
def test_date_range_datetime():
date_min, date_max = parse_date_range("2020-12-24T00:00:00+00:00", pytz.UTC)
assert date_min == datetime(2020, 12, 24, tzinfo=pytz.UTC)
assert date_max == date_min + timedelta(days=1, milliseconds=-1)
def test_date_range_datetime_datetime():
date_min, date_max = parse_date_range("2020-12-24T00:00:00+00:00,2020-12-25T00:00:00+00:00", pytz.UTC)
assert date_min == datetime(2020, 12, 24, tzinfo=pytz.UTC)
assert date_max == datetime(2020, 12, 25, tzinfo=pytz.UTC)
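The boundary convention these tests assert (a range's end falls one millisecond before the next day) can be checked directly; a quick sketch, assuming haystackapi and pytz are installed:

import pytz
from haystackapi import parse_date_range

date_min, date_max = parse_date_range("2020-12-24", pytz.UTC)
print(date_min)  # 2020-12-24 00:00:00+00:00
print(date_max)  # 2020-12-24 23:59:59.999000+00:00 (next day minus 1 ms)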
| 37.92
| 106
| 0.722046
| 298
| 1,896
| 4.355705
| 0.107383
| 0.097072
| 0.140216
| 0.183359
| 0.857473
| 0.85131
| 0.829738
| 0.765023
| 0.677966
| 0.610169
| 0
| 0.082924
| 0.14135
| 1,896
| 49
| 107
| 38.693878
| 0.714373
| 0
| 0
| 0.30303
| 0
| 0.030303
| 0.063819
| 0.05116
| 0
| 0
| 0
| 0
| 0.424242
| 1
| 0.212121
| true
| 0
| 0.090909
| 0
| 0.30303
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
5477b42ad1d4ee34b0546fc3f14274609066d359
| 1,188
|
py
|
Python
|
tests/test_rst_list.py
|
macfreek/restbuilder
|
1492b304b958ba687da8ab148f23f4aa0b699479
|
[
"BSD-2-Clause"
] | 23
|
2018-06-06T13:20:08.000Z
|
2022-02-10T07:21:14.000Z
|
tests/test_rst_list.py
|
macfreek/restbuilder
|
1492b304b958ba687da8ab148f23f4aa0b699479
|
[
"BSD-2-Clause"
] | 23
|
2018-03-19T15:56:31.000Z
|
2022-02-09T13:06:55.000Z
|
tests/test_rst_list.py
|
macfreek/restbuilder
|
1492b304b958ba687da8ab148f23f4aa0b699479
|
[
"BSD-2-Clause"
] | 22
|
2018-05-21T17:16:33.000Z
|
2022-02-17T05:44:31.000Z
|
from tests.utils import run_parse_test
import pytest
def test_bullet_list(src_dir, expected_dir, output_dir):
run_parse_test(src_dir, expected_dir, output_dir, 'common', ['bullet-list'])
def test_ordered_list(src_dir, expected_dir, output_dir):
run_parse_test(src_dir, expected_dir, output_dir, 'common', ['ordered-list'])
def test_nested_list(src_dir, expected_dir, output_dir):
run_parse_test(src_dir, expected_dir, output_dir, 'common', ['nested-list'])
def test_multiline_list(src_dir, expected_dir, output_dir):
run_parse_test(src_dir, expected_dir, output_dir, 'common', ['multiline-list'])
@pytest.mark.skip(reason="work in progress")
def test_ordered_list_properties(src_dir, expected_dir, output_dir):
run_parse_test(src_dir, expected_dir, output_dir, 'common', ['ordered-list-properties'])
@pytest.mark.skip(reason="work in progress")
def test_bullet_list_consecutive(src_dir, expected_dir, output_dir):
run_parse_test(src_dir, expected_dir, output_dir, 'common', ['bullet-list-consecutive'])
def test_definition_list(src_dir, expected_dir, output_dir):
run_parse_test(src_dir, expected_dir, output_dir, 'common', ['definition-list'])
| 34.941176
| 92
| 0.77862
| 181
| 1,188
| 4.701657
| 0.154696
| 0.098707
| 0.230317
| 0.279671
| 0.745006
| 0.745006
| 0.745006
| 0.745006
| 0.745006
| 0.648649
| 0
| 0
| 0.099327
| 1,188
| 33
| 93
| 36
| 0.795327
| 0
| 0
| 0.111111
| 0
| 0
| 0.15404
| 0.038721
| 0
| 0
| 0
| 0
| 0
| 1
| 0.388889
| false
| 0
| 0.111111
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5498d5109f3e89f95857705f8f3eed7ca6b2609c
| 2,157
|
py
|
Python
|
cryptocmp/api/price/test_single.py
|
OkThought/cryptocmp
|
85519fec77b271b9608098395505d5e546427909
|
[
"MIT"
] | null | null | null |
cryptocmp/api/price/test_single.py
|
OkThought/cryptocmp
|
85519fec77b271b9608098395505d5e546427909
|
[
"MIT"
] | 1
|
2018-08-14T11:55:44.000Z
|
2018-08-14T11:56:28.000Z
|
cryptocmp/api/price/test_single.py
|
OkThought/cryptocmp
|
85519fec77b271b9608098395505d5e546427909
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
import cryptocmp.api.price.single
class SinglePriceTestCase(TestCase):
def test_single(self):
data = cryptocmp.api.price.single.get('BTC', 'USD')
self.assertIsInstance(data, dict)
self.assertIn('USD', data)
del data['USD']
self.assertDictEqual(dict(), data,
'result contains more than requested')
def test_single_tuple(self):
data = cryptocmp.api.price.single.get('BTC', ('USD',))
self.assertIsInstance(data, dict)
self.assertIn('USD', data)
del data['USD']
self.assertDictEqual(dict(), data,
'result contains more than requested')
def test_single_list(self):
data = cryptocmp.api.price.single.get('BTC', ['USD'])
self.assertIsInstance(data, dict)
self.assertIn('USD', data)
del data['USD']
self.assertDictEqual(dict(), data,
'result contains more than requested')
def test_multiple_comma_separated_string(self):
data = cryptocmp.api.price.single.get('BTC', 'USD,EUR')
self.assertIsInstance(data, dict)
self.assertIn('USD', data)
self.assertIn('EUR', data)
del data['USD']
del data['EUR']
self.assertDictEqual(dict(), data,
'result contains more than requested')
def test_multiple_tuple(self):
data = cryptocmp.api.price.single.get('BTC', ('USD', 'EUR'))
self.assertIsInstance(data, dict)
self.assertIn('USD', data)
self.assertIn('EUR', data)
del data['USD']
del data['EUR']
self.assertDictEqual(dict(), data,
'result contains more than requested')
def test_multiple_list(self):
data = cryptocmp.api.price.single.get('BTC', ['USD', 'EUR'])
self.assertIsInstance(data, dict)
self.assertIn('USD', data)
self.assertIn('EUR', data)
del data['USD']
del data['EUR']
self.assertDictEqual(dict(), data,
'result contains more than requested')
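The six near-identical tests above could also be expressed as one parametrized test; a sketch using unittest subtests, keeping the original assumptions (cryptocmp installed and the remote price API reachable):

from unittest import TestCase
import cryptocmp.api.price.single

class SinglePriceParamTestCase(TestCase):
    def test_currency_arguments(self):
        cases = ['USD', ('USD',), ['USD'], 'USD,EUR', ('USD', 'EUR'), ['USD', 'EUR']]
        for tsyms in cases:
            with self.subTest(tsyms=tsyms):
                data = cryptocmp.api.price.single.get('BTC', tsyms)
                self.assertIsInstance(data, dict)
                # 'EUR' appears in the string form of exactly the multi-currency cases
                expected = ('USD', 'EUR') if 'EUR' in str(tsyms) else ('USD',)
                for sym in expected:
                    self.assertIn(sym, data)
                    del data[sym]
                self.assertDictEqual(dict(), data,
                                     'result contains more than requested')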
| 36.559322
| 68
| 0.578581
| 235
| 2,157
| 5.255319
| 0.144681
| 0.087449
| 0.096356
| 0.130364
| 0.902834
| 0.902834
| 0.902834
| 0.902834
| 0.902834
| 0.902834
| 0
| 0
| 0.291609
| 2,157
| 58
| 69
| 37.189655
| 0.808246
| 0
| 0
| 0.705882
| 0
| 0
| 0.143718
| 0
| 0
| 0
| 0
| 0
| 0.411765
| 1
| 0.117647
| false
| 0
| 0.039216
| 0
| 0.176471
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
49a50f2ab227949b5e8fd2d5f1d7a2334dcecdc1
| 2,728
|
py
|
Python
|
ksteta3pi/MC_12_11164001_MagUp.py
|
Williams224/davinci-scripts
|
730642d2ff13543eca4073a4ce0932631195de56
|
[
"MIT"
] | null | null | null |
ksteta3pi/MC_12_11164001_MagUp.py
|
Williams224/davinci-scripts
|
730642d2ff13543eca4073a4ce0932631195de56
|
[
"MIT"
] | null | null | null |
ksteta3pi/MC_12_11164001_MagUp.py
|
Williams224/davinci-scripts
|
730642d2ff13543eca4073a4ce0932631195de56
|
[
"MIT"
] | null | null | null |
#-- GAUDI jobOptions generated on Mon Jul 27 17:43:48 2015
#-- Contains event types :
#-- 11164001 - 17 files - 263059 events - 73.49 GBytes
#-- Extra information about the data processing phases:
#-- Processing Pass Step-125836
#-- StepId : 125836
#-- StepName : Stripping20-NoPrescalingFlagged for Sim08 - Implicit merging.
#-- ApplicationName : DaVinci
#-- ApplicationVersion : v32r2p1
#-- OptionFiles : $APPCONFIGOPTS/DaVinci/DV-Stripping20-Stripping-MC-NoPrescaling.py;$APPCONFIGOPTS/DaVinci/DataType-2012.py;$APPCONFIGOPTS/DaVinci/InputType-DST.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r164
#-- Visible : Y
#-- Processing Pass Step-127420
#-- StepId : 127420
#-- StepName : Reco14c for MC - 2012
#-- ApplicationName : Brunel
#-- ApplicationVersion : v43r2p10
#-- OptionFiles : $APPCONFIGOPTS/Brunel/DataType-2012.py;$APPCONFIGOPTS/Brunel/MC-WithTruth.py;$APPCONFIGOPTS/Persistency/Compression-ZLIB-1.py
#-- DDDB : fromPreviousStep
#-- CONDDB : fromPreviousStep
#-- ExtraPackages : AppConfig.v3r207
#-- Visible : Y
from Gaudi.Configuration import *
from GaudiConf import IOHelper
IOHelper('ROOT').inputFiles(['LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044504/0000/00044504_00000001_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044504/0000/00044504_00000002_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044504/0000/00044504_00000003_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044504/0000/00044504_00000004_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044504/0000/00044504_00000005_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044504/0000/00044504_00000006_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044504/0000/00044504_00000007_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044504/0000/00044504_00000008_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044504/0000/00044504_00000009_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044504/0000/00044504_00000010_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044504/0000/00044504_00000011_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044504/0000/00044504_00000012_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044504/0000/00044504_00000014_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044504/0000/00044504_00000015_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044504/0000/00044504_00000016_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044504/0000/00044504_00000017_2.AllStreams.dst',
'LFN:/lhcb/MC/2012/ALLSTREAMS.DST/00044504/0000/00044504_00000018_2.AllStreams.dst'
], clear=True)
| 50.518519
| 215
| 0.778226
| 355
| 2,728
| 5.884507
| 0.292958
| 0.211584
| 0.073241
| 0.105792
| 0.584969
| 0.584969
| 0.584969
| 0.584969
| 0.584969
| 0.562949
| 0
| 0.259905
| 0.07478
| 2,728
| 53
| 216
| 51.471698
| 0.56775
| 0.409091
| 0
| 0
| 1
| 0.85
| 0.870195
| 0.867675
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.1
| 0
| 0.1
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
49c2ab41493f7232584354aa1da9f71ce7c6a816
| 27,067
|
py
|
Python
|
tests/test_04_management.py
|
biotech2021/uniTicket
|
8c441eac18e67a983e158326b1c4b82f00f1f1ef
|
[
"Apache-2.0"
] | 15
|
2019-09-06T06:47:08.000Z
|
2022-01-17T06:39:54.000Z
|
tests/test_04_management.py
|
biotech2021/uniTicket
|
8c441eac18e67a983e158326b1c4b82f00f1f1ef
|
[
"Apache-2.0"
] | 69
|
2019-09-06T12:03:19.000Z
|
2022-03-26T14:30:53.000Z
|
tests/test_04_management.py
|
biotech2021/uniTicket
|
8c441eac18e67a983e158326b1c4b82f00f1f1ef
|
[
"Apache-2.0"
] | 13
|
2019-09-11T10:54:20.000Z
|
2021-11-23T09:09:19.000Z
|
import logging
from django.urls import reverse
from uni_ticket.models import *
from uni_ticket.urls import *
from uni_ticket.utils import *
from . base_ticket_env import BaseTicketEnvironment
logger = logging.getLogger('my_logger')
class Test_ManagementFunctions(BaseTicketEnvironment):
def setUp(self):
super().setUp()
self.structure_1_manager_login()
# Create Office 1
# Create a new office in Structure 1
off_name = 'New Office'
params = {'name': off_name,
'description': 'Description new office'}
response = self.client.post(reverse('uni_ticket:manager_office_add_new',
kwargs={'structure_slug': self.structure_1.slug,}),
params,
follow=True)
assert response.status_code == 200
self.office_1 = OrganizationalStructureOffice.objects.get(name=off_name)
def test_tickets(self):
# Tickets list
response = self.client.post(reverse('uni_ticket:manager_tickets',
kwargs={'structure_slug': self.structure_1.slug}),
follow=True)
assert response.status_code == 200
assert response.context['ticket_non_gestiti'] > 0
# assert self.ticket in response.context['ticket_non_gestiti']
def test_take_ticket_and_test(self):
# Take ticket
assignment = TicketAssignment.objects.filter(ticket=self.ticket,
taken_date__isnull=True,
office__organizational_structure=self.structure_1).first()
params = {'priority': 0, 'office': assignment.office}
response = self.client.post(reverse('uni_ticket:manager_manage_ticket',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket.code}),
params,
follow=True)
assert response.status_code == 200
# Submit message (only if the ticket is open)
subject = 'Ticket 1 message'
params = {'subject': subject,
'text': 'Ticket message'}
response = self.client.post(reverse('uni_ticket:ticket_message',
kwargs={'ticket_id': self.ticket.code}),
params,
follow=True)
assert response.status_code == 200
message = TicketReply.objects.filter(ticket=self.ticket,
owner=self.staff_1).first()
assert message
# Get ticket messages
response = self.client.get(reverse('uni_ticket:messages'),
follow=True)
assert response.status_code == 200
assert response.context['ticket_messages']
# Submit message (fails until ticket is not taken)
response = self.client.get(reverse('uni_ticket:message_delete',
kwargs={'ticket_message_id': message.pk}),
follow=True)
assert response.status_code == 200
self.assertFalse(TicketReply.objects.filter(ticket=self.ticket,
owner=self.staff_1))
# Delete ticket
# Fails, ticket is taken
response = self.client.get(reverse('uni_ticket:ticket_delete',
kwargs={'ticket_id': self.ticket.code}),
follow=True)
assert response.status_code == 200
assert self.ticket
# Close ticket
params = {'note': "notes",
'status': 1}
response = self.client.post(reverse('uni_ticket:manager_close_ticket',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket.code}),
params,
follow=True)
assert response.status_code == 200
self.ticket.refresh_from_db()
assert self.ticket.is_closed
# Reopen ticket
response = self.client.get(reverse('uni_ticket:reopen_ticket',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket.code}),
follow=True)
assert response.status_code == 200
self.ticket.refresh_from_db()
self.assertFalse(self.ticket.is_closed)
def test_category_field_edit(self):
# Edit input module field
# This fails, because a ticket exists with this input module
field_name = 'file_field_1 edited'
params = {'name': field_name,
'field_type': 'CustomFileField',
'is_required': False}
response = self.client.post(reverse('uni_ticket:manager_category_input_field_edit',
kwargs={'structure_slug': self.structure_1.slug,
'category_slug': self.category_1_str_1.slug,
'module_id': self.module_2.pk,
'field_id': self.input_field.pk}),
params,
follow=True)
assert response.status_code == 200
self.input_field.refresh_from_db()
self.assertFalse(self.input_field.name == field_name)
def test_category_field_remove(self):
# Remove field
# This fails, because a ticket exists with this input module
response = self.client.get(reverse('uni_ticket:manager_category_input_field_delete',
kwargs={'structure_slug': self.structure_1.slug,
'category_slug': self.category_1_str_1.slug,
'module_id': self.module_2.pk,
'field_id': self.input_field.pk}),
follow=True)
assert response.status_code == 200
assert TicketCategoryInputList.objects.filter(category_module=self.module_2).first()
def test_add_field_to_input_module(self):
# Add file field to input module
# Fails, it has a ticket linked to
field_name = 'file_field_2'
params = {'name': field_name,
'field_type': 'CustomFileField',
'is_required': False}
response = self.client.post(reverse('uni_ticket:manager_category_input_module',
kwargs={'structure_slug': self.structure_1.slug,
'category_slug': self.category_1_str_1.slug,
'module_id': self.module_2.pk}),
params,
follow=True)
assert response.status_code == 200
self.assertFalse(TicketCategoryInputList.objects.filter(category_module=self.module_2,
name=field_name).first())
def test_add_ticket_competence_and_manage(self):
# Take ticket
assignment = TicketAssignment.objects.filter(ticket=self.ticket,
taken_date__isnull=True,
office__organizational_structure=self.structure_1).first()
params = {'priority': 2, 'office': assignment.office}
response = self.client.post(reverse('uni_ticket:manager_manage_ticket',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket.code}),
params,
follow=True)
assert response.status_code == 200
self.ticket.refresh_from_db()
assert self.ticket.priority == 2
assert self.ticket.has_been_taken()
# Select categories of Structure 2
response = self.client.get(reverse('uni_ticket:manager_add_ticket_competence',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket.code}),
follow=True)
self.ticket.refresh_from_db()
assert response.status_code == 200
# Assign ticket to Office (Structure 2)
# Follow and continue to manage ticket (staff_1, manager of Structure 1)
params = {'office_slug': self.office_1_str_2.slug,
'follow': 'on',
# 'readonly': False,
}
response = self.client.post(reverse('uni_ticket:manager_add_ticket_competence',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket.code,
'new_structure_slug': self.structure_2.slug}),
params,
follow=True)
assert response.status_code == 200
assert self.ticket.code in TicketAssignment.get_ticket_per_structure(self.structure_1)
assert self.ticket.code in TicketAssignment.get_ticket_per_structure(self.structure_2)
# Change priority to ticket
params = {'priorita': 1}
response = self.client.post(reverse('uni_ticket:manager_manage_ticket',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket.code}),
params,
follow=True)
assert response.status_code == 200
self.ticket.refresh_from_db()
assert self.ticket.priority == 1
# Structure 2 default office operator login
self.structure_2_default_office_operator_login()
# Change priority to ticket (fails because User_2 hasn't privileges on Structure_1)
params = {'priorita': -1}
response = self.client.post(reverse('uni_ticket:operator_manage_ticket',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket.code}),
params,
follow=True)
assert response.status_code == 200
self.ticket.refresh_from_db()
assert not self.ticket.priority == -1
# Change priority to ticket (fails, operator must take ticket first!)
params = {'priorita': -1}
response = self.client.post(reverse('uni_ticket:operator_manage_ticket',
kwargs={'structure_slug': self.structure_2.slug,
'ticket_id': self.ticket.code}),
params,
follow=True)
assert response.status_code == 200
self.ticket.refresh_from_db()
assert not self.ticket.priority == -1
# take ticket
response = self.client.get(reverse('uni_ticket:ticket_taken_by_unassigned_offices',
kwargs={'structure_slug': self.structure_2.slug,
'ticket_id': self.ticket.code}),
follow=True)
assert response.status_code == 200
self.ticket.refresh_from_db()
# Change ticket priority (success!)
params = {'priorita': -1}
response = self.client.post(reverse('uni_ticket:operator_manage_ticket',
kwargs={'structure_slug': self.structure_2.slug,
'ticket_id': self.ticket.code}),
params,
follow=True)
assert response.status_code == 200
self.ticket.refresh_from_db()
assert self.ticket.priority == -1
# Remove competence
leave_office = OrganizationalStructureOffice.objects.filter(organizational_structure=self.structure_2,
name="Office 1 Stucture 2").first()
params = {'office': leave_office.slug}
response = self.client.post(reverse('uni_ticket:leave_ticket_competence',
kwargs={'structure_slug': self.structure_2.slug,
'ticket_id': self.ticket.code}),
params,
follow=True)
self.ticket.refresh_from_db()
assert response.status_code == 200
assert self.ticket.code in TicketAssignment.get_ticket_per_structure(self.structure_1)
assert self.ticket.code not in TicketAssignment.get_ticket_per_structure(self.structure_2)
def test_add_ticket_competence_and_readonly(self):
# Take ticket
assignment = TicketAssignment.objects.filter(ticket=self.ticket,
taken_date__isnull=True,
office__organizational_structure=self.structure_1).first()
params = {'priority': 2, 'office': assignment.office}
response = self.client.post(reverse('uni_ticket:manager_manage_ticket',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket.code}),
params,
follow=True)
assert response.status_code == 200
self.ticket.refresh_from_db()
assert self.ticket.priority == 2
assert self.ticket.has_been_taken()
# Select categories of Structure 2
response = self.client.get(reverse('uni_ticket:manager_add_ticket_competence',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket.code}),
follow=True)
self.ticket.refresh_from_db()
assert response.status_code == 200
# Assign ticket to Office 1 (Structure 2)
# Follow and continue to manage ticket (staff_1, manager of Structure 1)
params = {'office_slug': self.office_1_str_2.slug,
'follow': 'on',
'readonly': True,
}
response = self.client.post(reverse('uni_ticket:manager_add_ticket_competence',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket.code,
'new_structure_slug': self.structure_2.slug}),
params,
follow=True)
assert response.status_code == 200
assert self.ticket.code in TicketAssignment.get_ticket_per_structure(self.structure_1)
assert self.ticket.code in TicketAssignment.get_ticket_per_structure(self.structure_2)
# Change ticket priority (this fails: the competence is read-only!)
params = {'priorita': 1}
response = self.client.post(reverse('uni_ticket:manager_manage_ticket',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket.code}),
params,
follow=True)
self.ticket.refresh_from_db()
assert self.ticket.priority != 1
def test_ticket_dependence(self):
# Take ticket
assignment = TicketAssignment.objects.filter(ticket=self.ticket,
taken_date__isnull=True,
office__organizational_structure=self.structure_1).first()
params = {'priority': 0, 'office': assignment.office}
response = self.client.post(reverse('uni_ticket:manager_manage_ticket',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket.code}),
params,
follow=True)
assert response.status_code == 200
assignment_2 = TicketAssignment.objects.filter(ticket=self.ticket_2,
taken_date__isnull=True,
office__organizational_structure=self.structure_1).first()
params = {'priority': 2, 'office': assignment_2.office}
response = self.client.post(reverse('uni_ticket:manager_manage_ticket',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket_2.code}),
params,
follow=True)
assert response.status_code == 200
self.ticket_2.refresh_from_db()
assert self.ticket_2.priority == 2
params = {'ticket': self.ticket_2.code,
          'note': "Ticket 1 depends on ticket 2"}
response = self.client.post(reverse('uni_ticket:manager_add_ticket_dependence',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket.code}),
params,
follow=True)
assert response.status_code == 200
t2t = Ticket2Ticket.objects.filter(subordinate_ticket=self.ticket,
main_ticket=self.ticket_2).first()
assert t2t
response = self.client.get(reverse('uni_ticket:remove_ticket_dependence',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket.code,
'main_ticket_id': self.ticket_2.code}),
follow=True)
assert response.status_code == 200
t2t = Ticket2Ticket.objects.filter(subordinate_ticket=self.ticket,
main_ticket=self.ticket_2)
self.assertFalse(t2t)
def test_ticket_message(self):
# Take ticket
assignment = TicketAssignment.objects.filter(ticket=self.ticket,
taken_date__isnull=True,
office__organizational_structure=self.structure_1).first()
params = {'priority': 0, 'office': assignment.office}
response = self.client.post(reverse('uni_ticket:manager_manage_ticket',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket.code}),
params,
follow=True)
# Submit message (allowed only once the ticket has been taken)
subject = 'Ticket 1 message from manager'
params = {'subject': subject,
'text': 'Ticket message'}
response = self.client.post(reverse('uni_ticket:manager_ticket_message',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket.code}),
params,
follow=True)
assert response.status_code == 200
assert TicketReply.objects.filter(ticket=self.ticket,
owner=self.staff_1,
subject=subject)
def test_task(self):
# Take ticket
assignment = TicketAssignment.objects.filter(ticket=self.ticket,
taken_date__isnull=True,
office__organizational_structure=self.structure_1).first()
params = {'priority': 0, 'office': assignment.office}
response = self.client.post(reverse('uni_ticket:manager_manage_ticket',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket.code}),
params,
follow=True)
assert response.status_code == 200
# Create new task
subject = 'Ticket 1 task 1'
params = {'subject': subject,
'description': "Task 1 description",
'priority': 1}
response = self.client.post(reverse('uni_ticket:manager_add_ticket_task',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket.code}),
params,
follow=True)
assert response.status_code == 200
task = Task.objects.filter(ticket=self.ticket,
subject=subject,
priority=1).first()
assert task
# Edit task priority
params = {'priorita': 2}
response = self.client.post(reverse('uni_ticket:manager_task_detail',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket.code,
'task_id': task.code}),
params,
follow=True)
assert response.status_code == 200
task.refresh_from_db()
assert task.priority == 2
# Edit task
attachment = self.create_fake_file()
subject = "Ticket 1 task edited"
params = {'subject': subject,
'description': "new descr",
'priority': -1,
'attachment': attachment}
response = self.client.post(reverse('uni_ticket:manager_edit_task',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket.code,
'task_id': task.code}),
params,
follow=True)
assert response.status_code == 200
task.refresh_from_db()
assert task.priority == -1
assert task.attachment
assert task.subject == subject
# Download attachment
response = self.client.get(reverse('uni_ticket:download_task_attachment',
kwargs={'ticket_id': self.ticket.code,
'task_id': task.code}),
follow=True)
self.assertNotEqual(response.status_code, 404)
# Delete attachment
response = self.client.get(reverse('uni_ticket:manage_elimina_allegato_task',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket.code,
'task_id': task.code}),
follow=True)
assert response.status_code == 200
task.refresh_from_db()
self.assertFalse(task.attachment)
# Close task without motivation (fails!)
params = {}
response = self.client.post(reverse('uni_ticket:manager_close_task',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket.code,
'task_id': task.code}),
params,
follow=True)
assert response.status_code == 200
task.refresh_from_db()
assert not task.is_closed
# Close task with motivation
params = {'note': "notes",
'status': 1}
response = self.client.post(reverse('uni_ticket:manager_close_task',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket.code,
'task_id': task.code}),
params,
follow=True)
assert response.status_code == 200
task.refresh_from_db()
assert task.is_closed
# Reopen task
response = self.client.get(reverse('uni_ticket:reopen_task',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket.code,
'task_id': task.code}),
follow=True)
assert response.status_code == 200
task.refresh_from_db()
assert not task.is_closed
# Remove task
response = self.client.get(reverse('uni_ticket:task_remove',
kwargs={'structure_slug': self.structure_1.slug,
'ticket_id': self.ticket.code,
'task_id': task.code}),
follow=True)
assert response.status_code == 200
self.assertFalse(Task.objects.filter(ticket=self.ticket))
| 52.865234
| 111
| 0.489083
| 2,396
| 27,067
| 5.29591
| 0.068447
| 0.067775
| 0.047443
| 0.077863
| 0.829459
| 0.82079
| 0.792497
| 0.775081
| 0.71558
| 0.696745
| 0
| 0.016895
| 0.429268
| 27,067
| 511
| 112
| 52.968689
| 0.804505
| 0.053903
| 0
| 0.708738
| 0
| 0
| 0.119806
| 0.051647
| 0
| 0
| 0
| 0
| 0.18932
| 1
| 0.026699
| false
| 0
| 0.014563
| 0
| 0.043689
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
49c47c2d9a95074a75fe21f0d4fe7789589f2451
| 122
|
py
|
Python
|
tests/test_docx.py
|
tmbb/playfair
|
cad2491c955259e0482a443cea94f5d334b6e05e
|
[
"MIT"
] | null | null | null |
tests/test_docx.py
|
tmbb/playfair
|
cad2491c955259e0482a443cea94f5d334b6e05e
|
[
"MIT"
] | null | null | null |
tests/test_docx.py
|
tmbb/playfair
|
cad2491c955259e0482a443cea94f5d334b6e05e
|
[
"MIT"
] | null | null | null |
from playfair.docx import *
from docx import Document
def test_can_get_new_styled_document():
new_styled_document()
| 17.428571
| 39
| 0.803279
| 18
| 122
| 5.055556
| 0.611111
| 0.21978
| 0.373626
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.139344
| 122
| 6
| 40
| 20.333333
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
49d55b3079fc26c79dc37e50530ef77075645083
| 17,031
|
py
|
Python
|
tests/python/MixingHelpersTest.py
|
mjtitchener-fn/OpenColorIO
|
00b5362442b9fe954c4b1161fe0cec621fcf1915
|
[
"BSD-3-Clause"
] | 628
|
2018-08-11T02:18:36.000Z
|
2022-03-31T15:05:23.000Z
|
tests/python/MixingHelpersTest.py
|
mjtitchener-fn/OpenColorIO
|
00b5362442b9fe954c4b1161fe0cec621fcf1915
|
[
"BSD-3-Clause"
] | 655
|
2019-04-16T15:15:31.000Z
|
2022-03-31T18:05:52.000Z
|
tests/python/MixingHelpersTest.py
|
mjtitchener-fn/OpenColorIO
|
00b5362442b9fe954c4b1161fe0cec621fcf1915
|
[
"BSD-3-Clause"
] | 181
|
2018-12-22T15:39:52.000Z
|
2022-03-22T09:52:27.000Z
|
# SPDX-License-Identifier: BSD-3-Clause
# Copyright Contributors to the OpenColorIO Project.
import unittest
import os
import sys
import PyOpenColorIO as OCIO
from UnitTestUtils import SAMPLE_CONFIG
def test_percent_1000(a, b):
# Helper function to test sliders.
return abs(a - int(100000 * b)) <= 1
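# Example (derived from the helper above): test_percent_1000(50000, 0.5) is
# True, since b is scaled by 100000 and compared to the integer reference a
# within one unit, i.e. a resolution of 1e-5.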
class MixingHelpersTest(unittest.TestCase):
def setUp(self):
self.cfg = OCIO.Config().CreateFromStream(SAMPLE_CONFIG)
def tearDown(self):
self.cfg = None
def test_encoding(self):
"""
Test MixingColorSpaceManager encodings.
"""
mix = OCIO.MixingColorSpaceManager(self.cfg)
self.assertEqual(mix.getSelectedMixingEncodingIdx(), 0)
encodings = mix.getMixingEncodings()
self.assertEqual(len(encodings), 2)
self.assertEqual(encodings[0], 'RGB')
self.assertEqual(encodings[1], 'HSV')
mix.setSelectedMixingEncoding('HSV')
self.assertEqual(mix.getSelectedMixingEncodingIdx(), 1)
mix.setSelectedMixingEncodingIdx(0)
self.assertEqual(mix.getSelectedMixingEncodingIdx(), 0)
with self.assertRaises(OCIO.Exception):
mix.setSelectedMixingEncoding('HS')
for param in [None, 1, OCIO.TRANSFORM_DIR_FORWARD]:
with self.assertRaises(TypeError):
mix.setSelectedMixingEncoding(param)
for param in [None, 'test']:
with self.assertRaises(TypeError):
mix.setSelectedMixingEncodingIdx(param)
# Print the MixingColorSpaceManager.
self.assertEqual(str(mix),
('config: $0f5cce49d82e023765b1d657191c3417:$4dd1c89df8002b409e089089ce8f24e7, '
'slider: [minEdge: 0, maxEdge: 0.833864], mixingSpaces: [Rendering Space, '
'Display Space], selectedMixingSpaceIdx: 0, selectedMixingEncodingIdx: 0'))
mix = None
def test_mixing_space(self):
"""
Test MixingColorSpaceManager mixing spaces for a config without ROLE_COLOR_PICKING role.
"""
mix = OCIO.MixingColorSpaceManager(self.cfg)
self.assertEqual(mix.getSelectedMixingSpaceIdx(), 0)
mixSpaces = mix.getMixingSpaces()
self.assertEqual(len(mixSpaces), 2)
self.assertEqual(mixSpaces[0], 'Rendering Space')
self.assertEqual(mixSpaces[1], 'Display Space')
mix.setSelectedMixingSpace('Display Space')
self.assertEqual(mix.getSelectedMixingSpaceIdx(), 1)
mix.setSelectedMixingSpaceIdx(0)
self.assertEqual(mix.getSelectedMixingSpaceIdx(), 0)
with self.assertRaises(OCIO.Exception):
mix.setSelectedMixingSpace('DisplaySpace')
for param in [None, 1, OCIO.TRANSFORM_DIR_FORWARD]:
with self.assertRaises(TypeError):
mix.setSelectedMixingSpace(param)
for param in [None, 'test']:
with self.assertRaises(TypeError):
mix.setSelectedMixingSpaceIdx(param)
mix = None
def test_get_processor(self):
"""
Test getProcessor() function without ROLE_COLOR_PICKING role.
"""
mix = OCIO.MixingColorSpaceManager(self.cfg)
# Using default encoding (RGB) and mixing space (rendering space).
proc = mix.getProcessor(workingSpaceName = 'lin_1', displayName = 'DISP_1', viewName = 'VIEW_1')
grp = proc.createGroupTransform()
self.assertEqual(len(grp), 1)
self.assertEqual(grp[0].getTransformType(), OCIO.TRANSFORM_TYPE_MATRIX)
# Same call without parameter names.
proc = mix.getProcessor('lin_1', 'DISP_1', 'VIEW_1')
grp = proc.createGroupTransform()
self.assertEqual(len(grp), 1)
self.assertEqual(grp[0].getTransformType(), OCIO.TRANSFORM_TYPE_MATRIX)
# Same call with a different order for the parameter names.
proc = mix.getProcessor(viewName = 'VIEW_1', workingSpaceName = 'lin_1', displayName = 'DISP_1')
grp = proc.createGroupTransform()
self.assertEqual(len(grp), 1)
self.assertEqual(grp[0].getTransformType(), OCIO.TRANSFORM_TYPE_MATRIX)
# Change encoding to HSV.
mix.setSelectedMixingEncoding('HSV')
proc = mix.getProcessor(displayName = 'DISP_1', viewName = 'VIEW_1', workingSpaceName = 'lin_1')
grp = proc.createGroupTransform()
self.assertEqual(len(grp), 2)
self.assertEqual(grp[0].getTransformType(), OCIO.TRANSFORM_TYPE_MATRIX)
t = grp[1]
self.assertEqual(t.getTransformType(), OCIO.TRANSFORM_TYPE_FIXED_FUNCTION)
self.assertEqual(t.getDirection(), OCIO.TRANSFORM_DIR_FORWARD)
self.assertEqual(t.getStyle(), OCIO.FIXED_FUNCTION_RGB_TO_HSV)
# Change mixing space to 'Display Space'.
mix.setSelectedMixingSpace('Display Space')
proc = mix.getProcessor(displayName = 'DISP_1', viewName = 'VIEW_1', workingSpaceName = 'lin_1')
grp = proc.createGroupTransform()
self.assertEqual(len(grp), 2)
t = grp[0]
self.assertEqual(t.getTransformType(), OCIO.TRANSFORM_TYPE_EXPONENT)
self.assertEqual(t.getDirection(), OCIO.TRANSFORM_DIR_INVERSE)
self.assertEqual(t.getValue(), [2.6, 2.6, 2.6, 1.])
t = grp[1]
self.assertEqual(t.getTransformType(), OCIO.TRANSFORM_TYPE_FIXED_FUNCTION)
self.assertEqual(t.getDirection(), OCIO.TRANSFORM_DIR_FORWARD)
self.assertEqual(t.getStyle(), OCIO.FIXED_FUNCTION_RGB_TO_HSV)
proc = mix.getProcessor(displayName = 'DISP_1', viewName = 'VIEW_1', workingSpaceName = 'lin_1',
direction = OCIO.TRANSFORM_DIR_INVERSE)
grp = proc.createGroupTransform()
self.assertEqual(len(grp), 2)
t = grp[1]
self.assertEqual(t.getTransformType(), OCIO.TRANSFORM_TYPE_EXPONENT)
self.assertEqual(t.getDirection(), OCIO.TRANSFORM_DIR_FORWARD)
self.assertEqual(t.getValue(), [2.6, 2.6, 2.6, 1.])
t = grp[0]
self.assertEqual(t.getTransformType(), OCIO.TRANSFORM_TYPE_FIXED_FUNCTION)
self.assertEqual(t.getDirection(), OCIO.TRANSFORM_DIR_INVERSE)
self.assertEqual(t.getStyle(), OCIO.FIXED_FUNCTION_RGB_TO_HSV)
with self.assertRaises(TypeError):
proc = mix.getProcessor('lin_1', None, 'VIEW_1')
with self.assertRaises(TypeError):
proc = mix.getProcessor(workingSpaceName = 'lin_1', displayName = 666, viewName = 'VIEW_1')
with self.assertRaises(TypeError):
proc = mix.getProcessor(workingSpaceName = 'lin_1', displayName = 'DISP_1',
viewName = OCIO.TRANSFORM_TYPE_LOG)
with self.assertRaises(TypeError):
proc = mix.getProcessor(workingSpaceName = 'lin_1', displayName = 'DISP_1')
with self.assertRaises(TypeError):
proc = mix.getProcessor(workingSpaceName = 'lin_1', displayName = 'DISP_1',
errorName = 'VIEW_1')
with self.assertRaises(TypeError):
proc = mix.getProcessor(workingSpaceName = 'lin_1', displayName = 'DISP_1',
viewName = 'VIEW_1', tooMany = True)
with self.assertRaises(OCIO.Exception):
proc = mix.getProcessor('lin_1', '', 'VIEW_1')
with self.assertRaises(OCIO.Exception):
proc = mix.getProcessor('', 'DISP_1', 'VIEW_1')
with self.assertRaises(OCIO.Exception):
proc = mix.getProcessor('not found', 'DISP_1', 'VIEW_1')
mix = None
def test_color_picking(self):
"""
Test getProcessor() function with ROLE_COLOR_PICKING role.
"""
mix = OCIO.MixingColorSpaceManager(self.cfg)
mixSpaces = mix.getMixingSpaces()
self.assertEqual(len(mixSpaces), 2)
self.cfg.setRole(OCIO.ROLE_COLOR_PICKING, 'log_1')
mix.refresh(self.cfg)
mixSpaces = mix.getMixingSpaces()
self.assertEqual(len(mixSpaces), 1)
self.assertEqual(mixSpaces[0], 'color_picking (log_1)')
proc = mix.getProcessor(workingSpaceName = 'lin_1', displayName = 'DISP_1', viewName = 'VIEW_1')
grp = proc.createGroupTransform()
self.assertEqual(len(grp), 1)
t = grp[0]
self.assertEqual(t.getTransformType(), OCIO.TRANSFORM_TYPE_LOG)
self.assertEqual(t.getDirection(), OCIO.TRANSFORM_DIR_FORWARD)
self.assertEqual(t.getBase(), 2.)
mix.setSelectedMixingEncodingIdx(1) # i.e. HSV
proc = mix.getProcessor(displayName = 'DISP_1', viewName = 'VIEW_1', workingSpaceName = 'lin_1')
grp = proc.createGroupTransform()
self.assertEqual(len(grp), 2)
t = grp[0]
self.assertEqual(t.getTransformType(), OCIO.TRANSFORM_TYPE_LOG)
self.assertEqual(t.getDirection(), OCIO.TRANSFORM_DIR_FORWARD)
self.assertEqual(t.getBase(), 2.)
t = grp[1]
self.assertEqual(t.getTransformType(), OCIO.TRANSFORM_TYPE_FIXED_FUNCTION)
self.assertEqual(t.getDirection(), OCIO.TRANSFORM_DIR_FORWARD)
self.assertEqual(t.getStyle(), OCIO.FIXED_FUNCTION_RGB_TO_HSV)
with self.assertRaises(OCIO.Exception):
mix.setSelectedMixingSpaceIdx(1)
# Print the MixingColorSpaceManager.
self.assertEqual(str(mix),
('config: $470958575eb3f906f985d31be8279e0d:$4dd1c89df8002b409e089089ce8f24e7, '
'slider: [minEdge: 0, maxEdge: 1], mixingSpaces: [color_picking (log_1)], '
'selectedMixingSpaceIdx: 0, selectedMixingEncodingIdx: 1, colorPicking'))
mix = None
def test_mixing_slider(self):
"""
"""
mix = OCIO.MixingColorSpaceManager(self.cfg)
slider = mix.getSlider(sliderMixingMinEdge=0.0, sliderMixingMaxEdge=1.0)
# Print the slider.
self.assertEqual(str(slider), 'minEdge: 0, maxEdge: 0.833864')
# Set encoding.
mix.setSelectedMixingEncodingIdx(1) # i.e. HSV
# Needs linear to perceptually linear adjustment.
mix.setSelectedMixingSpaceIdx(0) # i.e. Rendering Space
self.assertEqual(mix.getSelectedMixingSpaceIdx(), 0)
slider.setSliderMinEdge(0.0)
slider.setSliderMaxEdge(1.0)
self.assertTrue(test_percent_1000( 0, slider.getSliderMinEdge()))
self.assertTrue(test_percent_1000(83386, slider.getSliderMaxEdge()))
self.assertTrue(test_percent_1000(37923, slider.mixingToSlider(mixingUnits=0.1)))
self.assertTrue(test_percent_1000(80144, slider.mixingToSlider(0.5)))
self.assertTrue(test_percent_1000(10000, slider.sliderToMixing(sliderUnits=0.379232)))
self.assertTrue(test_percent_1000(50000, slider.sliderToMixing(0.801448)))
slider.setSliderMinEdge(-0.2)
slider.setSliderMaxEdge(5.)
self.assertTrue(test_percent_1000( 3792, slider.mixingToSlider(-0.1)))
self.assertTrue(test_percent_1000(31573, slider.mixingToSlider( 0.1)))
self.assertTrue(test_percent_1000(58279, slider.mixingToSlider( 0.5)))
self.assertTrue(test_percent_1000(90744, slider.mixingToSlider( 3.0)))
self.assertTrue(test_percent_1000(-10000, slider.sliderToMixing(0.037927)))
self.assertTrue(test_percent_1000( 10000, slider.sliderToMixing(0.315733)))
self.assertTrue(test_percent_1000( 50000, slider.sliderToMixing(0.582797)))
self.assertTrue(test_percent_1000(300000, slider.sliderToMixing(0.907444)))
# Does not need any linear to perceptually linear adjustment.
mix.setSelectedMixingSpaceIdx(1) # i.e. Display Space
# Print the slider.
self.assertEqual(str(slider), 'minEdge: -0.2, maxEdge: 5')
slider.setSliderMinEdge(0.0)
slider.setSliderMaxEdge(1.0)
self.assertTrue(test_percent_1000( 0, slider.getSliderMinEdge()))
self.assertTrue(test_percent_1000(100000, slider.getSliderMaxEdge()))
self.assertTrue(test_percent_1000(10000, slider.mixingToSlider(0.1)))
self.assertTrue(test_percent_1000(50000, slider.mixingToSlider(0.5)))
self.assertTrue(test_percent_1000(37923, slider.sliderToMixing(0.379232)))
self.assertTrue(test_percent_1000(80144, slider.sliderToMixing(0.801448)))
slider.setSliderMinEdge(-0.2)
slider.setSliderMaxEdge(5.)
self.assertTrue(test_percent_1000( 0, slider.mixingToSlider(slider.getSliderMinEdge())))
self.assertTrue(test_percent_1000(100000, slider.mixingToSlider(slider.getSliderMaxEdge())))
self.assertTrue(test_percent_1000( 1923, slider.mixingToSlider(-0.1)))
self.assertTrue(test_percent_1000( 5769, slider.mixingToSlider( 0.1)))
self.assertTrue(test_percent_1000(13461, slider.mixingToSlider( 0.5)))
self.assertTrue(test_percent_1000(61538, slider.mixingToSlider( 3.0)))
self.assertTrue(test_percent_1000( -277, slider.sliderToMixing(0.037927)))
self.assertTrue(test_percent_1000(144181, slider.sliderToMixing(0.315733)))
self.assertTrue(test_percent_1000(283054, slider.sliderToMixing(0.582797)))
self.assertTrue(test_percent_1000(451870, slider.sliderToMixing(0.907444)))
# Change encoding.
mix.setSelectedMixingEncodingIdx(0) # i.e. RGB
# Needs linear to perceptually linear adjustment.
mix.setSelectedMixingSpaceIdx(0) # i.e. Rendering Space
slider.setSliderMinEdge(0.0)
slider.setSliderMaxEdge(1.0)
self.assertTrue(test_percent_1000( 0, slider.getSliderMinEdge()))
self.assertTrue(test_percent_1000(83386, slider.getSliderMaxEdge()))
self.assertTrue(test_percent_1000(37923, slider.mixingToSlider(0.1)))
self.assertTrue(test_percent_1000(80144, slider.mixingToSlider(0.5)))
self.assertTrue(test_percent_1000(10000, slider.sliderToMixing(0.379232)))
self.assertTrue(test_percent_1000(50000, slider.sliderToMixing(0.801448)))
slider.setSliderMinEdge(-0.2)
slider.setSliderMaxEdge(5.)
self.assertTrue(test_percent_1000( 3792, slider.mixingToSlider(-0.1)))
self.assertTrue(test_percent_1000(31573, slider.mixingToSlider( 0.1)))
self.assertTrue(test_percent_1000(58279, slider.mixingToSlider( 0.5)))
self.assertTrue(test_percent_1000(90744, slider.mixingToSlider( 3.0)))
self.assertTrue(test_percent_1000(-10000, slider.sliderToMixing(0.037927)))
self.assertTrue(test_percent_1000( 10000, slider.sliderToMixing(0.315733)))
self.assertTrue(test_percent_1000( 50000, slider.sliderToMixing(0.582797)))
self.assertTrue(test_percent_1000(300000, slider.sliderToMixing(0.907444)))
# Does not need any linear to perceptually linear adjustment.
mix.setSelectedMixingSpaceIdx(1) # i.e. Display Space
slider.setSliderMinEdge(0.0)
slider.setSliderMaxEdge(1.0)
self.assertTrue(test_percent_1000( 0, slider.getSliderMinEdge()))
self.assertTrue(test_percent_1000(100000, slider.getSliderMaxEdge()))
self.assertTrue(test_percent_1000(10000, slider.mixingToSlider(0.1)))
self.assertTrue(test_percent_1000(50000, slider.mixingToSlider(0.5)))
self.assertTrue(test_percent_1000(37923, slider.sliderToMixing(0.379232)))
self.assertTrue(test_percent_1000(80144, slider.sliderToMixing(0.801448)))
slider.setSliderMinEdge(-0.2)
slider.setSliderMaxEdge(5.)
self.assertTrue(test_percent_1000( 0, slider.mixingToSlider(slider.getSliderMinEdge())))
self.assertTrue(test_percent_1000(100000, slider.mixingToSlider(slider.getSliderMaxEdge())))
self.assertTrue(test_percent_1000( 1923, slider.mixingToSlider(-0.1)))
self.assertTrue(test_percent_1000( 5769, slider.mixingToSlider( 0.1)))
self.assertTrue(test_percent_1000(13461, slider.mixingToSlider( 0.5)))
self.assertTrue(test_percent_1000(61538, slider.mixingToSlider( 3.0)))
self.assertTrue(test_percent_1000( -277, slider.sliderToMixing(0.037927)))
self.assertTrue(test_percent_1000(144181, slider.sliderToMixing(0.315733)))
self.assertTrue(test_percent_1000(283054, slider.sliderToMixing(0.582797)))
self.assertTrue(test_percent_1000(451870, slider.sliderToMixing(0.907444)))
# Update with ROLE_COLOR_PICKING role.
self.cfg.setRole(OCIO.ROLE_COLOR_PICKING, 'log_1')
mix.refresh(self.cfg)
mix.setSelectedMixingEncodingIdx(1) # i.e. HSV
mix.setSelectedMixingSpaceIdx(0) # i.e. Color Picker role
slider = mix.getSlider(0.0, 1.0)
self.assertTrue(test_percent_1000(50501, slider.mixingToSlider(0.50501)))
self.assertTrue(test_percent_1000(50501, slider.sliderToMixing(0.50501)))
mix.setSelectedMixingEncodingIdx(0) # i.e. RGB
mix.setSelectedMixingSpaceIdx(0) # i.e. Color Picker role
self.assertTrue(test_percent_1000(50501, slider.mixingToSlider(0.50501)))
self.assertTrue(test_percent_1000(50501, slider.sliderToMixing(0.50501)))
mix = None
| 42.684211
| 104
| 0.680465
| 1,857
| 17,031
| 6.094777
| 0.103931
| 0.063174
| 0.086146
| 0.141368
| 0.842198
| 0.798198
| 0.764358
| 0.755169
| 0.725747
| 0.692614
| 0
| 0.079344
| 0.205214
| 17,031
| 398
| 105
| 42.791457
| 0.756797
| 0.06817
| 0
| 0.773077
| 0
| 0
| 0.05533
| 0.014865
| 0
| 0
| 0
| 0
| 0.523077
| 1
| 0.030769
| false
| 0
| 0.019231
| 0.003846
| 0.057692
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
49df146d3376b382d20dd46bb07c26baa746dc5c
| 106
|
py
|
Python
|
app/auth/__init__.py
|
RisperAkinyi/BlogPost
|
f8ee4c887fceae8e70410b66a12bc5680cf26044
|
[
"MIT"
] | null | null | null |
app/auth/__init__.py
|
RisperAkinyi/BlogPost
|
f8ee4c887fceae8e70410b66a12bc5680cf26044
|
[
"MIT"
] | 1
|
2020-10-27T21:52:06.000Z
|
2020-10-27T21:52:06.000Z
|
app/auth/__init__.py
|
RisperAkinyi/BlogPost
|
f8ee4c887fceae8e70410b66a12bc5680cf26044
|
[
"MIT"
] | null | null | null |
from flask import Blueprint
# auth blueprint
auth = Blueprint('auth',__name__)
from . import views, forms
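# Editor-added sketch (hedged): a hypothetical helper showing how this
# blueprint would typically be attached to a Flask app created by an
# application factory elsewhere in the package; the factory is not shown here.
def _register_auth(app):
    app.register_blueprint(auth)
    return app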
| 17.666667
| 33
| 0.773585
| 14
| 106
| 5.571429
| 0.571429
| 0.5
| 0.564103
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.141509
| 106
| 6
| 34
| 17.666667
| 0.857143
| 0.132075
| 0
| 0
| 0
| 0
| 0.043956
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 0.666667
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
|
0
| 8
|
49e24f23a2dfb829f0e7607bf0850cecad0a3c36
| 38,179
|
py
|
Python
|
src/senet.py
|
haowang1992/DSN
|
0cb243182d20960b073f8c3bcc47c91611acc94f
|
[
"Apache-2.0"
] | 8
|
2021-07-29T01:52:32.000Z
|
2022-02-25T08:13:52.000Z
|
src/senet.py
|
haowang1992/DSN
|
0cb243182d20960b073f8c3bcc47c91611acc94f
|
[
"Apache-2.0"
] | 1
|
2021-11-28T08:10:04.000Z
|
2022-02-03T05:35:46.000Z
|
src/senet.py
|
haowang1992/DSN
|
0cb243182d20960b073f8c3bcc47c91611acc94f
|
[
"Apache-2.0"
] | 1
|
2022-03-22T01:39:17.000Z
|
2022-03-22T01:39:17.000Z
|
"""
ResNet code gently borrowed from
https://github.com/pytorch/vision/blob/master/torchvision/models/resnet.py
"""
from __future__ import print_function, division, absolute_import
from collections import OrderedDict
import math
import torch
import torch.nn as nn
from torch.utils import model_zoo
import numpy as np
import pdb
__all__ = ['SENet', 'senet154', 'se_resnet50', 'se_resnet101', 'se_resnet152',
'se_resnext50_32x4d', 'se_resnext101_32x4d']
class EMSLayer(nn.Module):
def __init__(self, num_classes, num_dimension):
super(EMSLayer, self).__init__()
self.cpars = torch.nn.Parameter(torch.randn(num_classes, num_dimension))
self.relu = torch.nn.ReLU(inplace=True)
def forward(self, x):
out = pairwise_distances(x, self.cpars)
out = - self.relu(out).sqrt()
return out
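# EMSLayer scores an embedding by its negative Euclidean distance to one
# learned prototype per class (self.cpars); the ReLU clamps tiny negative
# squared distances caused by floating-point error before the square root.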
def pairwise_distances(x, y=None):
'''
Input: x is a Nxd matrix
y is an optional Mxd matrix
Output: dist is a NxM matrix where dist[i,j] is the square norm between x[i,:] and y[j,:]
if y is not given then use 'y=x'.
i.e. dist[i,j] = ||x[i,:]-y[j,:]||^2
'''
x_norm = (x**2).sum(1).view(-1, 1)
if y is not None:
y_norm = (y**2).sum(1).view(1, -1)
else:
y = x
y_norm = x_norm.view(1, -1)
dist = x_norm + y_norm - 2.0 * torch.mm(x, torch.transpose(y, 0, 1))
return dist
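# Editor-added sanity check (hypothetical helper, not part of the original
# file): the expansion ||x_i - y_j||^2 = ||x_i||^2 + ||y_j||^2 - 2 x_i.y_j
# used above should agree with torch.cdist(x, y) squared up to rounding error.
def _demo_pairwise_distances():
    x, y = torch.randn(4, 3), torch.randn(5, 3)
    assert torch.allclose(pairwise_distances(x, y), torch.cdist(x, y) ** 2, atol=1e-5)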
pretrained_settings = {
'senet154': {
'imagenet': {
'url': 'http://data.lip6.fr/cadene/pretrainedmodels/senet154-c7b49a05.pth',
'input_space': 'RGB',
'input_size': [3, 224, 224],
'input_range': [0, 1],
'mean': [0.485, 0.456, 0.406],
'std': [0.229, 0.224, 0.225],
'num_classes': 1000
}
},
'se_resnet50': {
'imagenet': {
'url': 'http://data.lip6.fr/cadene/pretrainedmodels/se_resnet50-ce0d4300.pth',
'input_space': 'RGB',
'input_size': [3, 224, 224],
'input_range': [0, 1],
'mean': [0.485, 0.456, 0.406],
'std': [0.229, 0.224, 0.225],
'num_classes': 1000
}
},
'se_resnet101': {
'imagenet': {
'url': 'http://data.lip6.fr/cadene/pretrainedmodels/se_resnet101-7e38fcc6.pth',
'input_space': 'RGB',
'input_size': [3, 224, 224],
'input_range': [0, 1],
'mean': [0.485, 0.456, 0.406],
'std': [0.229, 0.224, 0.225],
'num_classes': 1000
}
},
'se_resnet152': {
'imagenet': {
'url': 'http://data.lip6.fr/cadene/pretrainedmodels/se_resnet152-d17c99b7.pth',
'input_space': 'RGB',
'input_size': [3, 224, 224],
'input_range': [0, 1],
'mean': [0.485, 0.456, 0.406],
'std': [0.229, 0.224, 0.225],
'num_classes': 1000
}
},
'se_resnext50_32x4d': {
'imagenet': {
'url': 'http://data.lip6.fr/cadene/pretrainedmodels/se_resnext50_32x4d-a260b3a4.pth',
'input_space': 'RGB',
'input_size': [3, 224, 224],
'input_range': [0, 1],
'mean': [0.485, 0.456, 0.406],
'std': [0.229, 0.224, 0.225],
'num_classes': 1000
}
},
'se_resnext101_32x4d': {
'imagenet': {
'url': 'http://data.lip6.fr/cadene/pretrainedmodels/se_resnext101_32x4d-3b2fe3d8.pth',
'input_space': 'RGB',
'input_size': [3, 224, 224],
'input_range': [0, 1],
'mean': [0.485, 0.456, 0.406],
'std': [0.229, 0.224, 0.225],
'num_classes': 1000
}
},
}
# class SEModule(nn.Module):
# def __init__(self, channels, reduction):
# super(SEModule, self).__init__()
# self.avg_pool = nn.AdaptiveAvgPool2d(1)
# self.fc1 = nn.Conv2d(channels, channels // reduction, kernel_size=1,
# padding=0)
# self.relu = nn.ReLU(inplace=True)
# self.fc2 = nn.Conv2d(channels // reduction, channels, kernel_size=1,
# padding=0)
# self.sigmoid = nn.Sigmoid()
# def forward(self, x):
# module_input = x
# x = self.avg_pool(x)
# x = self.fc1(x)
# x = self.relu(x)
# x = self.fc2(x)
# x = self.sigmoid(x)
# return module_input * x
class CSSEModule(nn.Module):
def __init__(self, channels, reduction):
super(CSSEModule, self).__init__()
# SSE
# self.conv1 = nn.Conv2d(in_channels=channels, out_channels=channels//reduction, kernel_size=1, bias=True)
# self.conv2 = nn.Conv2d(in_channels=channels//reduction + 1, out_channels=channels, kernel_size=1, bias=True)
# self.conv_img = nn.Conv2d(in_channels=1, out_channels=1, kernel_size=3, padding=1, stride=1, bias=True)
# self.conv_ske = nn.Conv2d(in_channels=1, out_channels=1, kernel_size=3, padding=1, stride=1, bias=True)
self.conv1 = nn.Conv2d(in_channels=1, out_channels=1, kernel_size=3, padding=1, stride=1, bias=True)
# self.bn1 = nn.BatchNorm2d(1, eps=1e-5, momentum=0.01, affine=True)
self.conv2 = nn.Conv2d(in_channels=2, out_channels=1, kernel_size=3, padding=1, stride=1, bias=True)
# self.bn2 = nn.BatchNorm2d(1, eps=1e-5, momentum=0.01, affine=True)
# CSE
self.avg_pool = nn.AdaptiveAvgPool2d(1)
self.fc1 = nn.Linear(in_features=channels, out_features=channels // reduction, bias=True)
self.fc2 = nn.Linear(in_features=channels // reduction + 1, out_features=channels, bias=True)
self.relu = nn.ReLU(inplace=True)
self.sigmoid = nn.Sigmoid()
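# forward() applies two conditional gates: a spatial one (SSE, built from the
# 1-channel average map concatenated with y, shifted into (0.5, 1.5) by the
# "+ 0.5") and a channel one (CSE, a squeeze-excite with y appended), and
# rescales the input by both.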
def forward(self, x, y):
# SSE
module_input = x
avg = torch.mean(x, 1).unsqueeze(1)
# max = torch.max(x, 1)[0].unsqueeze(1)
# x = torch.cat((avg, max), 1)
x = self.conv1(avg)
# x = self.bn1(x)
x = self.relu(x)
x = torch.cat((x, y.view(y.size(0), 1, 1, 1).expand(y.size(0), 1, x.size(2), x.size(3))), 1)
x = self.conv2(x)
# x = self.bn2(x)
sse_weight = self.sigmoid(x) + 0.5
# x = torch.mean(x, 1).unsqueeze(1)
# img = self.conv_img(x)
# ske = self.conv_ske(x)
# x = y.view(-1, 1, 1, 1) * img + (1 - y).view(-1, 1, 1, 1) * ske
# sse_weight = self.sigmoid(x)
# CSE
x = module_input
x = self.avg_pool(x)
x = x.view(x.size()[0], -1)
x = self.fc1(x)
x = self.relu(x)
x = torch.cat((x, y), 1)
x = self.fc2(x)
x = self.sigmoid(x)
cse_weight = x.view(x.size()[0], -1, 1, 1)
# module_input = module_input * cse_weight
# x = module_input
# avg = torch.mean(x, 1).unsqueeze(1)
# # max = torch.max(x, 1)[0].unsqueeze(1)
# # x = torch.cat((avg, max), 1)
# x = self.conv1(avg)
# x = self.relu(x)
# # x = self.bn1(x)
# x = torch.cat((x, y.view(y.size(0), 1, 1, 1).expand(y.size(0), 1, x.size(2), x.size(3))), 1)
# x = self.conv2(x)
# # x = self.bn2(x)
# sse_weight = self.sigmoid(x)
return module_input * sse_weight * cse_weight
class SSEModule(nn.Module):
def __init__(self, channels, reduction):
super(SSEModule, self).__init__()
self.conv1 = nn.Conv2d(in_channels=channels, out_channels=channels//reduction, kernel_size=1, bias=True)
self.relu = nn.ReLU(inplace=True)
self.conv2 = nn.Conv2d(in_channels=channels//reduction + 1, out_channels=channels, kernel_size=1, bias=True)
self.sigmoid = nn.Sigmoid()
def forward(self, x, y):
module_input = x
x = self.conv1(x)
x = self.relu(x)
x = torch.cat((x, y.view(y.size(0), 1, 1, 1).expand(y.size(0), 1, x.size(2), x.size(3))), 1)
x = self.conv2(x)
x = self.sigmoid(x)
return module_input * x
class CSEModule(nn.Module):
def __init__(self, channels, reduction):
super(CSEModule, self).__init__()
self.avg_pool = nn.AdaptiveAvgPool2d(1)
self.fc1 = nn.Linear(in_features=channels, out_features=channels // reduction, bias=True)
self.relu = nn.ReLU(inplace=True)
self.fc2 = nn.Linear(in_features=channels // reduction+1, out_features=channels, bias=True)
self.sigmoid = nn.Sigmoid()
def forward(self, x, y):
module_input = x
x = self.avg_pool(x)
x = x.view(x.size()[0], -1)
x = self.fc1(x)
x = self.relu(x)
x = torch.cat((x, y), 1)
x = self.fc2(x)
x = self.sigmoid(x)
x = x.view(x.size()[0], -1, 1, 1)
return module_input * x
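# Editor-added smoke test (hypothetical helper, not part of the original API):
# y is the (N, 1) conditional code appended after the squeeze step, and the
# channel gate preserves the input shape.
def _demo_cse_module():
    se = CSEModule(channels=64, reduction=16)
    x, y = torch.randn(2, 64, 8, 8), torch.ones(2, 1)
    assert se(x, y).shape == x.shape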
class Bottleneck(nn.Module):
"""
Base class for bottlenecks that implements `forward()` method.
"""
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.bn3(out)
if self.downsample is not None:
residual = self.downsample(x)
out = self.se_module(out) + residual
out = self.relu(out)
return out
# class SEBottleneck(Bottleneck):
# """
# Bottleneck for SENet154.
# """
# expansion = 4
# def __init__(self, inplanes, planes, groups, reduction, stride=1,
# downsample=None):
# super(SEBottleneck, self).__init__()
# self.conv1 = nn.Conv2d(inplanes, planes * 2, kernel_size=1, bias=False)
# self.bn1 = nn.BatchNorm2d(planes * 2)
# self.conv2 = nn.Conv2d(planes * 2, planes * 4, kernel_size=3,
# stride=stride, padding=1, groups=groups,
# bias=False)
# self.bn2 = nn.BatchNorm2d(planes * 4)
# self.conv3 = nn.Conv2d(planes * 4, planes * 4, kernel_size=1,
# bias=False)
# self.bn3 = nn.BatchNorm2d(planes * 4)
# self.relu = nn.ReLU(inplace=True)
# self.se_module = SEModule(planes * 4, reduction=reduction)
# self.downsample = downsample
# self.stride = stride
# class SEResNetBottleneck(Bottleneck):
# """
# ResNet bottleneck with a Squeeze-and-Excitation module. It follows Caffe
# implementation and uses `stride=stride` in `conv1` and not in `conv2`
# (the latter is used in the torchvision implementation of ResNet).
# """
# expansion = 4
# def __init__(self, inplanes, planes, groups, reduction, stride=1,
# downsample=None):
# super(SEResNetBottleneck, self).__init__()
# self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False,
# stride=stride)
# self.bn1 = nn.BatchNorm2d(planes)
# self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, padding=1,
# groups=groups, bias=False)
# self.bn2 = nn.BatchNorm2d(planes)
# self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
# self.bn3 = nn.BatchNorm2d(planes * 4)
# self.relu = nn.ReLU(inplace=True)
# self.se_module = SEModule(planes * 4, reduction=reduction)
# self.downsample = downsample
# self.stride = stride
class CSSEResNetBottleneck(Bottleneck):
"""
ResNet bottleneck with a Squeeze-and-Excitation module. It follows Caffe
implementation and uses `stride=stride` in `conv1` and not in `conv2`
(the latter is used in the torchvision implementation of ResNet).
"""
expansion = 4
def __init__(self, inplanes, planes, groups, reduction, stride=1,
downsample=None):
super(CSSEResNetBottleneck, self).__init__()
self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False,
stride=stride)
self.bn1 = nn.BatchNorm2d(planes)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, padding=1,
groups=groups, bias=False)
self.bn2 = nn.BatchNorm2d(planes)
self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
self.bn3 = nn.BatchNorm2d(planes * 4)
self.relu = nn.ReLU(inplace=True)
self.se_module = CSSEModule(planes * 4, reduction=reduction)
self.downsample = downsample
self.stride = stride
def forward(self, x, y):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.bn3(out)
if self.downsample is not None:
residual = self.downsample(x)
out = self.se_module(out, y) + residual
out = self.relu(out)
return out
class SSEResNetBottleneck(Bottleneck):
"""
ResNet bottleneck with a Squeeze-and-Excitation module. It follows Caffe
implementation and uses `stride=stride` in `conv1` and not in `conv2`
(the latter is used in the torchvision implementation of ResNet).
"""
expansion = 4
def __init__(self, inplanes, planes, groups, reduction, stride=1,
downsample=None):
super(SSEResNetBottleneck, self).__init__()
self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False,
stride=stride)
self.bn1 = nn.BatchNorm2d(planes)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, padding=1,
groups=groups, bias=False)
self.bn2 = nn.BatchNorm2d(planes)
self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
self.bn3 = nn.BatchNorm2d(planes * 4)
self.relu = nn.ReLU(inplace=True)
self.se_module = SSEModule(planes * 4, reduction=reduction)
self.downsample = downsample
self.stride = stride
def forward(self, x, y):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.bn3(out)
if self.downsample is not None:
residual = self.downsample(x)
out = self.se_module(out, y) + residual
out = self.relu(out)
return out
class CSEResNetBottleneck(Bottleneck):
"""
ResNet bottleneck with a Squeeze-and-Excitation module. It follows Caffe
implementation and uses `stride=stride` in `conv1` and not in `conv2`
(the latter is used in the torchvision implementation of ResNet).
"""
expansion = 4
def __init__(self, inplanes, planes, groups, reduction, stride=1,
downsample=None):
super(CSEResNetBottleneck, self).__init__()
self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False,
stride=stride)
self.bn1 = nn.BatchNorm2d(planes)
self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, padding=1,
groups=groups, bias=False)
self.bn2 = nn.BatchNorm2d(planes)
self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
self.bn3 = nn.BatchNorm2d(planes * 4)
self.relu = nn.ReLU(inplace=True)
self.se_module = CSEModule(planes * 4, reduction=reduction)
self.downsample = downsample
self.stride = stride
def forward(self, x, y):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.bn3(out)
if self.downsample is not None:
residual = self.downsample(x)
out = self.se_module(out, y) + residual
out = self.relu(out)
return out
class CSENet(nn.Module):
def __init__(self, block, layers, groups, reduction, dropout_p=0.2,
inplanes=128, input_3x3=True, downsample_kernel_size=3,
downsample_padding=1, num_classes=1000, ems=False):
"""
Parameters
----------
block (nn.Module): Bottleneck class.
- For SENet154: SEBottleneck
- For CSE-ResNet models: CSEResNetBottleneck
- For SE-ResNeXt models: SEResNeXtBottleneck
layers (list of ints): Number of residual blocks for 4 layers of the
network (layer1...layer4).
groups (int): Number of groups for the 3x3 convolution in each
bottleneck block.
- For SENet154: 64
- For SE-ResNet models: 1
- For SE-ResNeXt models: 32
reduction (int): Reduction ratio for Squeeze-and-Excitation modules.
- For all models: 16
dropout_p (float or None): Drop probability for the Dropout layer.
If `None` the Dropout layer is not used.
- For SENet154: 0.2
- For SE-ResNet models: None
- For SE-ResNeXt models: None
inplanes (int): Number of input channels for layer1.
- For SENet154: 128
- For SE-ResNet models: 64
- For SE-ResNeXt models: 64
input_3x3 (bool): If `True`, use three 3x3 convolutions instead of
a single 7x7 convolution in layer0.
- For SENet154: True
- For SE-ResNet models: False
- For SE-ResNeXt models: False
downsample_kernel_size (int): Kernel size for downsampling convolutions
in layer2, layer3 and layer4.
- For SENet154: 3
- For SE-ResNet models: 1
- For SE-ResNeXt models: 1
downsample_padding (int): Padding for downsampling convolutions in
layer2, layer3 and layer4.
- For SENet154: 1
- For SE-ResNet models: 0
- For SE-ResNeXt models: 0
num_classes (int): Number of outputs in `last_linear` layer.
- For all models: 1000
"""
super(CSENet, self).__init__()
self.inplanes = inplanes
if input_3x3:
layer0_modules = [
('conv1', nn.Conv2d(3, 64, 3, stride=2, padding=1,
bias=False)),
('bn1', nn.BatchNorm2d(64)),
('relu1', nn.ReLU(inplace=True)),
('conv2', nn.Conv2d(64, 64, 3, stride=1, padding=1,
bias=False)),
('bn2', nn.BatchNorm2d(64)),
('relu2', nn.ReLU(inplace=True)),
('conv3', nn.Conv2d(64, inplanes, 3, stride=1, padding=1,
bias=False)),
('bn3', nn.BatchNorm2d(inplanes)),
('relu3', nn.ReLU(inplace=True)),
]
else:
layer0_modules = [
('conv1', nn.Conv2d(3, inplanes, kernel_size=7, stride=2,
padding=3, bias=False)),
('bn1', nn.BatchNorm2d(inplanes)),
('relu1', nn.ReLU(inplace=True)),
]
# To preserve compatibility with Caffe weights `ceil_mode=True`
# is used instead of `padding=1`.
layer0_modules.append(('pool', nn.MaxPool2d(3, stride=2,
ceil_mode=True)))
self.layer0 = nn.Sequential(OrderedDict(layer0_modules))
self.layer1 = self._make_layer(
block,
planes=64,
blocks=layers[0],
groups=groups,
reduction=reduction,
downsample_kernel_size=1,
downsample_padding=0
)
self.layer2 = self._make_layer(
block,
planes=128,
blocks=layers[1],
stride=2,
groups=groups,
reduction=reduction,
downsample_kernel_size=downsample_kernel_size,
downsample_padding=downsample_padding
)
self.layer3 = self._make_layer(
block,
planes=256,
blocks=layers[2],
stride=2,
groups=groups,
reduction=reduction,
downsample_kernel_size=downsample_kernel_size,
downsample_padding=downsample_padding
)
self.layer4 = self._make_layer(
block,
planes=512,
blocks=layers[3],
stride=2,
groups=groups,
reduction=reduction,
downsample_kernel_size=downsample_kernel_size,
downsample_padding=downsample_padding
)
self.avg_pool = nn.AvgPool2d(7, stride=1)
self.dropout = nn.Dropout(dropout_p) if dropout_p is not None else None
self.ems = ems
if self.ems:
self.last_linear = EMSLayer(num_classes, 512 * block.expansion)
else:
self.last_linear = nn.Linear(512 * block.expansion, num_classes)
def _make_layer(self, block, planes, blocks, groups, reduction, stride=1,
downsample_kernel_size=1, downsample_padding=0):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
nn.Conv2d(self.inplanes, planes * block.expansion,
kernel_size=downsample_kernel_size, stride=stride,
padding=downsample_padding, bias=False),
nn.BatchNorm2d(planes * block.expansion),
)
layers = []
layers.append(block(self.inplanes, planes, groups, reduction, stride,
downsample))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes, groups, reduction))
return nn.Sequential(*layers)
def features(self, x, y):
x = self.layer0(x)
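# nn.Sequential.forward only accepts a single input, so each stage is unrolled
# manually here to thread the conditional code y through every block.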
for m in self.layer1._modules.values():
x = m(x, y)
for m in self.layer2._modules.values():
x = m(x, y)
for m in self.layer3._modules.values():
x = m(x, y)
for m in self.layer4._modules.values():
x = m(x, y)
return x
def logits(self, x):
x = self.avg_pool(x)
if self.dropout is not None:
x = self.dropout(x)
x = x.view(x.size(0), -1)
x = self.last_linear(x)
return x
def forward(self, x, y):
x = self.features(x, y)
x = self.logits(x)
return x
class CSENet_hashing(nn.Module):
def __init__(self, block, layers, groups, reduction, dropout_p=0.2,
inplanes=128, input_3x3=True, downsample_kernel_size=3,
                 downsample_padding=1, num_classes=1000, ems=False, hashing_dim=64):
"""
Parameters
----------
block (nn.Module): Bottleneck class.
- For SENet154: SEBottleneck
- For CSE-ResNet models: CSEResNetBottleneck
- For SE-ResNeXt models: SEResNeXtBottleneck
layers (list of ints): Number of residual blocks for 4 layers of the
network (layer1...layer4).
groups (int): Number of groups for the 3x3 convolution in each
bottleneck block.
- For SENet154: 64
- For SE-ResNet models: 1
- For SE-ResNeXt models: 32
reduction (int): Reduction ratio for Squeeze-and-Excitation modules.
- For all models: 16
dropout_p (float or None): Drop probability for the Dropout layer.
If `None` the Dropout layer is not used.
- For SENet154: 0.2
- For SE-ResNet models: None
- For SE-ResNeXt models: None
inplanes (int): Number of input channels for layer1.
- For SENet154: 128
- For SE-ResNet models: 64
- For SE-ResNeXt models: 64
input_3x3 (bool): If `True`, use three 3x3 convolutions instead of
a single 7x7 convolution in layer0.
- For SENet154: True
- For SE-ResNet models: False
- For SE-ResNeXt models: False
downsample_kernel_size (int): Kernel size for downsampling convolutions
in layer2, layer3 and layer4.
- For SENet154: 3
- For SE-ResNet models: 1
- For SE-ResNeXt models: 1
downsample_padding (int): Padding for downsampling convolutions in
layer2, layer3 and layer4.
- For SENet154: 1
- For SE-ResNet models: 0
- For SE-ResNeXt models: 0
num_classes (int): Number of outputs in `last_linear` layer.
- For all models: 1000
"""
super(CSENet_hashing, self).__init__()
self.inplanes = inplanes
if input_3x3:
layer0_modules = [
('conv1', nn.Conv2d(3, 64, 3, stride=2, padding=1,
bias=False)),
('bn1', nn.BatchNorm2d(64)),
('relu1', nn.ReLU(inplace=True)),
('conv2', nn.Conv2d(64, 64, 3, stride=1, padding=1,
bias=False)),
('bn2', nn.BatchNorm2d(64)),
('relu2', nn.ReLU(inplace=True)),
('conv3', nn.Conv2d(64, inplanes, 3, stride=1, padding=1,
bias=False)),
('bn3', nn.BatchNorm2d(inplanes)),
('relu3', nn.ReLU(inplace=True)),
]
else:
layer0_modules = [
('conv1', nn.Conv2d(3, inplanes, kernel_size=7, stride=2,
padding=3, bias=False)),
('bn1', nn.BatchNorm2d(inplanes)),
('relu1', nn.ReLU(inplace=True)),
]
# To preserve compatibility with Caffe weights `ceil_mode=True`
# is used instead of `padding=1`.
layer0_modules.append(('pool', nn.MaxPool2d(3, stride=2,
ceil_mode=True)))
self.layer0 = nn.Sequential(OrderedDict(layer0_modules))
self.layer1 = self._make_layer(
block,
planes=64,
blocks=layers[0],
groups=groups,
reduction=reduction,
downsample_kernel_size=1,
downsample_padding=0
)
self.layer2 = self._make_layer(
block,
planes=128,
blocks=layers[1],
stride=2,
groups=groups,
reduction=reduction,
downsample_kernel_size=downsample_kernel_size,
downsample_padding=downsample_padding
)
self.layer3 = self._make_layer(
block,
planes=256,
blocks=layers[2],
stride=2,
groups=groups,
reduction=reduction,
downsample_kernel_size=downsample_kernel_size,
downsample_padding=downsample_padding
)
self.layer4 = self._make_layer(
block,
planes=512,
blocks=layers[3],
stride=2,
groups=groups,
reduction=reduction,
downsample_kernel_size=downsample_kernel_size,
downsample_padding=downsample_padding
)
self.avg_pool = nn.AvgPool2d(7, stride=1)
self.dropout = nn.Dropout(dropout_p) if dropout_p is not None else None
self.ems = ems
self.second_last_linear = nn.Linear(512 * block.expansion, hashing_dim)
if self.ems:
self.last_linear = EMSLayer(num_classes, hashing_dim)
else:
self.last_linear = nn.Linear(hashing_dim, num_classes)
def _make_layer(self, block, planes, blocks, groups, reduction, stride=1,
downsample_kernel_size=1, downsample_padding=0):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
nn.Conv2d(self.inplanes, planes * block.expansion,
kernel_size=downsample_kernel_size, stride=stride,
padding=downsample_padding, bias=False),
nn.BatchNorm2d(planes * block.expansion),
)
layers = []
layers.append(block(self.inplanes, planes, groups, reduction, stride,
downsample))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes, groups, reduction))
return nn.Sequential(*layers)
def features(self, x, y):
x = self.layer0(x)
for m in self.layer1._modules.values():
x = m(x, y)
for m in self.layer2._modules.values():
x = m(x, y)
for m in self.layer3._modules.values():
x = m(x, y)
for m in self.layer4._modules.values():
x = m(x, y)
return x
def hashing(self, x):
x = self.avg_pool(x)
if self.dropout is not None:
x = self.dropout(x)
x = x.view(x.size(0), -1)
features = x
x = self.second_last_linear(x)
return x, features
def logits(self, x):
x = self.last_linear(x)
return x
def forward(self, x, y):
    x = self.features(x, y)
    x, _ = self.hashing(x)  # hashing() returns (projection, backbone features); only the projection feeds the classifier
    x = self.logits(x)
    return x
def initialize_pretrained_model(model, num_classes, settings):
assert num_classes == settings['num_classes'], \
'num_classes should be {}, but is {}'.format(
settings['num_classes'], num_classes)
model.load_state_dict(model_zoo.load_url(settings['url']))
model.input_space = settings['input_space']
model.input_size = settings['input_size']
model.input_range = settings['input_range']
model.mean = settings['mean']
model.std = settings['std']
def initialize_pretrained_model_ext(model, num_classes, settings):
pretrained_state_dict = model_zoo.load_url(settings['url'])
for kk,vv in pretrained_state_dict.items():
if 'se_module.fc1.weight' in kk:
pretrained_state_dict[kk] = torch.squeeze(vv)
if 'se_module.fc2.weight' in kk:
pretrained_state_dict[kk] = torch.cat([torch.squeeze(vv), torch.zeros(vv.size()[0], 1)], dim=1)
if num_classes != settings['num_classes'] or model.ems:
del(pretrained_state_dict['last_linear.weight'])
del(pretrained_state_dict['last_linear.bias'])
model_dict = model.state_dict()
trash_vars = [k for k in pretrained_state_dict.keys() if k not in model_dict.keys()]
print('trashed vars from resume dict:')
print(trash_vars)
resume_dict = {k: v for k, v in pretrained_state_dict.items() if k in model_dict}
model_dict.update(resume_dict)
model.load_state_dict(model_dict)
model.input_space = settings['input_space']
model.input_size = settings['input_size']
model.input_range = settings['input_range']
model.mean = settings['mean']
model.std = settings['std']
def initialize_pretrained_model_hashing(model, hashing_dim, num_classes, settings):
pretrained_state_dict = model_zoo.load_url(settings['url'])
for kk,vv in pretrained_state_dict.items():
if 'se_module.fc1.weight' in kk:
pretrained_state_dict[kk] = torch.squeeze(vv)
if 'se_module.fc2.weight' in kk:
pretrained_state_dict[kk] = torch.cat([torch.squeeze(vv), torch.zeros(vv.size()[0], 1)], dim=1)
#pdb.set_trace()
old_weight = pretrained_state_dict['last_linear.weight']
print(old_weight.shape)
u, s, vh = np.linalg.svd(old_weight, full_matrices=True)
# u_new = u[:hashing_dim,:hashing_dim]
s_new = np.diag(s)[:hashing_dim,:hashing_dim]
# vh_new = vh[:hashing_dim,:]
new_1 = u[:, :hashing_dim]
new_2 = np.dot(s_new, vh[:hashing_dim, :])
# new_weight = np.dot(u_new, np.dot(s_new, vh_new))
print(new_1.shape, new_2.shape)
del(pretrained_state_dict['last_linear.weight'])
del(pretrained_state_dict['last_linear.bias'])
model_dict = model.state_dict()
trash_vars = [k for k in pretrained_state_dict.keys() if k not in model_dict.keys()]
print('trashed vars from resume dict:')
print(trash_vars)
resume_dict = {k: v for k, v in pretrained_state_dict.items() if k in model_dict}
resume_dict['second_last_linear.weight'] = torch.from_numpy(new_2)
resume_dict['last_linear.weight'] = torch.from_numpy(new_1)
model_dict.update(resume_dict)
model.load_state_dict(model_dict)
model.input_space = settings['input_space']
model.input_size = settings['input_size']
model.input_range = settings['input_range']
model.mean = settings['mean']
model.std = settings['std']
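# Editor-added illustration (hypothetical helper): the SVD split above factors
# the pretrained classifier weight W (num_classes x feat_dim) into
# new_1 = U[:, :k] and new_2 = diag(S[:k]) @ Vh[:k, :], so new_1 @ new_2 is the
# best rank-k approximation of W (Eckart-Young); new_2 seeds second_last_linear
# and new_1 seeds the reduced last_linear.
def _demo_svd_split(num_classes=10, feat_dim=32, k=8):
    W = np.random.randn(num_classes, feat_dim).astype(np.float32)
    u, s, vh = np.linalg.svd(W, full_matrices=True)
    new_1 = u[:, :k]
    new_2 = np.dot(np.diag(s)[:k, :k], vh[:k, :])
    # The Frobenius error of the rank-k truncation equals the norm of the
    # discarded singular values.
    err = np.linalg.norm(W - np.dot(new_1, new_2))
    assert np.isclose(err, np.linalg.norm(s[k:]), rtol=1e-4)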
def cse_resnet50(num_classes=1000, pretrained='imagenet', ems=False):
model = CSENet(CSEResNetBottleneck, [3, 4, 6, 3], groups=1, reduction=16,
dropout_p=None, inplanes=64, input_3x3=False,
downsample_kernel_size=1, downsample_padding=0,
num_classes=num_classes, ems=ems)
if pretrained is not None:
settings = pretrained_settings['se_resnet50'][pretrained]
initialize_pretrained_model_ext(model, num_classes, settings)
return model
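# Editor-added usage sketch (hypothetical helper, no weight download): y is a
# (N, 1) float tensor, e.g. a domain indicator such as 1.0 for photos and 0.0
# for sketches; the spatial size must be 224 because of the fixed 7x7 pooling.
def _demo_cse_resnet50():
    model = cse_resnet50(num_classes=10, pretrained=None)
    x = torch.randn(2, 3, 224, 224)
    y = torch.tensor([[1.0], [0.0]])
    return model(x, y)  # logits of shape (2, 10)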
def cse_resnet50_hashing(hashing_dim, num_classes=1000, pretrained='imagenet', ems=False, module='CSE'):
if module == 'SSE':
model = CSENet_hashing(SSEResNetBottleneck, [3, 4, 6, 3], groups=1, reduction=16,
dropout_p=None, inplanes=64, input_3x3=False,
downsample_kernel_size=1, downsample_padding=0,
num_classes=num_classes, ems=ems, hashing_dim=hashing_dim)
elif module == 'CSSE':
model = CSENet_hashing(CSSEResNetBottleneck, [3, 4, 6, 3], groups=1, reduction=16,
dropout_p=None, inplanes=64, input_3x3=False,
downsample_kernel_size=1, downsample_padding=0,
num_classes=num_classes, ems=ems, hashing_dim=hashing_dim)
else:
model = CSENet_hashing(CSEResNetBottleneck, [3, 4, 6, 3], groups=1, reduction=16,
dropout_p=None, inplanes=64, input_3x3=False,
downsample_kernel_size=1, downsample_padding=0,
num_classes=num_classes, ems=ems, hashing_dim=hashing_dim)
if pretrained is not None:
settings = pretrained_settings['se_resnet50'][pretrained]
initialize_pretrained_model_hashing(model, hashing_dim, num_classes, settings)
return model
# def senet154(num_classes=1000, pretrained='imagenet'):
# model = SENet(SEBottleneck, [3, 8, 36, 3], groups=64, reduction=16,
# dropout_p=0.2, num_classes=num_classes)
# if pretrained is not None:
# settings = pretrained_settings['senet154'][pretrained]
# initialize_pretrained_model(model, num_classes, settings)
# return model
# def se_resnet50(num_classes=1000, pretrained='imagenet'):
# model = SENet(SEResNetBottleneck, [3, 4, 6, 3], groups=1, reduction=16,
# dropout_p=None, inplanes=64, input_3x3=False,
# downsample_kernel_size=1, downsample_padding=0,
# num_classes=num_classes)
# if pretrained is not None:
# settings = pretrained_settings['se_resnet50'][pretrained]
# initialize_pretrained_model(model, num_classes, settings)
# return model
# def se_resnet101(num_classes=1000, pretrained='imagenet'):
# model = SENet(SEResNetBottleneck, [3, 4, 23, 3], groups=1, reduction=16,
# dropout_p=None, inplanes=64, input_3x3=False,
# downsample_kernel_size=1, downsample_padding=0,
# num_classes=num_classes)
# if pretrained is not None:
# settings = pretrained_settings['se_resnet101'][pretrained]
# initialize_pretrained_model(model, num_classes, settings)
# return model
# def se_resnet152(num_classes=1000, pretrained='imagenet'):
# model = SENet(SEResNetBottleneck, [3, 8, 36, 3], groups=1, reduction=16,
# dropout_p=None, inplanes=64, input_3x3=False,
# downsample_kernel_size=1, downsample_padding=0,
# num_classes=num_classes)
# if pretrained is not None:
# settings = pretrained_settings['se_resnet152'][pretrained]
# initialize_pretrained_model(model, num_classes, settings)
# return model
# def se_resnext50_32x4d(num_classes=1000, pretrained='imagenet'):
# model = SENet(SEResNeXtBottleneck, [3, 4, 6, 3], groups=32, reduction=16,
# dropout_p=None, inplanes=64, input_3x3=False,
# downsample_kernel_size=1, downsample_padding=0,
# num_classes=num_classes)
# if pretrained is not None:
# settings = pretrained_settings['se_resnext50_32x4d'][pretrained]
# initialize_pretrained_model(model, num_classes, settings)
# return model
# def se_resnext101_32x4d(num_classes=1000, pretrained='imagenet'):
# model = SENet(SEResNeXtBottleneck, [3, 4, 23, 3], groups=32, reduction=16,
# dropout_p=None, inplanes=64, input_3x3=False,
# downsample_kernel_size=1, downsample_padding=0,
# num_classes=num_classes)
# if pretrained is not None:
# settings = pretrained_settings['se_resnext101_32x4d'][pretrained]
# initialize_pretrained_model(model, num_classes, settings)
# return model
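A minimal usage sketch for the factory above, appended under a __main__ guard (hedged: hashing_dim=64 and num_classes=100 are illustrative values, and pretrained=None skips downloading ImageNet weights):
if __name__ == '__main__':
    # Build the CSSE variant without pretrained weights.
    model = cse_resnet50_hashing(hashing_dim=64, num_classes=100,
                                 pretrained=None, module='CSSE')
    print(model)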
| 37.989055
| 118
| 0.572802
| 4,673
| 38,179
| 4.53242
| 0.067837
| 0.029273
| 0.029273
| 0.014448
| 0.890935
| 0.874929
| 0.858876
| 0.842493
| 0.82238
| 0.80255
| 0
| 0.047012
| 0.309699
| 38,179
| 1,004
| 119
| 38.026892
| 0.756631
| 0.294298
| 0
| 0.769616
| 0
| 0
| 0.061357
| 0.000962
| 0
| 0
| 0
| 0
| 0.001669
| 1
| 0.053422
| false
| 0
| 0.013356
| 0
| 0.12187
| 0.011686
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
49fd09f92d60bd8b848a81fbcc2c761607840330
| 5,143
|
py
|
Python
|
tests/data/test_feature_selection.py
|
DiogoM1/MBINF_SIB
|
19e77896a8b5788747e51b20613c2f3b2888cca1
|
[
"Apache-2.0"
] | null | null | null |
tests/data/test_feature_selection.py
|
DiogoM1/MBINF_SIB
|
19e77896a8b5788747e51b20613c2f3b2888cca1
|
[
"Apache-2.0"
] | null | null | null |
tests/data/test_feature_selection.py
|
DiogoM1/MBINF_SIB
|
19e77896a8b5788747e51b20613c2f3b2888cca1
|
[
"Apache-2.0"
] | null | null | null |
import unittest
# noinspection DuplicatedCode
class TestVarianceThreshold(unittest.TestCase):
"""
Test Conditions
- Use a labeled dataset
"""
def setUp(self):
from si.data import Dataset
from si.data import feature_selection as fs
self.filename = "datasets/lr-example1.data"
self.dataset = Dataset.from_data(self.filename, labeled=True)
# set the threshold
self.vt = fs.VarianceThreshold(0)
self.assertWarns(Warning, fs.VarianceThreshold, -13)
def test_fit(self):
self.vt.fit(self.dataset)
self.assertGreater(len(self.vt._var), 0)
    def test_transform(self):
        self.vt.fit(self.dataset)
        self.vt_transform = self.vt.transform(self.dataset)
        self.assertEqual(self.vt_transform.X.shape, self.dataset.X.shape)
    def test_transform_inline(self):
        self.vt.fit(self.dataset)
        original_shape = self.dataset.X.shape
        # Assumes transform() accepts inline=True, mirroring fit_transform() below.
        self.vt.transform(self.dataset, inline=True)
        self.assertEqual(self.dataset.X.shape, original_shape)
    def test_fit_transform(self):
        self.vt_transform = self.vt.fit_transform(self.dataset)
        self.assertEqual(self.vt_transform.X.shape, self.dataset.X.shape)
    def test_fit_transform_inline(self):
        original_shape = self.dataset.X.shape
        self.vt.fit_transform(self.dataset, inline=True)
        self.assertEqual(self.dataset.X.shape, original_shape)
class TestFClassif(unittest.TestCase):
    """
    Tests for the f_classif ANOVA scoring function on a labeled dataset.
    """
def setUp(self):
from si.data import Dataset
self.filename = "datasets/hearts.data"
self.dataset = Dataset.from_data(self.filename, labeled=True)
def test_f_classif(self):
from si.data.feature_selection import f_classif
F, p = f_classif(self.dataset)
self.assertEqual(F.shape, (13,))
self.assertEqual(p.shape, (13,))
class TestFRegression(unittest.TestCase):
    """
    Tests for the f_regression scoring function on a labeled dataset.
    """
def setUp(self):
from si.data import Dataset
self.filename = "datasets/hearts.data"
self.dataset = Dataset.from_data(self.filename, labeled=True)
def test_f_regression(self):
from si.data.feature_selection import f_regression
F, p = f_regression(self.dataset)
self.assertEqual(F.shape, (13,))
self.assertEqual(p.shape, (13,))
class TestKBestClassif(unittest.TestCase):
    """
    Tests for KBest feature selection scored with f_classif.
    """
def setUp(self):
from si.data import Dataset
from si.data.feature_selection import KBest
from si.data.feature_selection import f_classif
self.filename = "datasets/hearts.data"
self.dataset = Dataset.from_data(self.filename, labeled=True)
self.KBest = KBest(10, "f_classif", )
self.assertEqual(self.KBest.k, 10)
self.assertEqual(self.KBest._func, f_classif)
self.assertRaises(Exception, KBest, 10, "t_classif")
self.assertRaises(Exception, KBest, -12, "f_classif")
def test_fit(self):
self.KBest.fit(self.dataset)
self.assertEqual(len(self.KBest.F), 13)
self.assertEqual(len(self.KBest.p), 13)
def test_transform(self):
from si.data.feature_selection import KBest
self.KBest.fit(self.dataset)
self.KBest_transform = self.KBest.transform(self.dataset)
self.assertEqual(self.KBest_transform.X.shape, (self.dataset.X.shape[0], self.KBest.k))
kb = KBest(23, "f_classif")
kb.fit(self.dataset)
self.assertWarns(Warning, kb.transform, self.dataset)
def test_fit_transform(self):
self.KBest_transform = self.KBest.fit_transform(self.dataset)
self.assertEqual(self.KBest_transform.X.shape, (self.dataset.X.shape[0], self.KBest.k))
def test_fit_transform_inline(self):
self.KBest_transform = self.KBest.fit_transform(self.dataset)
self.KBest.fit_transform(self.dataset, inline=True)
self.assertEqual(self.KBest_transform.X.shape, self.dataset.X.shape)
class TestKBestRegression(TestKBestClassif):
def setUp(self):
from si.data import Dataset
from si.data.feature_selection import KBest
from si.data.feature_selection import f_regression
self.filename = "datasets/hearts.data"
self.dataset = Dataset.from_data(self.filename, labeled=True)
self.KBest = KBest(10, "f_regression")
self.assertEqual(self.KBest.k, 10)
self.assertEqual(self.KBest._func, f_regression)
def test_fit(self):
self.KBest.fit(self.dataset)
self.assertEqual(len(self.KBest.F), 13)
self.assertEqual(len(self.KBest.p), 13)
def test_transform(self):
self.KBest.fit(self.dataset)
self.KBest_transform = self.KBest.transform(self.dataset)
self.assertEqual(self.KBest_transform.X.shape, (self.dataset.X.shape[0], self.KBest.k))
def test_fit_transform(self):
self.KBest_transform = self.KBest.fit_transform(self.dataset)
self.assertEqual(self.KBest_transform.X.shape, (self.dataset.X.shape[0], self.KBest.k))
if __name__ == '__main__':
unittest.main()
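The tests above double as API documentation; a condensed sketch of the same flow, using only calls that appear in the tests (the dataset path is the one from TestVarianceThreshold.setUp):
from si.data import Dataset
from si.data import feature_selection as fs

dataset = Dataset.from_data("datasets/lr-example1.data", labeled=True)
vt = fs.VarianceThreshold(0)          # drop features with zero variance
filtered = vt.fit_transform(dataset)  # returns a new, filtered Dataset
print(filtered.X.shape)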
| 34.516779
| 95
| 0.677037
| 672
| 5,143
| 5.052083
| 0.10119
| 0.119882
| 0.083947
| 0.076583
| 0.841826
| 0.807953
| 0.80648
| 0.7838
| 0.749043
| 0.739323
| 0
| 0.00954
| 0.205133
| 5,143
| 148
| 96
| 34.75
| 0.820939
| 0.016916
| 0
| 0.715686
| 0
| 0
| 0.032187
| 0.004998
| 0
| 0
| 0
| 0
| 0.254902
| 1
| 0.186275
| false
| 0
| 0.137255
| 0
| 0.372549
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b703fca86d4639a099979abab9448d0fdb219ca9
| 5,885
|
py
|
Python
|
getpy/getpy.py
|
atom-moyer/getpy
|
8d5a846d030d345408a4dc71793d5918521180c4
|
[
"MIT"
] | 83
|
2019-04-14T05:39:34.000Z
|
2022-03-28T18:18:03.000Z
|
getpy/getpy.py
|
atom-moyer/getpy
|
8d5a846d030d345408a4dc71793d5918521180c4
|
[
"MIT"
] | 5
|
2019-04-30T16:23:10.000Z
|
2021-03-26T12:15:52.000Z
|
getpy/getpy.py
|
atom-moyer/getpy
|
8d5a846d030d345408a4dc71793d5918521180c4
|
[
"MIT"
] | 5
|
2019-04-29T19:15:15.000Z
|
2022-03-04T19:08:50.000Z
|
from collections.abc import MutableMapping, MutableSet
from .getpy_types import dict_types
from .getpy_types import set_types
from .getpy_types import multidict_types
class Dict(MutableMapping):
def __init__(self, key_type, value_type, default_value=None, filename=None, safe_mode=False):
self.__key_type = key_type
self.__value_type = value_type
self.__dict_type = dict_types[(key_type, value_type)]
if default_value is None:
self.__dict = self.__dict_type()
else:
self.__dict = self.__dict_type(default_value)
if filename is not None:
self.__dict.load(filename)
self.__safe_mode = safe_mode
def __repr__(self):
return '{' + ', '.join(['{key} : {value}'.format(**vars()) for key, value in zip(*self.items())]) + '}'
def __getitem__(self, key):
if self.__safe_mode: assert key.dtype == self.__key_type, (key.dtype,)
return self.__dict.__getitem__(key)
def __setitem__(self, key, value):
if self.__safe_mode: assert key.dtype == self.__key_type and value.dtype == self.__value_type, (key.dtype, value.dtype)
self.__dict.__setitem__(key, value)
def iadd(self, key, value):
if self.__safe_mode: assert key.dtype == self.__key_type and value.dtype == self.__value_type, (key.dtype, value.dtype)
self.__dict.iadd(key, value)
def isub(self, key, value):
if self.__safe_mode: assert key.dtype == self.__key_type and value.dtype == self.__value_type, (key.dtype, value.dtype)
self.__dict.isub(key, value)
def ior(self, key, value):
if self.__safe_mode: assert key.dtype == self.__key_type and value.dtype == self.__value_type, (key.dtype, value.dtype)
self.__dict.ior(key, value)
def iand(self, key, value):
if self.__safe_mode: assert key.dtype == self.__key_type and value.dtype == self.__value_type, (key.dtype, value.dtype)
self.__dict.iand(key, value)
def __delitem__(self, key):
if self.__safe_mode: assert key.dtype == self.__key_type, (key.dtype,)
return self.__dict.__delitem__(key)
def contains(self, key):
if self.__safe_mode: assert key.dtype == self.__key_type, (key.dtype,)
return self.__dict.contains(key)
__contains__ = contains
def __len__(self):
return self.__dict.__len__()
def keys(self):
return self.__dict.keys()
def values(self):
return self.__dict.values()
def items(self):
return self.__dict.items()
def __iter__(self):
for key in self.keys():
yield key
def dump(self, filename):
return self.__dict.dump(filename)
def load(self, filename):
return self.__dict.load(filename)
class Set(MutableSet):
def __init__(self, key_type, filename=None, safe_mode=False):
self.__key_type = key_type
self.__set_type = set_types[key_type]
self.__set = self.__set_type()
if filename is not None:
self.__set.load(filename)
self.__safe_mode = safe_mode
def __repr__(self):
return '{' + ', '.join(['{key}'.format(**vars()) for key in self.items()]) + '}'
def add(self, key):
if self.__safe_mode: assert key.dtype == self.__key_type, (key.dtype,)
self.__set.add(key)
def discard(self, key):
if self.__safe_mode: assert key.dtype == self.__key_type, (key.dtype,)
return self.__set.discard(key)
def contains(self, key):
if self.__safe_mode: assert key.dtype == self.__key_type, (key.dtype,)
return self.__set.contains(key)
__contains__ = contains
def __len__(self):
return self.__set.__len__()
def items(self):
return self.__set.items()
def __iter__(self):
for item in self.items():
yield item
def dump(self, filename):
return self.__set.dump(filename)
def load(self, filename):
return self.__set.load(filename)
class MultiDict(MutableMapping):
def __init__(self, key_type, value_type, default_value=None, filename=None, safe_mode=False):
self.__key_type = key_type
self.__value_type = value_type
self.__dict_type = multidict_types[(key_type, value_type)]
if default_value is None:
self.__dict = self.__dict_type()
else:
self.__dict = self.__dict_type(default_value)
if filename is not None:
self.__dict.load(filename)
self.__safe_mode = safe_mode
def __repr__(self):
return '{' + ', '.join(['{key} : {value}'.format(**vars()) for key, value in zip(*self.items())]) + '}'
def __getitem__(self, key):
if self.__safe_mode: assert key.dtype == self.__key_type, (key.dtype,)
return self.__dict.__getitem__(key)
def __setitem__(self, key, value):
if self.__safe_mode: assert key.dtype == self.__key_type and value.dtype == self.__value_type, (key.dtype, value.dtype)
self.__dict.__setitem__(key, value)
def __delitem__(self, key):
if self.__safe_mode: assert key.dtype == self.__key_type, (key.dtype,)
return self.__dict.__delitem__(key)
def contains(self, key):
if self.__safe_mode: assert key.dtype == self.__key_type, (key.dtype,)
return self.__dict.contains(key)
__contains__ = contains
def __len__(self):
return self.__dict.__len__()
def keys(self):
return self.__dict.keys()
def values(self):
return self.__dict.values()
def items(self):
return self.__dict.items()
def __iter__(self):
for key in self.keys():
yield key
def dump(self, filename):
return self.__dict.dump(filename)
def load(self, filename):
return self.__dict.load(filename)
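A usage sketch for the Dict wrapper (hedged: getpy operates on NumPy arrays and keys dict_types by NumPy dtypes, per the project's examples; adjust the 'u8' dtypes to whatever your build of getpy_types supports):
import numpy as np
import getpy as gp

gp_dict = gp.Dict(np.dtype('u8'), np.dtype('u8'))

keys = np.array([1, 2, 3], dtype='u8')
values = np.array([10, 20, 30], dtype='u8')
gp_dict[keys] = values          # vectorized __setitem__
print(gp_dict[keys])            # vectorized __getitem__ -> [10 20 30]
print(gp_dict.contains(keys))   # elementwise membership -> [ True  True  True]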
| 26.509009
| 127
| 0.636873
| 779
| 5,885
| 4.310655
| 0.074454
| 0.075045
| 0.068791
| 0.062537
| 0.893687
| 0.85944
| 0.843955
| 0.843955
| 0.831745
| 0.817749
| 0
| 0
| 0.2435
| 5,885
| 221
| 128
| 26.628959
| 0.754268
| 0
| 0
| 0.748092
| 0
| 0
| 0.007986
| 0
| 0
| 0
| 0
| 0
| 0.114504
| 1
| 0.305344
| false
| 0
| 0.030534
| 0.145038
| 0.587786
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 9
|
b72a08453f9c8c421f6efe61aa9d4386f67a0fa4
| 2,711
|
py
|
Python
|
solutions/959_regions_cut_by_slashes.py
|
YiqunPeng/leetcode_pro
|
7e6376984f9baec49a5e827d98330fe3d1b656f0
|
[
"MIT"
] | null | null | null |
solutions/959_regions_cut_by_slashes.py
|
YiqunPeng/leetcode_pro
|
7e6376984f9baec49a5e827d98330fe3d1b656f0
|
[
"MIT"
] | null | null | null |
solutions/959_regions_cut_by_slashes.py
|
YiqunPeng/leetcode_pro
|
7e6376984f9baec49a5e827d98330fe3d1b656f0
|
[
"MIT"
] | null | null | null |
from typing import List

class Solution:
    def regionsBySlashes(self, grid: List[str]) -> int:
n = len(grid)
seen = set()
res = 0
for i in range(n):
for j in range(n):
if (i, j, 0) not in seen:
self._dfs(grid, i, j, 0, seen)
res += 1
if (i, j, 1) not in seen:
self._dfs(grid, i, j, 1, seen)
res += 1
return res
def _dfs(self, grid, i, j, a, seen):
if (i, j, a) in seen:
return
n = len(grid)
seen.add((i, j, a))
if grid[i][j] == ' ':
if a == 0:
self._dfs(grid, i, j, 1, seen)
if i > 0:
if grid[i-1][j] in [' ', '/']:
self._dfs(grid, i-1, j, 1, seen)
else:
self._dfs(grid, i-1, j, 0, seen)
if j > 0:
self._dfs(grid, i, j-1, 1, seen)
else:
self._dfs(grid, i, j, 0, seen)
if j + 1 < n:
self._dfs(grid, i, j+1, 0, seen)
if i + 1 < n:
if grid[i+1][j] in ['/', ' ']:
self._dfs(grid, i+1, j, 0, seen)
else:
self._dfs(grid, i+1, j, 1, seen)
elif grid[i][j] == '\\':
if a == 0:
if j > 0:
self._dfs(grid, i, j-1, 1, seen)
if i + 1 < n:
if grid[i+1][j] in ['/', ' ']:
self._dfs(grid, i+1, j, 0, seen)
else:
self._dfs(grid, i+1, j, 1, seen)
else:
if j + 1 < n:
self._dfs(grid, i, j+1, 0, seen)
if i > 0:
if grid[i-1][j] in [' ', '/']:
self._dfs(grid, i-1, j, 1, seen)
else:
self._dfs(grid, i-1, j, 0, seen)
else:
if a == 0:
if i > 0:
if grid[i-1][j] in [' ', '/']:
self._dfs(grid, i-1, j, 1, seen)
else:
self._dfs(grid, i-1, j, 0, seen)
if j > 0:
self._dfs(grid, i, j-1, 1, seen)
else:
if j + 1 < n:
self._dfs(grid, i, j+1, 0, seen)
if i + 1 < n:
if grid[i+1][j] in ['/', ' ']:
self._dfs(grid, i+1, j, 0, seen)
else:
self._dfs(grid, i+1, j, 1, seen)
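A quick check against the first example from LeetCode 959, where the grid [" /", "/ "] cuts the square into two regions:
if __name__ == '__main__':
    print(Solution().regionsBySlashes([" /", "/ "]))  # expected: 2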
| 36.635135
| 76
| 0.30284
| 346
| 2,711
| 2.306358
| 0.083815
| 0.194236
| 0.303258
| 0.330827
| 0.766917
| 0.766917
| 0.743108
| 0.689223
| 0.634085
| 0.629073
| 0
| 0.054726
| 0.555146
| 2,711
| 73
| 77
| 37.136986
| 0.606965
| 0
| 0
| 0.791667
| 0
| 0
| 0.005533
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027778
| false
| 0
| 0
| 0
| 0.069444
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b732ab5311484cea3292295bf95550f078494646
| 1,201
|
py
|
Python
|
config.py
|
wk910930/mask_rcnn_pytorch
|
21dc137f4dd75384b39a384437b5fbb18f111d9e
|
[
"MIT"
] | 5
|
2017-08-17T02:53:02.000Z
|
2021-10-19T01:44:45.000Z
|
config.py
|
wk910930/mask_rcnn_pytorch
|
21dc137f4dd75384b39a384437b5fbb18f111d9e
|
[
"MIT"
] | null | null | null |
config.py
|
wk910930/mask_rcnn_pytorch
|
21dc137f4dd75384b39a384437b5fbb18f111d9e
|
[
"MIT"
] | 4
|
2017-08-22T14:19:58.000Z
|
2021-03-09T02:04:23.000Z
|
# this is used for storing configurations of datasets & models
datasets = {
'coco-trainval35k-minival': {
'num_classes': 80,
'scale_size': 800,
'train_split': 'annotations/instances_trainval35k2014.json',
'val_split': 'annotations/instances_minival2014.json',
'test_split': 'annotations/image_info_test-dev2015.json',
},
'coco-train-minival': {
'num_classes': 80,
'scale_size': 800,
'train_split': 'annotations/instances_train2014.json',
'val_split': 'annotations/instances_minival2014.json',
'test_split': 'annotations/image_info_test-dev2015.json',
},
'coco-train-val': {
'num_classes': 80,
'scale_size': 800,
'train_split': 'annotations/instances_train2014.json',
'val_split': 'annotations/instances_val2014.json',
'test_split': 'annotations/image_info_test-dev2015.json',
},
'coco-debug': {
'num_classes': 80,
'scale_size': 600,
'train_split': 'annotations/instances_minival2014.json',
'val_split': 'annotations/instances_minival2014.json',
'test_split': 'annotations/image_info_test-dev2015.json',
},
}
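A sketch of how a config table like this is typically consumed (the 'coco-debug' key is taken from the dict above; the import path assumes the file is importable as config):
from config import datasets

cfg = datasets['coco-debug']
print(cfg['num_classes'], cfg['scale_size'])  # 80 600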
| 36.393939
| 68
| 0.64363
| 125
| 1,201
| 5.896
| 0.272
| 0.260516
| 0.27137
| 0.092266
| 0.849389
| 0.772049
| 0.772049
| 0.772049
| 0.772049
| 0.772049
| 0
| 0.076514
| 0.216486
| 1,201
| 32
| 69
| 37.53125
| 0.706695
| 0.049958
| 0
| 0.533333
| 0
| 0
| 0.640913
| 0.424934
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
b73603765b0c5bc12dd0cac44693c209958c5427
| 5,973
|
py
|
Python
|
mlchain/rpc/server/protos/mlchain_pb2_grpc.py
|
Nguyen-ATrung/mlchain-public-dev
|
7d5609d4d556e706e67d8759f2be7ffd55b25557
|
[
"MIT"
] | 1
|
2020-08-26T03:35:38.000Z
|
2020-08-26T03:35:38.000Z
|
mlchain/rpc/server/protos/mlchain_pb2_grpc.py
|
Nguyen-ATrung/mlchain-public-dev
|
7d5609d4d556e706e67d8759f2be7ffd55b25557
|
[
"MIT"
] | 4
|
2021-03-25T23:55:12.000Z
|
2022-03-12T00:36:03.000Z
|
mlchain/rpc/server/protos/mlchain_pb2_grpc.py
|
attrung/mlchain-public-travisci-test
|
9dca6ad2e05ae85068e2ca09f7bce2dc1bc916fa
|
[
"MIT"
] | 1
|
2020-08-12T08:34:42.000Z
|
2020-08-12T08:34:42.000Z
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
from . import mlchain_pb2 as mlchain__pb2
class MLChainServiceStub(object):
"""Service definition
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.get_params = channel.unary_unary(
'/MLChainService/get_params',
request_serializer=mlchain__pb2.String.SerializeToString,
response_deserializer=mlchain__pb2.Byte.FromString,
)
self.des_func = channel.unary_unary(
'/MLChainService/des_func',
request_serializer=mlchain__pb2.String.SerializeToString,
response_deserializer=mlchain__pb2.Byte.FromString,
)
self.ping = channel.unary_unary(
'/MLChainService/ping',
request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
response_deserializer=mlchain__pb2.Byte.FromString,
)
self.description = channel.unary_unary(
'/MLChainService/description',
request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
response_deserializer=mlchain__pb2.Byte.FromString,
)
self.list_all_function = channel.unary_unary(
'/MLChainService/list_all_function',
request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
response_deserializer=mlchain__pb2.Byte.FromString,
)
self.list_all_function_and_description = channel.unary_unary(
'/MLChainService/list_all_function_and_description',
request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
response_deserializer=mlchain__pb2.Byte.FromString,
)
self.call = channel.unary_unary(
'/MLChainService/call',
request_serializer=mlchain__pb2.Message.SerializeToString,
response_deserializer=mlchain__pb2.Output.FromString,
)
class MLChainServiceServicer(object):
"""Service definition
"""
def get_params(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def des_func(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ping(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def description(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def list_all_function(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def list_all_function_and_description(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def call(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_MLChainServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'get_params': grpc.unary_unary_rpc_method_handler(
servicer.get_params,
request_deserializer=mlchain__pb2.String.FromString,
response_serializer=mlchain__pb2.Byte.SerializeToString,
),
'des_func': grpc.unary_unary_rpc_method_handler(
servicer.des_func,
request_deserializer=mlchain__pb2.String.FromString,
response_serializer=mlchain__pb2.Byte.SerializeToString,
),
'ping': grpc.unary_unary_rpc_method_handler(
servicer.ping,
request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
response_serializer=mlchain__pb2.Byte.SerializeToString,
),
'description': grpc.unary_unary_rpc_method_handler(
servicer.description,
request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
response_serializer=mlchain__pb2.Byte.SerializeToString,
),
'list_all_function': grpc.unary_unary_rpc_method_handler(
servicer.list_all_function,
request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
response_serializer=mlchain__pb2.Byte.SerializeToString,
),
'list_all_function_and_description': grpc.unary_unary_rpc_method_handler(
servicer.list_all_function_and_description,
request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
response_serializer=mlchain__pb2.Byte.SerializeToString,
),
'call': grpc.unary_unary_rpc_method_handler(
servicer.call,
request_deserializer=mlchain__pb2.Message.FromString,
response_serializer=mlchain__pb2.Output.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'MLChainService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
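A client-side sketch against this service, appended under a __main__ guard (hedged: localhost:50051 is an illustrative address; the serializers above show that ping takes a protobuf Empty and returns a mlchain Byte message):
if __name__ == '__main__':
    channel = grpc.insecure_channel('localhost:50051')
    stub = MLChainServiceStub(channel)
    reply = stub.ping(google_dot_protobuf_dot_empty__pb2.Empty())
    print(reply)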
| 39.82
| 86
| 0.745856
| 643
| 5,973
| 6.564541
| 0.127527
| 0.05212
| 0.066335
| 0.042644
| 0.805496
| 0.77067
| 0.762615
| 0.704809
| 0.690595
| 0.690595
| 0
| 0.006505
| 0.176461
| 5,973
| 149
| 87
| 40.087248
| 0.851596
| 0.092248
| 0
| 0.504274
| 1
| 0
| 0.115485
| 0.035648
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0.059829
| 0.025641
| 0
| 0.119658
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
3f8605e438c9634601224a138e92e4131e397ced
| 96
|
py
|
Python
|
Basics/converters.py
|
caseysalvador/Python
|
19bc762a123e98ebbac427c69ce58925e507b045
|
[
"MIT"
] | null | null | null |
Basics/converters.py
|
caseysalvador/Python
|
19bc762a123e98ebbac427c69ce58925e507b045
|
[
"MIT"
] | null | null | null |
Basics/converters.py
|
caseysalvador/Python
|
19bc762a123e98ebbac427c69ce58925e507b045
|
[
"MIT"
] | null | null | null |
def lbs_to_kg(weight):
    # Approximate conversion: the exact factor is 1 lb = 0.45359237 kg.
    return weight * 0.45
def kg_to_lbs(weight):
    # Inverse of the same approximation, so round-trips stay consistent.
    return weight / 0.45
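A round-trip check under a __main__ guard; the trip is exact only because the same approximate constant is used in both directions:
if __name__ == '__main__':
    print(lbs_to_kg(100))             # 45.0
    print(kg_to_lbs(lbs_to_kg(100)))  # 100.0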
| 19.2
| 24
| 0.6875
| 18
| 96
| 3.444444
| 0.444444
| 0.387097
| 0.580645
| 0.612903
| 0.677419
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078947
| 0.208333
| 96
| 5
| 25
| 19.2
| 0.736842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
b75c523d77b0e60eeab592eace82ee9c374e1a23
| 40
|
py
|
Python
|
grzegorz_clients/__init__.py
|
Programvareverkstedet/grzegroz_clients
|
70a707aa97b8b544185c51d34d62dc3351641020
|
[
"BSD-3-Clause"
] | 1
|
2019-11-25T21:14:00.000Z
|
2019-11-25T21:14:00.000Z
|
grzegorz_clients/__init__.py
|
Programvareverkstedet/grzegorz_clients
|
70a707aa97b8b544185c51d34d62dc3351641020
|
[
"BSD-3-Clause"
] | null | null | null |
grzegorz_clients/__init__.py
|
Programvareverkstedet/grzegorz_clients
|
70a707aa97b8b544185c51d34d62dc3351641020
|
[
"BSD-3-Clause"
] | null | null | null |
from . import api
from . import remi_ui
| 13.333333
| 21
| 0.75
| 7
| 40
| 4.142857
| 0.714286
| 0.689655
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 40
| 2
| 22
| 20
| 0.90625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b7ebcdea68e0337b3be08767ffef649922caac33
| 3,636
|
py
|
Python
|
asq/test/test_extension.py
|
sixty-north/asq
|
389f647a7b7a2c4f64af22820713d2c3658d5068
|
[
"MIT"
] | 175
|
2016-01-09T20:07:00.000Z
|
2022-01-03T17:10:01.000Z
|
asq/test/test_extension.py
|
sixty-north/asq
|
389f647a7b7a2c4f64af22820713d2c3658d5068
|
[
"MIT"
] | 13
|
2016-01-02T14:56:48.000Z
|
2020-11-25T17:12:45.000Z
|
asq/test/test_extension.py
|
sixty-north/asq
|
389f647a7b7a2c4f64af22820713d2c3658d5068
|
[
"MIT"
] | 17
|
2016-01-02T14:57:00.000Z
|
2020-11-03T14:18:09.000Z
|
import unittest
from asq._portability import function_name
from asq.extension import add_method, extend
__author__ = 'Sixty North'
class TestExtension(unittest.TestCase):
def test_add_method_default_name(self):
class Extendee(object):
pass
instance = Extendee()
def method(self):
"This is the test extension method."
return "The result of method()"
add_method(method, Extendee)
self.assertTrue(hasattr(Extendee, "method"))
self.assertTrue(hasattr(instance, "method"))
self.assertEqual(method.__doc__, Extendee.method.__doc__)
self.assertEqual(function_name(method), function_name(Extendee.method))
self.assertEqual(instance.method(), "The result of method()")
def test_add_method_with_name(self):
class Extendee(object):
pass
instance = Extendee()
def method(self):
"This is the test extension method."
return "The result of method()"
add_method(method, Extendee, "foo")
self.assertTrue(hasattr(Extendee, "foo"))
self.assertTrue(hasattr(instance, "foo"))
self.assertEqual(method.__doc__, Extendee.foo.__doc__)
self.assertEqual(function_name(method), function_name(Extendee.foo))
self.assertEqual(instance.foo(), "The result of method()")
def test_add_method_with_existing_name(self):
class Extendee(object):
def foo(self):
return "This is the original foo"
instance = Extendee()
self.assertFalse(hasattr(Extendee, "method"))
self.assertFalse(hasattr(instance, "method"))
def method(self):
"This is the test extension method."
return "The result of method()"
self.assertRaises(ValueError, lambda: add_method(method, Extendee, "foo"))
def test_extend_decorator(self):
class Extendee(object):
pass
instance = Extendee()
@extend(Extendee)
def method(self):
"This is the test extension method."
return "The result of method()"
self.assertTrue(hasattr(Extendee, "method"))
self.assertTrue(hasattr(instance, "method"))
self.assertEqual(method.__doc__, Extendee.method.__doc__)
self.assertEqual(instance.method(), "The result of method()")
self.assertEqual(function_name(method), function_name(Extendee.method))
def test_extend_decorator_with_name(self):
class Extendee(object):
pass
instance = Extendee()
@extend(Extendee, "foo")
def method(self):
"This is the test extension method."
return "The result of method()"
self.assertTrue(hasattr(Extendee, "foo"))
self.assertTrue(hasattr(instance, "foo"))
self.assertEqual(method.__doc__, Extendee.foo.__doc__)
self.assertEqual(function_name(method), function_name(Extendee.foo))
self.assertEqual(instance.foo(), "The result of method()")
def test_extend_decorator_with_existing_name(self):
class Extendee(object):
def foo(self):
return "This is the original foo"
def perform_extension():
@extend(Extendee, "foo")
def method(self):
"This is the test extension method."
return "The result of method()"
self.assertRaises(ValueError, perform_extension)
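Outside the tests, the same decorator bolts methods onto arbitrary classes; a minimal sketch mirroring the cases above (Thing and shout are illustrative names):
from asq.extension import extend

class Thing(object):
    pass

@extend(Thing)
def shout(self):
    return "THING!"

print(Thing().shout())  # THING!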
| 29.322581
| 83
| 0.60451
| 378
| 3,636
| 5.613757
| 0.119048
| 0.080113
| 0.051838
| 0.080113
| 0.85344
| 0.839774
| 0.822337
| 0.822337
| 0.794062
| 0.67672
| 0
| 0
| 0.296205
| 3,636
| 123
| 84
| 29.560976
| 0.82923
| 0.057481
| 0
| 0.75641
| 0
| 0
| 0.154569
| 0
| 0
| 0
| 0
| 0
| 0.307692
| 1
| 0.192308
| false
| 0.051282
| 0.038462
| 0.025641
| 0.423077
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
4d49b998edd87692544a571af93740533b33f9ed
| 149
|
py
|
Python
|
pdsando/etl/schema/__init__.py
|
sando-io/pdsando
|
9f9cbf74b4ec189acb17958771149d32b737866a
|
[
"Apache-2.0"
] | null | null | null |
pdsando/etl/schema/__init__.py
|
sando-io/pdsando
|
9f9cbf74b4ec189acb17958771149d32b737866a
|
[
"Apache-2.0"
] | null | null | null |
pdsando/etl/schema/__init__.py
|
sando-io/pdsando
|
9f9cbf74b4ec189acb17958771149d32b737866a
|
[
"Apache-2.0"
] | null | null | null |
from pdsando.etl.schema.arrow import ArrowSchema
try:
from pdsando.etl.schema.spark import SparkSchema
except ModuleNotFoundError:
    # Spark support is optional; silently skip when pyspark is absent.
    pass
| 24.833333
| 52
| 0.798658
| 20
| 149
| 5.95
| 0.75
| 0.184874
| 0.235294
| 0.336134
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147651
| 149
| 6
| 53
| 24.833333
| 0.937008
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 7
|
4d67077d741d457aa72c1cb506c42b6282252969
| 307
|
py
|
Python
|
vit/formatter/scheduled.py
|
kinifwyne/vit
|
e2cbafce922b1e09c4a66e7dc9592c51fe628e9d
|
[
"MIT"
] | 179
|
2020-07-28T08:21:51.000Z
|
2022-03-30T21:39:37.000Z
|
vit/formatter/scheduled.py
|
kinifwyne/vit
|
e2cbafce922b1e09c4a66e7dc9592c51fe628e9d
|
[
"MIT"
] | 255
|
2017-02-01T11:49:12.000Z
|
2020-07-26T22:31:25.000Z
|
vit/formatter/scheduled.py
|
kinifwyne/vit
|
e2cbafce922b1e09c4a66e7dc9592c51fe628e9d
|
[
"MIT"
] | 26
|
2017-01-17T20:31:13.000Z
|
2020-06-17T13:09:01.000Z
|
from vit.formatter import DateTime
class Scheduled(DateTime):
def get_scheduled_state(self, scheduled, task):
return self.formatter.get_scheduled_state(scheduled, task)
def colorize(self, scheduled, task):
return self.colorizer.scheduled(self.get_scheduled_state(scheduled, task))
| 34.111111
| 82
| 0.762215
| 38
| 307
| 6
| 0.394737
| 0.22807
| 0.223684
| 0.201754
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149837
| 307
| 8
| 83
| 38.375
| 0.873563
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.166667
| 0.333333
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
4d7576e6726066e0586b3b533d7fc51ed4523167
| 181
|
py
|
Python
|
ScienceDynamics/datasets/__init__.py
|
data4goodlab/ScienceDynamics
|
1ba24a7a0ec64058b6095541b0ecc5d5d294b588
|
[
"MIT"
] | 1
|
2020-09-29T15:41:58.000Z
|
2020-09-29T15:41:58.000Z
|
ScienceDynamics/datasets/__init__.py
|
data4goodlab/ScienceDynamics
|
1ba24a7a0ec64058b6095541b0ecc5d5d294b588
|
[
"MIT"
] | null | null | null |
ScienceDynamics/datasets/__init__.py
|
data4goodlab/ScienceDynamics
|
1ba24a7a0ec64058b6095541b0ecc5d5d294b588
|
[
"MIT"
] | 1
|
2020-11-12T18:15:25.000Z
|
2020-11-12T18:15:25.000Z
|
from ScienceDynamics.datasets.microsoft_academic_graph import MicrosoftAcademicGraph
from ScienceDynamics.datasets.sjr import SJR
from ScienceDynamics.datasets.aminer import Aminer
| 45.25
| 84
| 0.900552
| 20
| 181
| 8.05
| 0.5
| 0.354037
| 0.503106
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066298
| 181
| 3
| 85
| 60.333333
| 0.952663
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4db6672a058c7bd11df7082e096156da202e2cb3
| 54,490
|
py
|
Python
|
ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/senderrange_15dfd9e6673a6986869f84e8c22d0879.py
|
Vibaswan/ixnetwork_restpy
|
239fedc7050890746cbabd71ea1e91c68d9e5cad
|
[
"MIT"
] | null | null | null |
ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/senderrange_15dfd9e6673a6986869f84e8c22d0879.py
|
Vibaswan/ixnetwork_restpy
|
239fedc7050890746cbabd71ea1e91c68d9e5cad
|
[
"MIT"
] | null | null | null |
ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/senderrange_15dfd9e6673a6986869f84e8c22d0879.py
|
Vibaswan/ixnetwork_restpy
|
239fedc7050890746cbabd71ea1e91c68d9e5cad
|
[
"MIT"
] | null | null | null |
# MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class SenderRange(Base):
"""Holds the information related to the originating routers for the MPLS tunnels being simulated in Ingress cases.
The SenderRange class encapsulates a list of senderRange resources that are managed by the user.
A list of resources can be retrieved from the server using the SenderRange.find() method.
The list can be managed by using the SenderRange.add() and SenderRange.remove() methods.
"""
__slots__ = ()
_SDM_NAME = 'senderRange'
_SDM_ATT_MAP = {
'AutoGenerateSessionName': 'autoGenerateSessionName',
'BackupLspIdPoolStart': 'backupLspIdPoolStart',
'Bandwidth': 'bandwidth',
'BandwidthProtectionDesired': 'bandwidthProtectionDesired',
'EnableBfdMpls': 'enableBfdMpls',
'EnableFastReroute': 'enableFastReroute',
'EnableLspPing': 'enableLspPing',
'EnablePathReoptimization': 'enablePathReoptimization',
'EnablePeriodicReEvaluationRequest': 'enablePeriodicReEvaluationRequest',
'EnableResourceAffinities': 'enableResourceAffinities',
'Enabled': 'enabled',
'ExcludeAny': 'excludeAny',
'FastRerouteBandwidth': 'fastRerouteBandwidth',
'FastRerouteDetour': 'fastRerouteDetour',
'FastRerouteExcludeAny': 'fastRerouteExcludeAny',
'FastRerouteFacilityBackupDesired': 'fastRerouteFacilityBackupDesired',
'FastRerouteHoldingPriority': 'fastRerouteHoldingPriority',
'FastRerouteHopLimit': 'fastRerouteHopLimit',
'FastRerouteIncludeAll': 'fastRerouteIncludeAll',
'FastRerouteIncludeAny': 'fastRerouteIncludeAny',
'FastRerouteOne2OneBackupDesired': 'fastRerouteOne2OneBackupDesired',
'FastRerouteSendDetour': 'fastRerouteSendDetour',
'FastRerouteSetupPriority': 'fastRerouteSetupPriority',
'HoldingPriority': 'holdingPriority',
'IncludeAll': 'includeAll',
'IncludeAny': 'includeAny',
'IpCount': 'ipCount',
'IpStart': 'ipStart',
'LabelRecordingDesired': 'labelRecordingDesired',
'LocalProtectionDesired': 'localProtectionDesired',
'LspIdCount': 'lspIdCount',
'LspIdStart': 'lspIdStart',
'MaximumPacketSize': 'maximumPacketSize',
'MinimumPolicedUnit': 'minimumPolicedUnit',
'NodeProtectionDesired': 'nodeProtectionDesired',
'PathTearTlv': 'pathTearTlv',
'PathTlv': 'pathTlv',
'PeakDataRate': 'peakDataRate',
'ReEvaluationRequestInterval': 'reEvaluationRequestInterval',
'RefreshInterval': 'refreshInterval',
'SeStyleDesired': 'seStyleDesired',
'SessionName': 'sessionName',
'SetupPriority': 'setupPriority',
'TimeoutMultiplier': 'timeoutMultiplier',
'TokenBucketRate': 'tokenBucketRate',
'TokenBucketSize': 'tokenBucketSize',
}
def __init__(self, parent):
super(SenderRange, self).__init__(parent)
@property
def TunnelHeadToLeaf(self):
"""
Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocols.tunnelheadtoleaf_e69a9a69601e0735ed9794ff412c72e6.TunnelHeadToLeaf): An instance of the TunnelHeadToLeaf class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocols.tunnelheadtoleaf_e69a9a69601e0735ed9794ff412c72e6 import TunnelHeadToLeaf
if self._properties.get('TunnelHeadToLeaf', None) is None:
return TunnelHeadToLeaf(self)
else:
return self._properties.get('TunnelHeadToLeaf')
@property
def TunnelHeadTrafficEndPoint(self):
"""
Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocols.tunnelheadtrafficendpoint_399e6e14fa13954b413c4572ebd3725e.TunnelHeadTrafficEndPoint): An instance of the TunnelHeadTrafficEndPoint class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocols.tunnelheadtrafficendpoint_399e6e14fa13954b413c4572ebd3725e import TunnelHeadTrafficEndPoint
if self._properties.get('TunnelHeadTrafficEndPoint', None) is None:
return TunnelHeadTrafficEndPoint(self)
else:
return self._properties.get('TunnelHeadTrafficEndPoint')
@property
def AutoGenerateSessionName(self):
"""
Returns
-------
- bool: If enabled, the session name is generated automatically. If it is not enabled, the session name field is activated and must be filled in.
"""
return self._get_attribute(self._SDM_ATT_MAP['AutoGenerateSessionName'])
@AutoGenerateSessionName.setter
def AutoGenerateSessionName(self, value):
self._set_attribute(self._SDM_ATT_MAP['AutoGenerateSessionName'], value)
@property
def BackupLspIdPoolStart(self):
"""
Returns
-------
- number: It helps to set the LSP Id for the re-optimized LSP.
"""
return self._get_attribute(self._SDM_ATT_MAP['BackupLspIdPoolStart'])
@BackupLspIdPoolStart.setter
def BackupLspIdPoolStart(self, value):
self._set_attribute(self._SDM_ATT_MAP['BackupLspIdPoolStart'], value)
@property
def Bandwidth(self):
"""
Returns
-------
- str: The bandwidth requested for the connection, expressed in kbits/sec.
"""
return self._get_attribute(self._SDM_ATT_MAP['Bandwidth'])
@Bandwidth.setter
def Bandwidth(self, value):
self._set_attribute(self._SDM_ATT_MAP['Bandwidth'], value)
@property
def BandwidthProtectionDesired(self):
"""
Returns
-------
- bool: Indicates that PLRs should skip at least the next node for a backup path.
"""
return self._get_attribute(self._SDM_ATT_MAP['BandwidthProtectionDesired'])
@BandwidthProtectionDesired.setter
def BandwidthProtectionDesired(self, value):
self._set_attribute(self._SDM_ATT_MAP['BandwidthProtectionDesired'], value)
@property
def EnableBfdMpls(self):
"""
Returns
-------
- bool: NOT DEFINED
"""
return self._get_attribute(self._SDM_ATT_MAP['EnableBfdMpls'])
@EnableBfdMpls.setter
def EnableBfdMpls(self, value):
self._set_attribute(self._SDM_ATT_MAP['EnableBfdMpls'], value)
@property
def EnableFastReroute(self):
"""
Returns
-------
- bool: Enables the use of the fast reroute feature.
"""
return self._get_attribute(self._SDM_ATT_MAP['EnableFastReroute'])
@EnableFastReroute.setter
def EnableFastReroute(self, value):
self._set_attribute(self._SDM_ATT_MAP['EnableFastReroute'], value)
@property
def EnableLspPing(self):
"""
Returns
-------
- bool: NOT DEFINED
"""
return self._get_attribute(self._SDM_ATT_MAP['EnableLspPing'])
@EnableLspPing.setter
def EnableLspPing(self, value):
self._set_attribute(self._SDM_ATT_MAP['EnableLspPing'], value)
@property
def EnablePathReoptimization(self):
"""
Returns
-------
- bool: If true, enables the Path Re-optimization option.
"""
return self._get_attribute(self._SDM_ATT_MAP['EnablePathReoptimization'])
@EnablePathReoptimization.setter
def EnablePathReoptimization(self, value):
self._set_attribute(self._SDM_ATT_MAP['EnablePathReoptimization'], value)
@property
def EnablePeriodicReEvaluationRequest(self):
"""
Returns
-------
- bool: If true, enables the head LSR to send periodic path re-evaluation request in every Re-Optimization Interval.
"""
return self._get_attribute(self._SDM_ATT_MAP['EnablePeriodicReEvaluationRequest'])
@EnablePeriodicReEvaluationRequest.setter
def EnablePeriodicReEvaluationRequest(self, value):
self._set_attribute(self._SDM_ATT_MAP['EnablePeriodicReEvaluationRequest'], value)
@property
def EnableResourceAffinities(self):
"""
Returns
-------
- bool: Enables the use of RSVP resource class affinities for LSP tunnels.
"""
return self._get_attribute(self._SDM_ATT_MAP['EnableResourceAffinities'])
@EnableResourceAffinities.setter
def EnableResourceAffinities(self, value):
self._set_attribute(self._SDM_ATT_MAP['EnableResourceAffinities'], value)
@property
def Enabled(self):
"""
Returns
-------
- bool: Enables the sender range entry.
"""
return self._get_attribute(self._SDM_ATT_MAP['Enabled'])
@Enabled.setter
def Enabled(self, value):
self._set_attribute(self._SDM_ATT_MAP['Enabled'], value)
@property
def ExcludeAny(self):
"""
Returns
-------
- number: Represents a set of attribute filters associated with a tunnel, any of which renders a link unacceptable.
"""
return self._get_attribute(self._SDM_ATT_MAP['ExcludeAny'])
@ExcludeAny.setter
def ExcludeAny(self, value):
self._set_attribute(self._SDM_ATT_MAP['ExcludeAny'], value)
@property
def FastRerouteBandwidth(self):
"""
Returns
-------
- str: An estimate of the bandwidth needed for the protection path.
"""
return self._get_attribute(self._SDM_ATT_MAP['FastRerouteBandwidth'])
@FastRerouteBandwidth.setter
def FastRerouteBandwidth(self, value):
self._set_attribute(self._SDM_ATT_MAP['FastRerouteBandwidth'], value)
@property
def FastRerouteDetour(self):
"""
Returns
-------
- list(dict(arg1:str,arg2:str)): Used to provide backup LSP tunnels for local repair of LSP tunnels, in the event of failure of a node or link. Contains the specifics of the detour LSPs: nodes to use and nodes to avoid.
"""
return self._get_attribute(self._SDM_ATT_MAP['FastRerouteDetour'])
@FastRerouteDetour.setter
def FastRerouteDetour(self, value):
self._set_attribute(self._SDM_ATT_MAP['FastRerouteDetour'], value)
@property
def FastRerouteExcludeAny(self):
"""
Returns
-------
- number: Capability filters used to dictate which backup paths are acceptable or unacceptable.
"""
return self._get_attribute(self._SDM_ATT_MAP['FastRerouteExcludeAny'])
@FastRerouteExcludeAny.setter
def FastRerouteExcludeAny(self, value):
self._set_attribute(self._SDM_ATT_MAP['FastRerouteExcludeAny'], value)
@property
def FastRerouteFacilityBackupDesired(self):
"""
Returns
-------
- bool: If enabled, indicates that facility backup should be used. With this method, the MPLS label stack allows the creation of a bypass tunnel to protect a set of LSPs with similar characteristics/constraints. Protects both links and nodes.
"""
return self._get_attribute(self._SDM_ATT_MAP['FastRerouteFacilityBackupDesired'])
@FastRerouteFacilityBackupDesired.setter
def FastRerouteFacilityBackupDesired(self, value):
self._set_attribute(self._SDM_ATT_MAP['FastRerouteFacilityBackupDesired'], value)
@property
def FastRerouteHoldingPriority(self):
"""
Returns
-------
- number: The priority value for the backup path, pertaining to holding resources - whether a session can be preempted BY another session.
"""
return self._get_attribute(self._SDM_ATT_MAP['FastRerouteHoldingPriority'])
@FastRerouteHoldingPriority.setter
def FastRerouteHoldingPriority(self, value):
self._set_attribute(self._SDM_ATT_MAP['FastRerouteHoldingPriority'], value)
@property
def FastRerouteHopLimit(self):
"""
Returns
-------
- number: Indicates the number of extra hops that may be added by a protection path.
"""
return self._get_attribute(self._SDM_ATT_MAP['FastRerouteHopLimit'])
@FastRerouteHopLimit.setter
def FastRerouteHopLimit(self, value):
self._set_attribute(self._SDM_ATT_MAP['FastRerouteHopLimit'], value)
@property
def FastRerouteIncludeAll(self):
"""
Returns
-------
- number: Capability filters used to dictate which backup paths are acceptable or unacceptable.
"""
return self._get_attribute(self._SDM_ATT_MAP['FastRerouteIncludeAll'])
@FastRerouteIncludeAll.setter
def FastRerouteIncludeAll(self, value):
self._set_attribute(self._SDM_ATT_MAP['FastRerouteIncludeAll'], value)
@property
def FastRerouteIncludeAny(self):
"""
Returns
-------
- number: Capability filters used to dictate which backup paths are acceptable or unacceptable.
"""
return self._get_attribute(self._SDM_ATT_MAP['FastRerouteIncludeAny'])
@FastRerouteIncludeAny.setter
def FastRerouteIncludeAny(self, value):
self._set_attribute(self._SDM_ATT_MAP['FastRerouteIncludeAny'], value)
@property
def FastRerouteOne2OneBackupDesired(self):
"""
Returns
-------
- bool: If enabled, indicates that one-to-one backup should be used. With this method, one detour LSP will be created for each protected LSP for each place where the LSP could potentially be repaired locally. Protects both links and nodes.
"""
return self._get_attribute(self._SDM_ATT_MAP['FastRerouteOne2OneBackupDesired'])
@FastRerouteOne2OneBackupDesired.setter
def FastRerouteOne2OneBackupDesired(self, value):
self._set_attribute(self._SDM_ATT_MAP['FastRerouteOne2OneBackupDesired'], value)
@property
def FastRerouteSendDetour(self):
"""
Returns
-------
- bool: Enables the generation of a DETOUR object for one to one operation.
"""
return self._get_attribute(self._SDM_ATT_MAP['FastRerouteSendDetour'])
@FastRerouteSendDetour.setter
def FastRerouteSendDetour(self, value):
self._set_attribute(self._SDM_ATT_MAP['FastRerouteSendDetour'], value)
@property
def FastRerouteSetupPriority(self):
"""
Returns
-------
- number: Indicate the priority for taking and holding resources along the backup path.
"""
return self._get_attribute(self._SDM_ATT_MAP['FastRerouteSetupPriority'])
@FastRerouteSetupPriority.setter
def FastRerouteSetupPriority(self, value):
self._set_attribute(self._SDM_ATT_MAP['FastRerouteSetupPriority'], value)
@property
def HoldingPriority(self):
"""
Returns
-------
- number: Priority in holding onto resources. Range is 0 to 7, with 0 the highest priority.
"""
return self._get_attribute(self._SDM_ATT_MAP['HoldingPriority'])
@HoldingPriority.setter
def HoldingPriority(self, value):
self._set_attribute(self._SDM_ATT_MAP['HoldingPriority'], value)
@property
def IncludeAll(self):
"""
Returns
-------
- number: 32-bit value. Represents a set of attribute filters associated with a tunnel, all of which must be present for a link to be acceptable (with respect to this test). When all bits are set to 0 (null set), it automatically passes.
"""
return self._get_attribute(self._SDM_ATT_MAP['IncludeAll'])
@IncludeAll.setter
def IncludeAll(self, value):
self._set_attribute(self._SDM_ATT_MAP['IncludeAll'], value)
@property
def IncludeAny(self):
"""
Returns
-------
- number: 32-bit value. Represents a set of attribute filters associated with a tunnel, any of which makes a link acceptable (with respect to this test). When all bits are set to 0 (null set), it automatically passes.
"""
return self._get_attribute(self._SDM_ATT_MAP['IncludeAny'])
@IncludeAny.setter
def IncludeAny(self, value):
self._set_attribute(self._SDM_ATT_MAP['IncludeAny'], value)
@property
def IpCount(self):
"""
Returns
-------
- number: The number of routers in the destination range.
"""
return self._get_attribute(self._SDM_ATT_MAP['IpCount'])
@IpCount.setter
def IpCount(self, value):
self._set_attribute(self._SDM_ATT_MAP['IpCount'], value)
@property
def IpStart(self):
"""
Returns
-------
- str: The IP address of the first destination router.
"""
return self._get_attribute(self._SDM_ATT_MAP['IpStart'])
@IpStart.setter
def IpStart(self, value):
self._set_attribute(self._SDM_ATT_MAP['IpStart'], value)
@property
def LabelRecordingDesired(self):
"""
Returns
-------
- bool: If enabled, indicates that label information is to be included when doing a route record.
"""
return self._get_attribute(self._SDM_ATT_MAP['LabelRecordingDesired'])
@LabelRecordingDesired.setter
def LabelRecordingDesired(self, value):
self._set_attribute(self._SDM_ATT_MAP['LabelRecordingDesired'], value)
@property
def LocalProtectionDesired(self):
"""
Returns
-------
- bool: (Enabled by default) This permits transit routers to use a local traffic rerouting repair mechanism in the event of a fault on an adjacent downstream link or node. This may result in a violation of the explicit route object.
"""
return self._get_attribute(self._SDM_ATT_MAP['LocalProtectionDesired'])
@LocalProtectionDesired.setter
def LocalProtectionDesired(self, value):
self._set_attribute(self._SDM_ATT_MAP['LocalProtectionDesired'], value)
@property
def LspIdCount(self):
"""
Returns
-------
- number: The number of LSP IDs in the range.
"""
return self._get_attribute(self._SDM_ATT_MAP['LspIdCount'])
@LspIdCount.setter
def LspIdCount(self, value):
self._set_attribute(self._SDM_ATT_MAP['LspIdCount'], value)
@property
def LspIdStart(self):
"""
Returns
-------
- number: The first label-switched path ID (LSP ID) value in the range of LSP IDs.
"""
return self._get_attribute(self._SDM_ATT_MAP['LspIdStart'])
@LspIdStart.setter
def LspIdStart(self, value):
self._set_attribute(self._SDM_ATT_MAP['LspIdStart'], value)
@property
def MaximumPacketSize(self):
"""
Returns
-------
- number: 32-bit integer. The maximum number of bytes allowed to cross the interface in a transmitted packet.
"""
return self._get_attribute(self._SDM_ATT_MAP['MaximumPacketSize'])
@MaximumPacketSize.setter
def MaximumPacketSize(self, value):
self._set_attribute(self._SDM_ATT_MAP['MaximumPacketSize'], value)
@property
def MinimumPolicedUnit(self):
"""
Returns
-------
- number: 32-bit integer. The minimum allowable size for a policed unit.
"""
return self._get_attribute(self._SDM_ATT_MAP['MinimumPolicedUnit'])
@MinimumPolicedUnit.setter
def MinimumPolicedUnit(self, value):
self._set_attribute(self._SDM_ATT_MAP['MinimumPolicedUnit'], value)
@property
def NodeProtectionDesired(self):
"""
Returns
-------
- bool: For Fast Reroute - if enabled, sets the Node Protection Desired Flag in the Session_Attribute object of the RRO message. It indicates to PLRs associated with the protected LSP path, that a backup path is desired that bypasses (avoids) at least the next node on the LSP.
"""
return self._get_attribute(self._SDM_ATT_MAP['NodeProtectionDesired'])
@NodeProtectionDesired.setter
def NodeProtectionDesired(self, value):
self._set_attribute(self._SDM_ATT_MAP['NodeProtectionDesired'], value)
@property
def PathTearTlv(self):
"""
Returns
-------
- list(dict(arg1:number,arg2:number,arg3:str)): A set of custom TLVs to be included in TEAR messages, constructed with the rsvpCustomTlv command.
"""
return self._get_attribute(self._SDM_ATT_MAP['PathTearTlv'])
@PathTearTlv.setter
def PathTearTlv(self, value):
self._set_attribute(self._SDM_ATT_MAP['PathTearTlv'], value)
@property
def PathTlv(self):
"""
Returns
-------
- list(dict(arg1:number,arg2:number,arg3:str)): A set of custom TLVs to be included in PATH messages, constructed with the rsvpCustomTlv command.
"""
return self._get_attribute(self._SDM_ATT_MAP['PathTlv'])
@PathTlv.setter
def PathTlv(self, value):
self._set_attribute(self._SDM_ATT_MAP['PathTlv'], value)
@property
def PeakDataRate(self):
"""
Returns
-------
- number: The maximum traffic rate that can be maintained. The policing mechanism allows some burstiness, but restricts it so the overall packet transmission rate is less than the rate at which tokens.
"""
return self._get_attribute(self._SDM_ATT_MAP['PeakDataRate'])
@PeakDataRate.setter
def PeakDataRate(self, value):
self._set_attribute(self._SDM_ATT_MAP['PeakDataRate'], value)
@property
def ReEvaluationRequestInterval(self):
"""
Returns
-------
- number: Represents the time period (in milliseconds) at which the path re-evaluation request is sent by the head LSR. The default value is: 180000 ms (3 mins).
"""
return self._get_attribute(self._SDM_ATT_MAP['ReEvaluationRequestInterval'])
@ReEvaluationRequestInterval.setter
def ReEvaluationRequestInterval(self, value):
self._set_attribute(self._SDM_ATT_MAP['ReEvaluationRequestInterval'], value)
@property
def RefreshInterval(self):
"""
Returns
-------
- number: The interval between summary refresh messages.
"""
return self._get_attribute(self._SDM_ATT_MAP['RefreshInterval'])
@RefreshInterval.setter
def RefreshInterval(self, value):
self._set_attribute(self._SDM_ATT_MAP['RefreshInterval'], value)
@property
def SeStyleDesired(self):
"""
Returns
-------
- bool: This indicates that the tunnel ingress node may reroute this tunnel without tearing it down. A tunnel egress node should use the SE Style when responding with an RESV message.
"""
return self._get_attribute(self._SDM_ATT_MAP['SeStyleDesired'])
@SeStyleDesired.setter
def SeStyleDesired(self, value):
self._set_attribute(self._SDM_ATT_MAP['SeStyleDesired'], value)
@property
def SessionName(self):
"""
Returns
-------
- str: If enableAutoSessionName is not set, this is the name assigned to this session.
"""
return self._get_attribute(self._SDM_ATT_MAP['SessionName'])
@SessionName.setter
def SessionName(self, value):
self._set_attribute(self._SDM_ATT_MAP['SessionName'], value)
@property
def SetupPriority(self):
"""
Returns
-------
- number: This is the session priority with respect to taking resources, such as preempting another session. The valid range is from 0 to 7. The highest priority is indicated by 0.
"""
return self._get_attribute(self._SDM_ATT_MAP['SetupPriority'])
@SetupPriority.setter
def SetupPriority(self, value):
self._set_attribute(self._SDM_ATT_MAP['SetupPriority'], value)
@property
def TimeoutMultiplier(self):
"""
Returns
-------
- number: The number of Hellos before a neighbor is declared dead.
"""
return self._get_attribute(self._SDM_ATT_MAP['TimeoutMultiplier'])
@TimeoutMultiplier.setter
def TimeoutMultiplier(self, value):
self._set_attribute(self._SDM_ATT_MAP['TimeoutMultiplier'], value)
@property
def TokenBucketRate(self):
"""
Returns
-------
- number: The rate of transfer for data in a flow. In this application, it is used with a traffic policing mechanism. The data tokens enter the bucket, filling the bucket. The data from a number of tokens is combined to form and send a packet. The goal is to determine a rate which will not overflow the specified token bucket size, and cause new data (tokens) to be rejected/discarded.
"""
return self._get_attribute(self._SDM_ATT_MAP['TokenBucketRate'])
@TokenBucketRate.setter
def TokenBucketRate(self, value):
self._set_attribute(self._SDM_ATT_MAP['TokenBucketRate'], value)
@property
def TokenBucketSize(self):
"""
Returns
-------
- number: The maximum capacity (in bytes) the token bucket can hold, and above which newly received tokens cannot be processed and are discarded.
"""
return self._get_attribute(self._SDM_ATT_MAP['TokenBucketSize'])
@TokenBucketSize.setter
def TokenBucketSize(self, value):
self._set_attribute(self._SDM_ATT_MAP['TokenBucketSize'], value)
def update(self, AutoGenerateSessionName=None, BackupLspIdPoolStart=None, Bandwidth=None, BandwidthProtectionDesired=None, EnableBfdMpls=None, EnableFastReroute=None, EnableLspPing=None, EnablePathReoptimization=None, EnablePeriodicReEvaluationRequest=None, EnableResourceAffinities=None, Enabled=None, ExcludeAny=None, FastRerouteBandwidth=None, FastRerouteDetour=None, FastRerouteExcludeAny=None, FastRerouteFacilityBackupDesired=None, FastRerouteHoldingPriority=None, FastRerouteHopLimit=None, FastRerouteIncludeAll=None, FastRerouteIncludeAny=None, FastRerouteOne2OneBackupDesired=None, FastRerouteSendDetour=None, FastRerouteSetupPriority=None, HoldingPriority=None, IncludeAll=None, IncludeAny=None, IpCount=None, IpStart=None, LabelRecordingDesired=None, LocalProtectionDesired=None, LspIdCount=None, LspIdStart=None, MaximumPacketSize=None, MinimumPolicedUnit=None, NodeProtectionDesired=None, PathTearTlv=None, PathTlv=None, PeakDataRate=None, ReEvaluationRequestInterval=None, RefreshInterval=None, SeStyleDesired=None, SessionName=None, SetupPriority=None, TimeoutMultiplier=None, TokenBucketRate=None, TokenBucketSize=None):
"""Updates senderRange resource on the server.
Args
----
- AutoGenerateSessionName (bool): If enabled, the session name is generated automatically. If it is not enabled, the session name field is activated and must be filled in.
        - BackupLspIdPoolStart (number): The starting LSP ID to use for the re-optimized LSP.
- Bandwidth (str): The bandwidth requested for the connection, expressed in kbits/sec.
- BandwidthProtectionDesired (bool): Indicates that PLRs should skip at least the next node for a backup path.
- EnableBfdMpls (bool): NOT DEFINED
- EnableFastReroute (bool): Enables the use of the fast reroute feature.
- EnableLspPing (bool): NOT DEFINED
- EnablePathReoptimization (bool): If true, enables the Path Re-optimization option.
- EnablePeriodicReEvaluationRequest (bool): If true, enables the head LSR to send periodic path re-evaluation request in every Re-Optimization Interval.
- EnableResourceAffinities (bool): Enables the use of RSVP resource class affinities for LSP tunnels.
- Enabled (bool): Enables the sender range entry.
- ExcludeAny (number): Represents a set of attribute filters associated with a tunnel, any of which renders a link unacceptable.
- FastRerouteBandwidth (str): An estimate of the bandwidth needed for the protection path.
- FastRerouteDetour (list(dict(arg1:str,arg2:str))): Used to provide backup LSP tunnels for local repair of LSP tunnels, in the event of failure of a node or link. Contains the specifics of the detour LSPs: nodes to use and nodes to avoid.
- FastRerouteExcludeAny (number): Capability filters used to dictate which backup paths are acceptable or unacceptable.
- FastRerouteFacilityBackupDesired (bool): If enabled, indicates that facility backup should be used. With this method, the MPLS label stack allows the creation of a bypass tunnel to protect a set of LSPs with similar characteristics/constraints. Protects both links and nodes.
- FastRerouteHoldingPriority (number): The priority value for the backup path, pertaining to holding resources - whether a session can be preempted BY another session.
- FastRerouteHopLimit (number): Indicates the number of extra hops that may be added by a protection path.
- FastRerouteIncludeAll (number): Capability filters used to dictate which backup paths are acceptable or unacceptable.
- FastRerouteIncludeAny (number): Capability filters used to dictate which backup paths are acceptable or unacceptable.
- FastRerouteOne2OneBackupDesired (bool): If enabled, indicates that one-to-one backup should be used. With this method, one detour LSP will be created for each protected LSP for each place where the LSP could potentially be repaired locally. Protects both links and nodes.
- FastRerouteSendDetour (bool): Enables the generation of a DETOUR object for one to one operation.
        - FastRerouteSetupPriority (number): Indicates the priority for taking and holding resources along the backup path.
- HoldingPriority (number): Priority in holding onto resources. Range is 0 to 7, with 0 the highest priority.
- IncludeAll (number): 32-bit value. Represents a set of attribute filters associated with a tunnel, all of which must be present for a link to be acceptable (with respect to this test). When all bits are set to 0 (null set), it automatically passes.
- IncludeAny (number): 32-bit value. Represents a set of attribute filters associated with a tunnel, any of which makes a link acceptable (with respect to this test). When all bits are set to 0 (null set), it automatically passes.
- IpCount (number): The number of routers in the destination range.
- IpStart (str): The IP address of the first destination router.
- LabelRecordingDesired (bool): If enabled, indicates that label information is to be included when doing a route record.
- LocalProtectionDesired (bool): (Enabled by default) This permits transit routers to use a local traffic rerouting repair mechanism in the event of a fault on an adjacent downstream link or node. This may result in a violation of the explicit route object.
- LspIdCount (number): The number of LSP IDs in the range.
- LspIdStart (number): The first label-switched path ID (LSP ID) value in the range of LSP IDs.
- MaximumPacketSize (number): 32-bit integer. The maximum number of bytes allowed to cross the interface in a transmitted packet.
- MinimumPolicedUnit (number): 32-bit integer. The minimum allowable size for a policed unit.
- NodeProtectionDesired (bool): For Fast Reroute - if enabled, sets the Node Protection Desired Flag in the Session_Attribute object of the RRO message. It indicates to PLRs associated with the protected LSP path, that a backup path is desired that bypasses (avoids) at least the next node on the LSP.
- PathTearTlv (list(dict(arg1:number,arg2:number,arg3:str))): A set of custom TLVs to be included in TEAR messages, constructed with the rsvpCustomTlv command.
- PathTlv (list(dict(arg1:number,arg2:number,arg3:str))): A set of custom TLVs to be included in PATH messages, constructed with the rsvpCustomTlv command.
        - PeakDataRate (number): The maximum traffic rate that can be maintained. The policing mechanism allows some burstiness, but restricts it so that the overall packet transmission rate is less than the rate at which tokens are added to the bucket.
        - ReEvaluationRequestInterval (number): The time period (in milliseconds) at which the path re-evaluation request is sent by the head LSR. The default value is 180000 ms (3 minutes).
- RefreshInterval (number): The interval between summary refresh messages.
        - SeStyleDesired (bool): This indicates that the tunnel ingress node may reroute this tunnel without tearing it down. A tunnel egress node should use the SE Style when responding with a RESV message.
- SessionName (str): If enableAutoSessionName is not set, this is the name assigned to this session.
- SetupPriority (number): This is the session priority with respect to taking resources, such as preempting another session. The valid range is from 0 to 7. The highest priority is indicated by 0.
- TimeoutMultiplier (number): The number of Hellos before a neighbor is declared dead.
        - TokenBucketRate (number): The rate of transfer for data in a flow. In this application, it is used with a traffic policing mechanism. The data tokens enter the bucket, filling the bucket. The data from a number of tokens is combined to form and send a packet. The goal is to determine a rate that will not overflow the specified token bucket size, which would cause new data (tokens) to be rejected/discarded.
- TokenBucketSize (number): The maximum capacity (in bytes) the token bucket can hold, and above which newly received tokens cannot be processed and are discarded.
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def add(self, AutoGenerateSessionName=None, BackupLspIdPoolStart=None, Bandwidth=None, BandwidthProtectionDesired=None, EnableBfdMpls=None, EnableFastReroute=None, EnableLspPing=None, EnablePathReoptimization=None, EnablePeriodicReEvaluationRequest=None, EnableResourceAffinities=None, Enabled=None, ExcludeAny=None, FastRerouteBandwidth=None, FastRerouteDetour=None, FastRerouteExcludeAny=None, FastRerouteFacilityBackupDesired=None, FastRerouteHoldingPriority=None, FastRerouteHopLimit=None, FastRerouteIncludeAll=None, FastRerouteIncludeAny=None, FastRerouteOne2OneBackupDesired=None, FastRerouteSendDetour=None, FastRerouteSetupPriority=None, HoldingPriority=None, IncludeAll=None, IncludeAny=None, IpCount=None, IpStart=None, LabelRecordingDesired=None, LocalProtectionDesired=None, LspIdCount=None, LspIdStart=None, MaximumPacketSize=None, MinimumPolicedUnit=None, NodeProtectionDesired=None, PathTearTlv=None, PathTlv=None, PeakDataRate=None, ReEvaluationRequestInterval=None, RefreshInterval=None, SeStyleDesired=None, SessionName=None, SetupPriority=None, TimeoutMultiplier=None, TokenBucketRate=None, TokenBucketSize=None):
"""Adds a new senderRange resource on the server and adds it to the container.
Args
----
- AutoGenerateSessionName (bool): If enabled, the session name is generated automatically. If it is not enabled, the session name field is activated and must be filled in.
        - BackupLspIdPoolStart (number): The starting LSP ID to use for the re-optimized LSP.
- Bandwidth (str): The bandwidth requested for the connection, expressed in kbits/sec.
- BandwidthProtectionDesired (bool): Indicates that PLRs should skip at least the next node for a backup path.
- EnableBfdMpls (bool): NOT DEFINED
- EnableFastReroute (bool): Enables the use of the fast reroute feature.
- EnableLspPing (bool): NOT DEFINED
- EnablePathReoptimization (bool): If true, enables the Path Re-optimization option.
- EnablePeriodicReEvaluationRequest (bool): If true, enables the head LSR to send periodic path re-evaluation request in every Re-Optimization Interval.
- EnableResourceAffinities (bool): Enables the use of RSVP resource class affinities for LSP tunnels.
- Enabled (bool): Enables the sender range entry.
- ExcludeAny (number): Represents a set of attribute filters associated with a tunnel, any of which renders a link unacceptable.
- FastRerouteBandwidth (str): An estimate of the bandwidth needed for the protection path.
- FastRerouteDetour (list(dict(arg1:str,arg2:str))): Used to provide backup LSP tunnels for local repair of LSP tunnels, in the event of failure of a node or link. Contains the specifics of the detour LSPs: nodes to use and nodes to avoid.
- FastRerouteExcludeAny (number): Capability filters used to dictate which backup paths are acceptable or unacceptable.
- FastRerouteFacilityBackupDesired (bool): If enabled, indicates that facility backup should be used. With this method, the MPLS label stack allows the creation of a bypass tunnel to protect a set of LSPs with similar characteristics/constraints. Protects both links and nodes.
- FastRerouteHoldingPriority (number): The priority value for the backup path, pertaining to holding resources - whether a session can be preempted BY another session.
- FastRerouteHopLimit (number): Indicates the number of extra hops that may be added by a protection path.
- FastRerouteIncludeAll (number): Capability filters used to dictate which backup paths are acceptable or unacceptable.
- FastRerouteIncludeAny (number): Capability filters used to dictate which backup paths are acceptable or unacceptable.
- FastRerouteOne2OneBackupDesired (bool): If enabled, indicates that one-to-one backup should be used. With this method, one detour LSP will be created for each protected LSP for each place where the LSP could potentially be repaired locally. Protects both links and nodes.
- FastRerouteSendDetour (bool): Enables the generation of a DETOUR object for one to one operation.
        - FastRerouteSetupPriority (number): Indicates the priority for taking and holding resources along the backup path.
- HoldingPriority (number): Priority in holding onto resources. Range is 0 to 7, with 0 the highest priority.
- IncludeAll (number): 32-bit value. Represents a set of attribute filters associated with a tunnel, all of which must be present for a link to be acceptable (with respect to this test). When all bits are set to 0 (null set), it automatically passes.
- IncludeAny (number): 32-bit value. Represents a set of attribute filters associated with a tunnel, any of which makes a link acceptable (with respect to this test). When all bits are set to 0 (null set), it automatically passes.
- IpCount (number): The number of routers in the destination range.
- IpStart (str): The IP address of the first destination router.
- LabelRecordingDesired (bool): If enabled, indicates that label information is to be included when doing a route record.
- LocalProtectionDesired (bool): (Enabled by default) This permits transit routers to use a local traffic rerouting repair mechanism in the event of a fault on an adjacent downstream link or node. This may result in a violation of the explicit route object.
- LspIdCount (number): The number of LSP IDs in the range.
- LspIdStart (number): The first label-switched path ID (LSP ID) value in the range of LSP IDs.
- MaximumPacketSize (number): 32-bit integer. The maximum number of bytes allowed to cross the interface in a transmitted packet.
- MinimumPolicedUnit (number): 32-bit integer. The minimum allowable size for a policed unit.
- NodeProtectionDesired (bool): For Fast Reroute - if enabled, sets the Node Protection Desired Flag in the Session_Attribute object of the RRO message. It indicates to PLRs associated with the protected LSP path, that a backup path is desired that bypasses (avoids) at least the next node on the LSP.
- PathTearTlv (list(dict(arg1:number,arg2:number,arg3:str))): A set of custom TLVs to be included in TEAR messages, constructed with the rsvpCustomTlv command.
- PathTlv (list(dict(arg1:number,arg2:number,arg3:str))): A set of custom TLVs to be included in PATH messages, constructed with the rsvpCustomTlv command.
        - PeakDataRate (number): The maximum traffic rate that can be maintained. The policing mechanism allows some burstiness, but restricts it so that the overall packet transmission rate is less than the rate at which tokens are added to the bucket.
        - ReEvaluationRequestInterval (number): The time period (in milliseconds) at which the path re-evaluation request is sent by the head LSR. The default value is 180000 ms (3 minutes).
- RefreshInterval (number): The interval between summary refresh messages.
        - SeStyleDesired (bool): This indicates that the tunnel ingress node may reroute this tunnel without tearing it down. A tunnel egress node should use the SE Style when responding with a RESV message.
- SessionName (str): If enableAutoSessionName is not set, this is the name assigned to this session.
- SetupPriority (number): This is the session priority with respect to taking resources, such as preempting another session. The valid range is from 0 to 7. The highest priority is indicated by 0.
- TimeoutMultiplier (number): The number of Hellos before a neighbor is declared dead.
        - TokenBucketRate (number): The rate of transfer for data in a flow. In this application, it is used with a traffic policing mechanism. The data tokens enter the bucket, filling the bucket. The data from a number of tokens is combined to form and send a packet. The goal is to determine a rate that will not overflow the specified token bucket size, which would cause new data (tokens) to be rejected/discarded.
- TokenBucketSize (number): The maximum capacity (in bytes) the token bucket can hold, and above which newly received tokens cannot be processed and are discarded.
Returns
-------
- self: This instance with all currently retrieved senderRange resources using find and the newly added senderRange resources available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))
def remove(self):
"""Deletes all the contained senderRange resources in this instance from the server.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
self._delete()
def find(self, AutoGenerateSessionName=None, BackupLspIdPoolStart=None, Bandwidth=None, BandwidthProtectionDesired=None, EnableBfdMpls=None, EnableFastReroute=None, EnableLspPing=None, EnablePathReoptimization=None, EnablePeriodicReEvaluationRequest=None, EnableResourceAffinities=None, Enabled=None, ExcludeAny=None, FastRerouteBandwidth=None, FastRerouteDetour=None, FastRerouteExcludeAny=None, FastRerouteFacilityBackupDesired=None, FastRerouteHoldingPriority=None, FastRerouteHopLimit=None, FastRerouteIncludeAll=None, FastRerouteIncludeAny=None, FastRerouteOne2OneBackupDesired=None, FastRerouteSendDetour=None, FastRerouteSetupPriority=None, HoldingPriority=None, IncludeAll=None, IncludeAny=None, IpCount=None, IpStart=None, LabelRecordingDesired=None, LocalProtectionDesired=None, LspIdCount=None, LspIdStart=None, MaximumPacketSize=None, MinimumPolicedUnit=None, NodeProtectionDesired=None, PathTearTlv=None, PathTlv=None, PeakDataRate=None, ReEvaluationRequestInterval=None, RefreshInterval=None, SeStyleDesired=None, SessionName=None, SetupPriority=None, TimeoutMultiplier=None, TokenBucketRate=None, TokenBucketSize=None):
"""Finds and retrieves senderRange resources from the server.
All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve senderRange resources from the server.
        To retrieve an exact match, ensure the parameter value starts with ^ and ends with $.
By default the find method takes no parameters and will retrieve all senderRange resources from the server.
Args
----
- AutoGenerateSessionName (bool): If enabled, the session name is generated automatically. If it is not enabled, the session name field is activated and must be filled in.
        - BackupLspIdPoolStart (number): The starting LSP ID to use for the re-optimized LSP.
- Bandwidth (str): The bandwidth requested for the connection, expressed in kbits/sec.
- BandwidthProtectionDesired (bool): Indicates that PLRs should skip at least the next node for a backup path.
- EnableBfdMpls (bool): NOT DEFINED
- EnableFastReroute (bool): Enables the use of the fast reroute feature.
- EnableLspPing (bool): NOT DEFINED
- EnablePathReoptimization (bool): If true, enables the Path Re-optimization option.
- EnablePeriodicReEvaluationRequest (bool): If true, enables the head LSR to send periodic path re-evaluation request in every Re-Optimization Interval.
- EnableResourceAffinities (bool): Enables the use of RSVP resource class affinities for LSP tunnels.
- Enabled (bool): Enables the sender range entry.
- ExcludeAny (number): Represents a set of attribute filters associated with a tunnel, any of which renders a link unacceptable.
- FastRerouteBandwidth (str): An estimate of the bandwidth needed for the protection path.
- FastRerouteDetour (list(dict(arg1:str,arg2:str))): Used to provide backup LSP tunnels for local repair of LSP tunnels, in the event of failure of a node or link. Contains the specifics of the detour LSPs: nodes to use and nodes to avoid.
- FastRerouteExcludeAny (number): Capability filters used to dictate which backup paths are acceptable or unacceptable.
- FastRerouteFacilityBackupDesired (bool): If enabled, indicates that facility backup should be used. With this method, the MPLS label stack allows the creation of a bypass tunnel to protect a set of LSPs with similar characteristics/constraints. Protects both links and nodes.
- FastRerouteHoldingPriority (number): The priority value for the backup path, pertaining to holding resources - whether a session can be preempted BY another session.
- FastRerouteHopLimit (number): Indicates the number of extra hops that may be added by a protection path.
- FastRerouteIncludeAll (number): Capability filters used to dictate which backup paths are acceptable or unacceptable.
- FastRerouteIncludeAny (number): Capability filters used to dictate which backup paths are acceptable or unacceptable.
- FastRerouteOne2OneBackupDesired (bool): If enabled, indicates that one-to-one backup should be used. With this method, one detour LSP will be created for each protected LSP for each place where the LSP could potentially be repaired locally. Protects both links and nodes.
- FastRerouteSendDetour (bool): Enables the generation of a DETOUR object for one to one operation.
        - FastRerouteSetupPriority (number): Indicates the priority for taking and holding resources along the backup path.
- HoldingPriority (number): Priority in holding onto resources. Range is 0 to 7, with 0 the highest priority.
- IncludeAll (number): 32-bit value. Represents a set of attribute filters associated with a tunnel, all of which must be present for a link to be acceptable (with respect to this test). When all bits are set to 0 (null set), it automatically passes.
- IncludeAny (number): 32-bit value. Represents a set of attribute filters associated with a tunnel, any of which makes a link acceptable (with respect to this test). When all bits are set to 0 (null set), it automatically passes.
- IpCount (number): The number of routers in the destination range.
- IpStart (str): The IP address of the first destination router.
- LabelRecordingDesired (bool): If enabled, indicates that label information is to be included when doing a route record.
- LocalProtectionDesired (bool): (Enabled by default) This permits transit routers to use a local traffic rerouting repair mechanism in the event of a fault on an adjacent downstream link or node. This may result in a violation of the explicit route object.
- LspIdCount (number): The number of LSP IDs in the range.
- LspIdStart (number): The first label-switched path ID (LSP ID) value in the range of LSP IDs.
- MaximumPacketSize (number): 32-bit integer. The maximum number of bytes allowed to cross the interface in a transmitted packet.
- MinimumPolicedUnit (number): 32-bit integer. The minimum allowable size for a policed unit.
- NodeProtectionDesired (bool): For Fast Reroute - if enabled, sets the Node Protection Desired Flag in the Session_Attribute object of the RRO message. It indicates to PLRs associated with the protected LSP path, that a backup path is desired that bypasses (avoids) at least the next node on the LSP.
- PathTearTlv (list(dict(arg1:number,arg2:number,arg3:str))): A set of custom TLVs to be included in TEAR messages, constructed with the rsvpCustomTlv command.
- PathTlv (list(dict(arg1:number,arg2:number,arg3:str))): A set of custom TLVs to be included in PATH messages, constructed with the rsvpCustomTlv command.
        - PeakDataRate (number): The maximum traffic rate that can be maintained. The policing mechanism allows some burstiness, but restricts it so that the overall packet transmission rate is less than the rate at which tokens are added to the bucket.
        - ReEvaluationRequestInterval (number): The time period (in milliseconds) at which the path re-evaluation request is sent by the head LSR. The default value is 180000 ms (3 minutes).
- RefreshInterval (number): The interval between summary refresh messages.
        - SeStyleDesired (bool): This indicates that the tunnel ingress node may reroute this tunnel without tearing it down. A tunnel egress node should use the SE Style when responding with a RESV message.
- SessionName (str): If enableAutoSessionName is not set, this is the name assigned to this session.
- SetupPriority (number): This is the session priority with respect to taking resources, such as preempting another session. The valid range is from 0 to 7. The highest priority is indicated by 0.
- TimeoutMultiplier (number): The number of Hellos before a neighbor is declared dead.
        - TokenBucketRate (number): The rate of transfer for data in a flow. In this application, it is used with a traffic policing mechanism. The data tokens enter the bucket, filling the bucket. The data from a number of tokens is combined to form and send a packet. The goal is to determine a rate that will not overflow the specified token bucket size, which would cause new data (tokens) to be rejected/discarded.
- TokenBucketSize (number): The maximum capacity (in bytes) the token bucket can hold, and above which newly received tokens cannot be processed and are discarded.
Returns
-------
- self: This instance with matching senderRange resources retrieved from the server available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
def read(self, href):
"""Retrieves a single instance of senderRange data from the server.
Args
----
- href (str): An href to the instance to be retrieved
Returns
-------
- self: This instance with the senderRange resources from the server available through an iterator or index
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href)
def DoMakeBeforeBreak(self):
"""Executes the doMakeBeforeBreak operation on the server.
NOT DEFINED
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
return self._execute('doMakeBeforeBreak', payload=payload, response_object=None)
def SendReEvaluationRequest(self):
"""Executes the sendReEvaluationRequest operation on the server.
NOT DEFINED
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self.href }
return self._execute('sendReEvaluationRequest', payload=payload, response_object=None)
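    # --- Illustrative usage (a hypothetical sketch; names below are assumed) ---
    # The CRUD helpers above compose in the usual ixnetwork_restpy style:
    #
    #   sr = sender_range          # an instance of this container class
    #   sr.add(SessionName='lsp-1', SetupPriority=0, HoldingPriority=7)
    #   match = sr.find(SessionName='^lsp-1$')   # ^...$ for an exact match
    #   match.update(Bandwidth='1000')           # kbits/sec
    #   match.DoMakeBeforeBreak()                # server-side operation
    #   match.remove()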
| 59.747807 | 1,140 | 0.714645 | 6,402 | 54,490 | 6.001562 | 0.07935 | 0.014991 | 0.022487 | 0.032143 | 0.79595 | 0.785644 | 0.740904 | 0.73713 | 0.717089 | 0.649888 | 0 | 0.005452 | 0.209029 | 54,490 | 911 | 1,141 | 59.813392 | 0.886009 | 0.559956 | 0 | 0.144044 | 0 | 0 | 0.162963 | 0.085422 | 0 | 0 | 0 | 0 | 0 | 1 | 0.282548 | false | 0 | 0.01108 | 0 | 0.459834 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
| 4dd48eb5a05269d3c0ccfdaf430192d3a464d38a | 474 | py | Python | Codewars/7kyu/array-squareup-b/Python/test.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | ["MIT"] | 7 | 2017-09-20T16:40:39.000Z | 2021-08-31T18:15:08.000Z | Codewars/7kyu/array-squareup-b/Python/test.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | ["MIT"] | null | null | null | Codewars/7kyu/array-squareup-b/Python/test.py | RevansChen/online-judge | ad1b07fee7bd3c49418becccda904e17505f3018 | ["MIT"] | null | null | null |
# Python - 3.6.0
Test.describe('Basic Tests')
Test.assert_equals(square_up(4), [0, 0, 0, 1, 0, 0, 2, 1, 0, 3, 2, 1, 4, 3, 2, 1], 'sorry')
Test.assert_equals(square_up(9), [0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 2, 1, 0, 0, 0, 0, 0, 0, 3, 2, 1, 0, 0, 0, 0, 0, 4, 3, 2, 1, 0, 0, 0, 0, 5, 4, 3, 2, 1, 0, 0, 0, 6, 5, 4, 3, 2, 1, 0, 0, 7, 6, 5, 4, 3, 2, 1, 0, 8, 7, 6, 5, 4, 3, 2, 1, 9, 8, 7, 6, 5, 4, 3, 2, 1], 'sorry')
Test.assert_equals(square_up(1), [1], 'sorry')
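# One possible square_up consistent with the assertions above (an illustrative
# sketch, not part of the original submission; in a real run it would be
# defined before the tests). Row i of the flattened n x n matrix is
# (n - 1 - i) zeros followed by the countdown i+1, i, ..., 1.
def square_up(n):
    # Build each row left-padded with zeros, then flatten row by row.
    return [v for i in range(n)
            for v in [0] * (n - 1 - i) + list(range(i + 1, 0, -1))]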
| 59.25 | 286 | 0.474684 | 127 | 474 | 1.724409 | 0.15748 | 0.283105 | 0.30137 | 0.273973 | 0.799087 | 0.675799 | 0.648402 | 0.356164 | 0.30137 | 0.30137 | 0 | 0.287293 | 0.236287 | 474 | 7 | 287 | 67.714286 | 0.31768 | 0.029536 | 0 | 0 | 0 | 0 | 0.056769 | 0 | 0 | 0 | 0 | 0 | 0.75 | 1 | 0 | true | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
| 1281ceed30236c572643bcbcd1f657fdf210396d | 10,728 | py | Python | tests/unit/test_consumer.py | EDITD/queue_util | 383267388bcfeb1c8bc0cf749f28d5073b067c1b | ["MIT"] | null | null | null | tests/unit/test_consumer.py | EDITD/queue_util | 383267388bcfeb1c8bc0cf749f28d5073b067c1b | ["MIT"] | 21 | 2015-11-20T14:37:38.000Z | 2020-10-05T11:09:22.000Z | tests/unit/test_consumer.py | EDITD/queue_util | 383267388bcfeb1c8bc0cf749f28d5073b067c1b | ["MIT"] | null | null | null |
try:
import mock
except ImportError:
from unittest import mock
import unittest
from queue_util.consumer import Consumer
class TestConsumer(unittest.TestCase):
@mock.patch('kombu.BrokerConnection', autospec=True)
@mock.patch('statsd.StatsClient', autospec=True)
@mock.patch.object(Consumer, 'post_handle_data')
@mock.patch('logging.exception')
def test_consumer(self, mock_logging_exception, mock_post_handle_data,
mock_statsd, mock_broker_connection):
"""A handle_data generator function"""
# Handler that yields messages the first time it is called and then
# raises a KeyboardInterrupt on subsequent calls.
class HandleData(object):
CALL_COUNT = 0
def handle_data(self, payload):
self.CALL_COUNT += 1
if 1 == self.CALL_COUNT:
yield 'dest_queue', 'payload1'
yield 'dest_queue', 'payload2'
yield 'dest_queue', 'payload3'
else:
raise KeyboardInterrupt
handle_exception = mock.Mock()
c = Consumer(
'source_queue', HandleData().handle_data,
rabbitmq_host='host',
handle_exception=handle_exception,
statsd_host='stats',
)
c.run_forever()
# Connection was created
mock_broker_connection.assert_called_once_with('host')
        # Messages were fetched from the source queue
source = c.get_queue('source_queue')
expected = [
mock.call(block=True, timeout=None),
mock.call().ack(),
mock.call(block=True, timeout=None),
]
source.get.assert_has_calls(expected)
# Three messages were sent to destination
dest = c.get_queue('dest_queue')
expected = [
mock.call('payload1'),
mock.call('payload2'),
mock.call('payload3'),
]
dest.put.assert_has_calls(expected)
# Post-handle was called
mock_post_handle_data.assert_called_once_with()
# Job was marked as successful
c.statsd_client.incr.assert_called_once_with('success')
# Exception handler was not called
handle_exception.assert_not_called()
# Exception was not logged
mock_logging_exception.assert_not_called()
# Messages were acked and not requeued or rejected
mock_message = source.get()
self.assertEqual(1, mock_message.ack.call_count)
mock_message.requeue.assert_not_called()
mock_message.reject.assert_not_called()
@mock.patch('kombu.BrokerConnection', autospec=True)
@mock.patch('statsd.StatsClient', autospec=True)
@mock.patch.object(Consumer, 'post_handle_data')
@mock.patch('logging.exception')
def test_retry_failures(self, mock_logging_exception, mock_post_handle_data,
mock_statsd, mock_broker_connection):
"""Failed messages are retried"""
# Handler that raises a ValueError the first time it is called and then
# raises a KeyboardInterrupt on subsequent calls.
class HandleData(object):
CALL_COUNT = 0
def handle_data(self, payload):
self.CALL_COUNT += 1
if 1 == self.CALL_COUNT:
raise ValueError('Something bad')
else:
raise KeyboardInterrupt
handle_exception = mock.Mock()
c = Consumer(
'source_queue', HandleData().handle_data,
rabbitmq_host='host',
handle_exception=handle_exception,
statsd_host='stats',
dont_requeue=False,
reject=False,
)
c.run_forever()
# Post-handle was not called
mock_post_handle_data.assert_not_called()
# Job was not marked as successful
c.statsd_client.incr.assert_called_once_with('failure')
# Exception handler was not called
handle_exception.assert_called_once_with()
# Exception was logged
mock_logging_exception.assert_called_once()
# Messages were requeued and not acked or rejected
source = c.get_queue('source_queue')
mock_message = source.get()
mock_message.ack.assert_not_called()
self.assertEqual(1, mock_message.requeue.call_count)
mock_message.reject.assert_not_called()
@mock.patch('kombu.BrokerConnection', autospec=True)
def test_reject_failures(self, mock_broker_connection):
"""Failed messages are rejected"""
# Handler that raises a ValueError the first time it is called and then
# raises a KeyboardInterrupt on subsequent calls.
class HandleData(object):
CALL_COUNT = 0
def handle_data(self, payload):
self.CALL_COUNT += 1
if 1 == self.CALL_COUNT:
raise ValueError('Something bad')
else:
raise KeyboardInterrupt
c = Consumer(
'source_queue', HandleData().handle_data,
rabbitmq_host='host',
dont_requeue=True,
reject=True,
)
c.run_forever()
# Messages were rejected and not acked or requeued
source = c.get_queue('source_queue')
mock_message = source.get()
mock_message.ack.assert_not_called()
mock_message.requeue.assert_not_called()
self.assertEqual(1, mock_message.reject.call_count)
@mock.patch('kombu.BrokerConnection', autospec=True)
@mock.patch('statsd.StatsClient', autospec=True)
@mock.patch.object(Consumer, 'post_handle_data')
@mock.patch('logging.exception')
def test_consumer_batch(self, mock_logging_exception, mock_post_handle_data,
mock_statsd, mock_broker_connection):
"""A handle_data generator function"""
# Handler that yields messages the first time it is called and then
# raises a KeyboardInterrupt on subsequent calls.
class HandleData(object):
CALL_COUNT = 0
def handle_data(self, payloads):
self.CALL_COUNT += 1
if 1 == self.CALL_COUNT:
for payload in payloads:
yield 'dest_queue', payload
else:
raise KeyboardInterrupt
handle_exception = mock.Mock()
c = Consumer(
'source_queue', HandleData().handle_data,
rabbitmq_host='host',
handle_exception=handle_exception,
statsd_host='stats',
)
c.batched_run_forever(size=10)
# Connection was created
mock_broker_connection.assert_called_once_with('host')
# 20 message fetched - handled as two batches of 10 messages
source = c.get_queue('source_queue')
self.assertEqual(20, source.get.call_count)
dest = c.get_queue('dest_queue')
self.assertEqual(10, dest.put.call_count)
# Post-handle was called
mock_post_handle_data.assert_called_once_with()
# Job was marked as successful
c.statsd_client.incr.assert_called_once_with('success')
# Exception handler was not called
handle_exception.assert_not_called()
# Exception was not logged
mock_logging_exception.assert_not_called()
# Messages were acked and not requeued or rejected
mock_message = source.get()
self.assertEqual(10, mock_message.ack.call_count)
mock_message.requeue.assert_not_called()
mock_message.reject.assert_not_called()
@mock.patch('kombu.BrokerConnection', autospec=True)
@mock.patch('statsd.StatsClient', autospec=True)
@mock.patch.object(Consumer, 'post_handle_data')
@mock.patch('logging.exception')
def test_batch_retry_failures(self, mock_logging_exception, mock_post_handle_data,
mock_statsd, mock_broker_connection):
"""Batches of failed messages are retried"""
# Handler that raises a ValueError the first time it is called and then
# raises a KeyboardInterrupt on subsequent calls.
class HandleData(object):
CALL_COUNT = 0
def handle_data(self, payload):
self.CALL_COUNT += 1
if 1 == self.CALL_COUNT:
raise ValueError('Something bad')
else:
raise KeyboardInterrupt
handle_exception = mock.Mock()
c = Consumer(
'source_queue', HandleData().handle_data,
rabbitmq_host='host',
handle_exception=handle_exception,
statsd_host='stats',
dont_requeue=False,
reject=False,
)
c.batched_run_forever(size=10)
# Post-handle was not called
mock_post_handle_data.assert_not_called()
# Job was not marked as successful
c.statsd_client.incr.assert_called_once_with('failure')
# Exception handler was not called
handle_exception.assert_called_once_with()
# Exception was logged
mock_logging_exception.assert_called_once()
# Messages were requeued and not acked or rejected
source = c.get_queue('source_queue')
mock_message = source.get()
mock_message.ack.assert_not_called()
self.assertEqual(10, mock_message.requeue.call_count)
mock_message.reject.assert_not_called()
@mock.patch('kombu.BrokerConnection', autospec=True)
def test_batch_reject_failures(self, mock_broker_connection):
"""Failed messages are rejected"""
# Handler that raises a ValueError the first time it is called and then
# raises a KeyboardInterrupt on subsequent calls.
class HandleData(object):
CALL_COUNT = 0
def handle_data(self, payload):
self.CALL_COUNT += 1
if 1 == self.CALL_COUNT:
raise ValueError('Something bad')
else:
raise KeyboardInterrupt
c = Consumer(
'source_queue', HandleData().handle_data,
rabbitmq_host='host',
dont_requeue=True,
reject=True,
)
c.batched_run_forever(size=10)
# Messages were rejected and not acked or requeued
source = c.get_queue('source_queue')
mock_message = source.get()
mock_message.ack.assert_not_called()
mock_message.requeue.assert_not_called()
self.assertEqual(10, mock_message.reject.call_count)
if '__main__' == __name__:
unittest.main()
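# --- Rough sketch of the loop these tests exercise (inferred from the
# --- assertions above; queue_util's actual implementation may differ) ---
#
#   while True:
#       message = source_queue.get(block=True, timeout=None)
#       try:
#           for dest_name, payload in handle_data(message.payload):
#               get_queue(dest_name).put(payload)
#           message.ack()
#           post_handle_data()
#           statsd_client.incr('success')
#       except KeyboardInterrupt:
#           break                      # run_forever exits cleanly
#       except Exception:
#           logging.exception('handle_data failed')
#           handle_exception()
#           statsd_client.incr('failure')
#           if reject:
#               message.reject()       # dont_requeue=True, reject=True
#           elif not dont_requeue:
#               message.requeue()      # default retry behaviour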
| 35.76 | 86 | 0.621271 | 1,201 | 10,728 | 5.302248 | 0.110741 | 0.040829 | 0.0424 | 0.031407 | 0.899969 | 0.894786 | 0.860082 | 0.860082 | 0.855685 | 0.850974 | 0 | 0.006234 | 0.29726 | 10,728 | 299 | 87 | 35.879599 | 0.83844 | 0.168811 | 0 | 0.787129 | 0 | 0 | 0.082278 | 0.014919 | 0 | 0 | 0 | 0 | 0.19802 | 1 | 0.059406 | false | 0 | 0.024752 | 0 | 0.118812 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
| 429c05bffb4b4b05e92330618730d58542186f72 | 124,990 | py | Python | easybase/hbase/THBaseService.py | ao-O/easybase | b565f5c6b16ee7c71451d84298d97276c72173af | ["MIT"] | null | null | null | easybase/hbase/THBaseService.py | ao-O/easybase | b565f5c6b16ee7c71451d84298d97276c72173af | ["MIT"] | null | null | null | easybase/hbase/THBaseService.py | ao-O/easybase | b565f5c6b16ee7c71451d84298d97276c72173af | ["MIT"] | null | null | null |
#
# Autogenerated by Thrift Compiler (0.9.3)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
import logging
from ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except ImportError:
fastbinary = None
class Iface:
def exists(self, table, tget):
"""
Test for the existence of columns in the table, as specified in the TGet.
@return true if the specified TGet matches one or more keys, false if not
Parameters:
- table: the table to check on
- tget: the TGet to check for
"""
pass
def get(self, table, tget):
"""
Method for getting data from a row.
    If the row cannot be found, an empty Result is returned.
    This can be checked by the empty field of the TResult.
@return the result
Parameters:
- table: the table to get from
- tget: the TGet to fetch
"""
pass
def getMultiple(self, table, tgets):
"""
Method for getting multiple rows.
If a row cannot be found there will be a null
value in the result list for that TGet at the
same position.
So the Results are in the same order as the TGets.
Parameters:
- table: the table to get from
- tgets: a list of TGets to fetch, the Result list
will have the Results at corresponding positions
or null if there was an error
"""
pass
def put(self, table, tput):
"""
Commit a TPut to a table.
Parameters:
- table: the table to put data in
- tput: the TPut to put
"""
pass
def checkAndPut(self, table, row, family, qualifier, value, tput):
"""
Atomically checks if a row/family/qualifier value matches the expected
value. If it does, it adds the TPut.
@return true if the new put was executed, false otherwise
Parameters:
- table: to check in and put to
- row: row to check
- family: column family to check
- qualifier: column qualifier to check
- value: the expected value, if not provided the
check is for the non-existence of the
column in question
- tput: the TPut to put if the check succeeds
"""
pass
def putMultiple(self, table, tputs):
"""
Commit a List of Puts to the table.
Parameters:
- table: the table to put data in
- tputs: a list of TPuts to commit
"""
pass
def deleteSingle(self, table, tdelete):
"""
Deletes as specified by the TDelete.
Note: "delete" is a reserved keyword and cannot be used in Thrift
thus the inconsistent naming scheme from the other functions.
Parameters:
- table: the table to delete from
- tdelete: the TDelete to delete
"""
pass
def deleteMultiple(self, table, tdeletes):
"""
Bulk commit a List of TDeletes to the table.
Throws a TIOError if any of the deletes fail.
Always returns an empty list for backwards compatibility.
Parameters:
- table: the table to delete from
- tdeletes: list of TDeletes to delete
"""
pass
def checkAndDelete(self, table, row, family, qualifier, value, tdelete):
"""
Atomically checks if a row/family/qualifier value matches the expected
value. If it does, it adds the delete.
@return true if the new delete was executed, false otherwise
Parameters:
- table: to check in and delete from
- row: row to check
- family: column family to check
- qualifier: column qualifier to check
- value: the expected value, if not provided the
check is for the non-existence of the
column in question
- tdelete: the TDelete to execute if the check succeeds
"""
pass
def increment(self, table, tincrement):
"""
Parameters:
- table: the table to increment the value on
- tincrement: the TIncrement to increment
"""
pass
def append(self, table, tappend):
"""
Parameters:
- table: the table to append the value on
- tappend: the TAppend to append
"""
pass
def openScanner(self, table, tscan):
"""
Get a Scanner for the provided TScan object.
@return Scanner Id to be used with other scanner procedures
Parameters:
- table: the table to get the Scanner for
- tscan: the scan object to get a Scanner for
"""
pass
def getScannerRows(self, scannerId, numRows):
"""
Grabs multiple rows from a Scanner.
@return Between zero and numRows TResults
Parameters:
- scannerId: the Id of the Scanner to return rows from. This is an Id returned from the openScanner function.
- numRows: number of rows to return
"""
pass
def closeScanner(self, scannerId):
"""
    Closes the scanner. Should be called to free server-side resources in a timely manner.
Typically close once the scanner is not needed anymore, i.e. after looping
over it to get all the required rows.
Parameters:
- scannerId: the Id of the Scanner to close *
"""
pass
def mutateRow(self, table, trowMutations):
"""
mutateRow performs multiple mutations atomically on a single row.
Parameters:
- table: table to apply the mutations
- trowMutations: mutations to apply
"""
pass
def getScannerResults(self, table, tscan, numRows):
"""
Get results for the provided TScan object.
    This helper function opens a scanner, gets the results, and closes the scanner.
@return between zero and numRows TResults
Parameters:
- table: the table to get the Scanner for
- tscan: the scan object to get a Scanner for
- numRows: number of rows to return
"""
pass
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
def exists(self, table, tget):
"""
Test for the existence of columns in the table, as specified in the TGet.
@return true if the specified TGet matches one or more keys, false if not
Parameters:
- table: the table to check on
- tget: the TGet to check for
"""
self.send_exists(table, tget)
return self.recv_exists()
def send_exists(self, table, tget):
self._oprot.writeMessageBegin('exists', TMessageType.CALL, self._seqid)
args = exists_args()
args.table = table
args.tget = tget
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_exists(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = exists_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.io is not None:
raise result.io
raise TApplicationException(TApplicationException.MISSING_RESULT, "exists failed: unknown result")
def get(self, table, tget):
"""
Method for getting data from a row.
    If the row cannot be found, an empty Result is returned.
    This can be checked by the empty field of the TResult.
@return the result
Parameters:
- table: the table to get from
- tget: the TGet to fetch
"""
self.send_get(table, tget)
return self.recv_get()
def send_get(self, table, tget):
self._oprot.writeMessageBegin('get', TMessageType.CALL, self._seqid)
args = get_args()
args.table = table
args.tget = tget
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_get(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = get_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.io is not None:
raise result.io
raise TApplicationException(TApplicationException.MISSING_RESULT, "get failed: unknown result")
def getMultiple(self, table, tgets):
"""
Method for getting multiple rows.
If a row cannot be found there will be a null
value in the result list for that TGet at the
same position.
So the Results are in the same order as the TGets.
Parameters:
- table: the table to get from
- tgets: a list of TGets to fetch, the Result list
will have the Results at corresponding positions
or null if there was an error
"""
self.send_getMultiple(table, tgets)
return self.recv_getMultiple()
def send_getMultiple(self, table, tgets):
self._oprot.writeMessageBegin('getMultiple', TMessageType.CALL, self._seqid)
args = getMultiple_args()
args.table = table
args.tgets = tgets
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getMultiple(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getMultiple_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.io is not None:
raise result.io
raise TApplicationException(TApplicationException.MISSING_RESULT, "getMultiple failed: unknown result")
def put(self, table, tput):
"""
Commit a TPut to a table.
Parameters:
- table: the table to put data in
- tput: the TPut to put
"""
self.send_put(table, tput)
self.recv_put()
def send_put(self, table, tput):
self._oprot.writeMessageBegin('put', TMessageType.CALL, self._seqid)
args = put_args()
args.table = table
args.tput = tput
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_put(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = put_result()
result.read(iprot)
iprot.readMessageEnd()
if result.io is not None:
raise result.io
return
def checkAndPut(self, table, row, family, qualifier, value, tput):
"""
Atomically checks if a row/family/qualifier value matches the expected
value. If it does, it adds the TPut.
@return true if the new put was executed, false otherwise
Parameters:
- table: to check in and put to
- row: row to check
- family: column family to check
- qualifier: column qualifier to check
- value: the expected value, if not provided the
check is for the non-existence of the
column in question
- tput: the TPut to put if the check succeeds
"""
self.send_checkAndPut(table, row, family, qualifier, value, tput)
return self.recv_checkAndPut()
def send_checkAndPut(self, table, row, family, qualifier, value, tput):
self._oprot.writeMessageBegin('checkAndPut', TMessageType.CALL, self._seqid)
args = checkAndPut_args()
args.table = table
args.row = row
args.family = family
args.qualifier = qualifier
args.value = value
args.tput = tput
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_checkAndPut(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = checkAndPut_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.io is not None:
raise result.io
raise TApplicationException(TApplicationException.MISSING_RESULT, "checkAndPut failed: unknown result")
def putMultiple(self, table, tputs):
"""
Commit a List of Puts to the table.
Parameters:
- table: the table to put data in
- tputs: a list of TPuts to commit
"""
self.send_putMultiple(table, tputs)
self.recv_putMultiple()
def send_putMultiple(self, table, tputs):
self._oprot.writeMessageBegin('putMultiple', TMessageType.CALL, self._seqid)
args = putMultiple_args()
args.table = table
args.tputs = tputs
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_putMultiple(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = putMultiple_result()
result.read(iprot)
iprot.readMessageEnd()
if result.io is not None:
raise result.io
return
def deleteSingle(self, table, tdelete):
"""
Deletes as specified by the TDelete.
Note: "delete" is a reserved keyword and cannot be used in Thrift
thus the inconsistent naming scheme from the other functions.
Parameters:
- table: the table to delete from
- tdelete: the TDelete to delete
"""
self.send_deleteSingle(table, tdelete)
self.recv_deleteSingle()
def send_deleteSingle(self, table, tdelete):
self._oprot.writeMessageBegin('deleteSingle', TMessageType.CALL, self._seqid)
args = deleteSingle_args()
args.table = table
args.tdelete = tdelete
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_deleteSingle(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = deleteSingle_result()
result.read(iprot)
iprot.readMessageEnd()
if result.io is not None:
raise result.io
return
def deleteMultiple(self, table, tdeletes):
"""
Bulk commit a List of TDeletes to the table.
Throws a TIOError if any of the deletes fail.
Always returns an empty list for backwards compatibility.
Parameters:
- table: the table to delete from
- tdeletes: list of TDeletes to delete
"""
self.send_deleteMultiple(table, tdeletes)
return self.recv_deleteMultiple()
def send_deleteMultiple(self, table, tdeletes):
self._oprot.writeMessageBegin('deleteMultiple', TMessageType.CALL, self._seqid)
args = deleteMultiple_args()
args.table = table
args.tdeletes = tdeletes
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_deleteMultiple(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = deleteMultiple_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.io is not None:
raise result.io
raise TApplicationException(TApplicationException.MISSING_RESULT, "deleteMultiple failed: unknown result")
def checkAndDelete(self, table, row, family, qualifier, value, tdelete):
"""
Atomically checks if a row/family/qualifier value matches the expected
value. If it does, it adds the delete.
@return true if the new delete was executed, false otherwise
Parameters:
- table: to check in and delete from
- row: row to check
- family: column family to check
- qualifier: column qualifier to check
- value: the expected value, if not provided the
check is for the non-existence of the
column in question
- tdelete: the TDelete to execute if the check succeeds
"""
self.send_checkAndDelete(table, row, family, qualifier, value, tdelete)
return self.recv_checkAndDelete()
def send_checkAndDelete(self, table, row, family, qualifier, value, tdelete):
self._oprot.writeMessageBegin('checkAndDelete', TMessageType.CALL, self._seqid)
args = checkAndDelete_args()
args.table = table
args.row = row
args.family = family
args.qualifier = qualifier
args.value = value
args.tdelete = tdelete
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_checkAndDelete(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = checkAndDelete_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.io is not None:
raise result.io
raise TApplicationException(TApplicationException.MISSING_RESULT, "checkAndDelete failed: unknown result")
def increment(self, table, tincrement):
"""
Parameters:
- table: the table to increment the value on
- tincrement: the TIncrement to increment
"""
self.send_increment(table, tincrement)
return self.recv_increment()
def send_increment(self, table, tincrement):
self._oprot.writeMessageBegin('increment', TMessageType.CALL, self._seqid)
args = increment_args()
args.table = table
args.tincrement = tincrement
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_increment(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = increment_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.io is not None:
raise result.io
raise TApplicationException(TApplicationException.MISSING_RESULT, "increment failed: unknown result")
def append(self, table, tappend):
"""
Parameters:
- table: the table to append the value on
- tappend: the TAppend to append
"""
self.send_append(table, tappend)
return self.recv_append()
def send_append(self, table, tappend):
self._oprot.writeMessageBegin('append', TMessageType.CALL, self._seqid)
args = append_args()
args.table = table
args.tappend = tappend
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_append(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = append_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.io is not None:
raise result.io
raise TApplicationException(TApplicationException.MISSING_RESULT, "append failed: unknown result")
def openScanner(self, table, tscan):
"""
Get a Scanner for the provided TScan object.
@return Scanner Id to be used with other scanner procedures
Parameters:
- table: the table to get the Scanner for
- tscan: the scan object to get a Scanner for
"""
self.send_openScanner(table, tscan)
return self.recv_openScanner()
def send_openScanner(self, table, tscan):
self._oprot.writeMessageBegin('openScanner', TMessageType.CALL, self._seqid)
args = openScanner_args()
args.table = table
args.tscan = tscan
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_openScanner(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = openScanner_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.io is not None:
raise result.io
raise TApplicationException(TApplicationException.MISSING_RESULT, "openScanner failed: unknown result")
def getScannerRows(self, scannerId, numRows):
"""
Grabs multiple rows from a Scanner.
@return Between zero and numRows TResults
Parameters:
- scannerId: the Id of the Scanner to return rows from. This is an Id returned from the openScanner function.
- numRows: number of rows to return
"""
self.send_getScannerRows(scannerId, numRows)
return self.recv_getScannerRows()
def send_getScannerRows(self, scannerId, numRows):
self._oprot.writeMessageBegin('getScannerRows', TMessageType.CALL, self._seqid)
args = getScannerRows_args()
args.scannerId = scannerId
args.numRows = numRows
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getScannerRows(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getScannerRows_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.io is not None:
raise result.io
if result.ia is not None:
raise result.ia
raise TApplicationException(TApplicationException.MISSING_RESULT, "getScannerRows failed: unknown result")
def closeScanner(self, scannerId):
"""
    Closes the scanner. Should be called to free server-side resources in a timely manner.
Typically close once the scanner is not needed anymore, i.e. after looping
over it to get all the required rows.
Parameters:
- scannerId: the Id of the Scanner to close *
"""
self.send_closeScanner(scannerId)
self.recv_closeScanner()
def send_closeScanner(self, scannerId):
self._oprot.writeMessageBegin('closeScanner', TMessageType.CALL, self._seqid)
args = closeScanner_args()
args.scannerId = scannerId
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_closeScanner(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = closeScanner_result()
result.read(iprot)
iprot.readMessageEnd()
if result.io is not None:
raise result.io
if result.ia is not None:
raise result.ia
return
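# A typical scanner lifecycle using the three calls above, sketched as
# comments (the names 'client', 'process', and the TScan bounds are
# assumptions, not defined in this file): open a scanner, drain it in
# batches, and close it promptly so the server can free its resources.
#
#   scanner_id = client.openScanner('t1', TScan(startRow='a', stopRow='z'))
#   try:
#       while True:
#           rows = client.getScannerRows(scanner_id, 100)
#           if not rows:
#               break  # an empty batch means the scan is exhausted
#           for row in rows:
#               process(row)  # hypothetical application callback
#   finally:
#       client.closeScanner(scanner_id)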
def mutateRow(self, table, trowMutations):
"""
mutateRow performs multiple mutations atomically on a single row.
Parameters:
- table: table to apply the mutations
- trowMutations: mutations to apply
"""
self.send_mutateRow(table, trowMutations)
self.recv_mutateRow()
def send_mutateRow(self, table, trowMutations):
self._oprot.writeMessageBegin('mutateRow', TMessageType.CALL, self._seqid)
args = mutateRow_args()
args.table = table
args.trowMutations = trowMutations
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_mutateRow(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = mutateRow_result()
result.read(iprot)
iprot.readMessageEnd()
if result.io is not None:
raise result.io
return
def getScannerResults(self, table, tscan, numRows):
"""
Get results for the provided TScan object.
This helper function opens a scanner, gets the results, and closes the scanner.
@return between zero and numRows TResults
Parameters:
- table: the table to get the Scanner for
- tscan: the scan object to get a Scanner for
- numRows: number of rows to return
"""
self.send_getScannerResults(table, tscan, numRows)
return self.recv_getScannerResults()
def send_getScannerResults(self, table, tscan, numRows):
self._oprot.writeMessageBegin('getScannerResults', TMessageType.CALL, self._seqid)
args = getScannerResults_args()
args.table = table
args.tscan = tscan
args.numRows = numRows
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getScannerResults(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getScannerResults_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.io is not None:
raise result.io
raise TApplicationException(TApplicationException.MISSING_RESULT, "getScannerResults failed: unknown result")
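# getScannerResults bundles the open/fetch/close sequence on the server side,
# so a one-shot bounded scan needs no explicit scanner management. A hedged
# sketch as comments (the 'client', table name, and row cap are illustrative):
#
#   rows = client.getScannerResults('t1', TScan(), 500)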
class Processor(Iface, TProcessor):
def __init__(self, handler):
self._handler = handler
self._processMap = {}
self._processMap["exists"] = Processor.process_exists
self._processMap["get"] = Processor.process_get
self._processMap["getMultiple"] = Processor.process_getMultiple
self._processMap["put"] = Processor.process_put
self._processMap["checkAndPut"] = Processor.process_checkAndPut
self._processMap["putMultiple"] = Processor.process_putMultiple
self._processMap["deleteSingle"] = Processor.process_deleteSingle
self._processMap["deleteMultiple"] = Processor.process_deleteMultiple
self._processMap["checkAndDelete"] = Processor.process_checkAndDelete
self._processMap["increment"] = Processor.process_increment
self._processMap["append"] = Processor.process_append
self._processMap["openScanner"] = Processor.process_openScanner
self._processMap["getScannerRows"] = Processor.process_getScannerRows
self._processMap["closeScanner"] = Processor.process_closeScanner
self._processMap["mutateRow"] = Processor.process_mutateRow
self._processMap["getScannerResults"] = Processor.process_getScannerResults
def process(self, iprot, oprot):
(name, mtype, seqid) = iprot.readMessageBegin()
if name not in self._processMap:
iprot.skip(TType.STRUCT)
iprot.readMessageEnd()
x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
x.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
return
self._processMap[name](self, seqid, iprot, oprot)
return True
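# The processor looks up each incoming message name in _processMap and
# dispatches to the matching process_<name> method below; unknown names are
# answered with an UNKNOWN_METHOD TApplicationException. A minimal serving
# sketch as comments, assuming a handler object implementing Iface (the
# transport, port, and server class here are illustrative choices, not
# prescribed by this file):
#
#   from thrift.transport import TSocket, TTransport
#   from thrift.protocol import TBinaryProtocol
#   from thrift.server import TServer
#
#   processor = Processor(MyHandler())  # MyHandler is hypothetical
#   server = TServer.TSimpleServer(
#       processor,
#       TSocket.TServerSocket(port=9090),
#       TTransport.TBufferedTransportFactory(),
#       TBinaryProtocol.TBinaryProtocolFactory())
#   server.serve()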
def process_exists(self, seqid, iprot, oprot):
args = exists_args()
args.read(iprot)
iprot.readMessageEnd()
result = exists_result()
try:
result.success = self._handler.exists(args.table, args.tget)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except TIOError as io:
msg_type = TMessageType.REPLY
result.io = io
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("exists", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
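# Every process_<name> method follows this same shape: decode the args
# struct, invoke the handler, and map declared Thrift exceptions (here
# TIOError, plus TIllegalArgument for the scanner calls) into the result
# struct as a normal REPLY, while transport errors and interrupts propagate
# and anything unexpected is logged and returned as an INTERNAL_ERROR
# TApplicationException.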
def process_get(self, seqid, iprot, oprot):
args = get_args()
args.read(iprot)
iprot.readMessageEnd()
result = get_result()
try:
result.success = self._handler.get(args.table, args.tget)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except TIOError as io:
msg_type = TMessageType.REPLY
result.io = io
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("get", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getMultiple(self, seqid, iprot, oprot):
args = getMultiple_args()
args.read(iprot)
iprot.readMessageEnd()
result = getMultiple_result()
try:
result.success = self._handler.getMultiple(args.table, args.tgets)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except TIOError as io:
msg_type = TMessageType.REPLY
result.io = io
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getMultiple", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_put(self, seqid, iprot, oprot):
args = put_args()
args.read(iprot)
iprot.readMessageEnd()
result = put_result()
try:
self._handler.put(args.table, args.tput)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except TIOError as io:
msg_type = TMessageType.REPLY
result.io = io
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("put", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_checkAndPut(self, seqid, iprot, oprot):
args = checkAndPut_args()
args.read(iprot)
iprot.readMessageEnd()
result = checkAndPut_result()
try:
result.success = self._handler.checkAndPut(args.table, args.row, args.family, args.qualifier, args.value, args.tput)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except TIOError as io:
msg_type = TMessageType.REPLY
result.io = io
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("checkAndPut", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_putMultiple(self, seqid, iprot, oprot):
args = putMultiple_args()
args.read(iprot)
iprot.readMessageEnd()
result = putMultiple_result()
try:
self._handler.putMultiple(args.table, args.tputs)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except TIOError as io:
msg_type = TMessageType.REPLY
result.io = io
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("putMultiple", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_deleteSingle(self, seqid, iprot, oprot):
args = deleteSingle_args()
args.read(iprot)
iprot.readMessageEnd()
result = deleteSingle_result()
try:
self._handler.deleteSingle(args.table, args.tdelete)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except TIOError as io:
msg_type = TMessageType.REPLY
result.io = io
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("deleteSingle", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_deleteMultiple(self, seqid, iprot, oprot):
args = deleteMultiple_args()
args.read(iprot)
iprot.readMessageEnd()
result = deleteMultiple_result()
try:
result.success = self._handler.deleteMultiple(args.table, args.tdeletes)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except TIOError as io:
msg_type = TMessageType.REPLY
result.io = io
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("deleteMultiple", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_checkAndDelete(self, seqid, iprot, oprot):
args = checkAndDelete_args()
args.read(iprot)
iprot.readMessageEnd()
result = checkAndDelete_result()
try:
result.success = self._handler.checkAndDelete(args.table, args.row, args.family, args.qualifier, args.value, args.tdelete)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except TIOError as io:
msg_type = TMessageType.REPLY
result.io = io
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("checkAndDelete", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_increment(self, seqid, iprot, oprot):
args = increment_args()
args.read(iprot)
iprot.readMessageEnd()
result = increment_result()
try:
result.success = self._handler.increment(args.table, args.tincrement)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except TIOError as io:
msg_type = TMessageType.REPLY
result.io = io
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("increment", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_append(self, seqid, iprot, oprot):
args = append_args()
args.read(iprot)
iprot.readMessageEnd()
result = append_result()
try:
result.success = self._handler.append(args.table, args.tappend)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except TIOError as io:
msg_type = TMessageType.REPLY
result.io = io
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("append", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_openScanner(self, seqid, iprot, oprot):
args = openScanner_args()
args.read(iprot)
iprot.readMessageEnd()
result = openScanner_result()
try:
result.success = self._handler.openScanner(args.table, args.tscan)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except TIOError as io:
msg_type = TMessageType.REPLY
result.io = io
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("openScanner", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getScannerRows(self, seqid, iprot, oprot):
args = getScannerRows_args()
args.read(iprot)
iprot.readMessageEnd()
result = getScannerRows_result()
try:
result.success = self._handler.getScannerRows(args.scannerId, args.numRows)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except TIOError as io:
msg_type = TMessageType.REPLY
result.io = io
except TIllegalArgument as ia:
msg_type = TMessageType.REPLY
result.ia = ia
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getScannerRows", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_closeScanner(self, seqid, iprot, oprot):
args = closeScanner_args()
args.read(iprot)
iprot.readMessageEnd()
result = closeScanner_result()
try:
self._handler.closeScanner(args.scannerId)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except TIOError as io:
msg_type = TMessageType.REPLY
result.io = io
except TIllegalArgument as ia:
msg_type = TMessageType.REPLY
result.ia = ia
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("closeScanner", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_mutateRow(self, seqid, iprot, oprot):
args = mutateRow_args()
args.read(iprot)
iprot.readMessageEnd()
result = mutateRow_result()
try:
self._handler.mutateRow(args.table, args.trowMutations)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except TIOError as io:
msg_type = TMessageType.REPLY
result.io = io
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("mutateRow", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getScannerResults(self, seqid, iprot, oprot):
args = getScannerResults_args()
args.read(iprot)
iprot.readMessageEnd()
result = getScannerResults_result()
try:
result.success = self._handler.getScannerResults(args.table, args.tscan, args.numRows)
msg_type = TMessageType.REPLY
except (TTransport.TTransportException, KeyboardInterrupt, SystemExit):
raise
except TIOError as io:
msg_type = TMessageType.REPLY
result.io = io
except Exception as ex:
msg_type = TMessageType.EXCEPTION
logging.exception(ex)
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getScannerResults", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
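# Each RPC gets a pair of generated structs: <name>_args for the request and
# <name>_result for the response. Their thrift_spec tuples are indexed by
# field id; each entry is (field id, wire type, field name, nested type spec
# or None, default value). For example, in exists_args the entry
# (2, TType.STRUCT, 'tget', (TGet, TGet.thrift_spec), None, ) describes
# field 2 as a TGet struct named 'tget' with no default. When the accelerated
# C extension (fastbinary) and a compatible protocol/transport are available,
# read() and write() hand the whole spec to it instead of walking fields in
# Python.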
class exists_args:
"""
Attributes:
- table: the table to check on
- tget: the TGet to check for
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'table', None, None, ), # 1
(2, TType.STRUCT, 'tget', (TGet, TGet.thrift_spec), None, ), # 2
)
def __init__(self, table=None, tget=None,):
self.table = table
self.tget = tget
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.table = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.tget = TGet()
self.tget.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('exists_args')
if self.table is not None:
oprot.writeFieldBegin('table', TType.STRING, 1)
oprot.writeString(self.table)
oprot.writeFieldEnd()
if self.tget is not None:
oprot.writeFieldBegin('tget', TType.STRUCT, 2)
self.tget.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.table is None:
raise TProtocol.TProtocolException(message='Required field table is unset!')
if self.tget is None:
raise TProtocol.TProtocolException(message='Required field tget is unset!')
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.table)
value = (value * 31) ^ hash(self.tget)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
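# Note: validate() on these generated structs only raises when invoked
# explicitly; neither read() nor write() calls it, so required-field checks
# are opt-in. A hedged example as comments (the TGet contents are made up):
#
#   args = exists_args(table='t1', tget=TGet(row='r1'))
#   args.validate()  # raises TProtocolException if a required field is unset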
class exists_result:
"""
Attributes:
- success
- io
"""
thrift_spec = (
(0, TType.BOOL, 'success', None, None, ), # 0
(1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
)
def __init__(self, success=None, io=None,):
self.success = success
self.io = io
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.BOOL:
self.success = iprot.readBool()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.io = TIOError()
self.io.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('exists_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.BOOL, 0)
oprot.writeBool(self.success)
oprot.writeFieldEnd()
if self.io is not None:
oprot.writeFieldBegin('io', TType.STRUCT, 1)
self.io.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.io)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class get_args:
"""
Attributes:
- table: the table to get from
- tget: the TGet to fetch
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'table', None, None, ), # 1
(2, TType.STRUCT, 'tget', (TGet, TGet.thrift_spec), None, ), # 2
)
def __init__(self, table=None, tget=None,):
self.table = table
self.tget = tget
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.table = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.tget = TGet()
self.tget.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('get_args')
if self.table is not None:
oprot.writeFieldBegin('table', TType.STRING, 1)
oprot.writeString(self.table)
oprot.writeFieldEnd()
if self.tget is not None:
oprot.writeFieldBegin('tget', TType.STRUCT, 2)
self.tget.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.table is None:
raise TProtocol.TProtocolException(message='Required field table is unset!')
if self.tget is None:
raise TProtocol.TProtocolException(message='Required field tget is unset!')
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.table)
value = (value * 31) ^ hash(self.tget)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class get_result:
"""
Attributes:
- success
- io
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (TResult, TResult.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
)
def __init__(self, success=None, io=None,):
self.success = success
self.io = io
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TResult()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.io = TIOError()
self.io.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('get_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.io is not None:
oprot.writeFieldBegin('io', TType.STRUCT, 1)
self.io.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.io)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getMultiple_args:
"""
Attributes:
- table: the table to get from
- tgets: a list of TGets to fetch; the result list
will have the TResults at corresponding positions,
or null if there was an error
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'table', None, None, ), # 1
(2, TType.LIST, 'tgets', (TType.STRUCT,(TGet, TGet.thrift_spec)), None, ), # 2
)
def __init__(self, table=None, tgets=None,):
self.table = table
self.tgets = tgets
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.table = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.tgets = []
(_etype120, _size117) = iprot.readListBegin()
for _i121 in xrange(_size117):
_elem122 = TGet()
_elem122.read(iprot)
self.tgets.append(_elem122)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getMultiple_args')
if self.table is not None:
oprot.writeFieldBegin('table', TType.STRING, 1)
oprot.writeString(self.table)
oprot.writeFieldEnd()
if self.tgets is not None:
oprot.writeFieldBegin('tgets', TType.LIST, 2)
oprot.writeListBegin(TType.STRUCT, len(self.tgets))
for iter123 in self.tgets:
iter123.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.table is None:
raise TProtocol.TProtocolException(message='Required field table is unset!')
if self.tgets is None:
raise TProtocol.TProtocolException(message='Required field tgets is unset!')
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.table)
value = (value * 31) ^ hash(self.tgets)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getMultiple_result:
"""
Attributes:
- success
- io
"""
thrift_spec = (
(0, TType.LIST, 'success', (TType.STRUCT,(TResult, TResult.thrift_spec)), None, ), # 0
(1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
)
def __init__(self, success=None, io=None,):
self.success = success
self.io = io
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.LIST:
self.success = []
(_etype127, _size124) = iprot.readListBegin()
for _i128 in xrange(_size124):
_elem129 = TResult()
_elem129.read(iprot)
self.success.append(_elem129)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.io = TIOError()
self.io.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getMultiple_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.STRUCT, len(self.success))
for iter130 in self.success:
iter130.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.io is not None:
oprot.writeFieldBegin('io', TType.STRUCT, 1)
self.io.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.io)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class put_args:
"""
Attributes:
- table: the table to put data in
- tput: the TPut to put
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'table', None, None, ), # 1
(2, TType.STRUCT, 'tput', (TPut, TPut.thrift_spec), None, ), # 2
)
def __init__(self, table=None, tput=None,):
self.table = table
self.tput = tput
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.table = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.tput = TPut()
self.tput.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('put_args')
if self.table is not None:
oprot.writeFieldBegin('table', TType.STRING, 1)
oprot.writeString(self.table)
oprot.writeFieldEnd()
if self.tput is not None:
oprot.writeFieldBegin('tput', TType.STRUCT, 2)
self.tput.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.table is None:
raise TProtocol.TProtocolException(message='Required field table is unset!')
if self.tput is None:
raise TProtocol.TProtocolException(message='Required field tput is unset!')
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.table)
value = (value * 31) ^ hash(self.tput)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class put_result:
"""
Attributes:
- io
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
)
def __init__(self, io=None,):
self.io = io
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.io = TIOError()
self.io.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('put_result')
if self.io is not None:
oprot.writeFieldBegin('io', TType.STRUCT, 1)
self.io.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.io)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class checkAndPut_args:
"""
Attributes:
- table: to check in and put to
- row: row to check
- family: column family to check
- qualifier: column qualifier to check
- value: the expected value; if not provided, the
check is for the non-existence of the
column in question
- tput: the TPut to put if the check succeeds
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'table', None, None, ), # 1
(2, TType.STRING, 'row', None, None, ), # 2
(3, TType.STRING, 'family', None, None, ), # 3
(4, TType.STRING, 'qualifier', None, None, ), # 4
(5, TType.STRING, 'value', None, None, ), # 5
(6, TType.STRUCT, 'tput', (TPut, TPut.thrift_spec), None, ), # 6
)
def __init__(self, table=None, row=None, family=None, qualifier=None, value=None, tput=None,):
self.table = table
self.row = row
self.family = family
self.qualifier = qualifier
self.value = value
self.tput = tput
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.table = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.row = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.family = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRING:
self.qualifier = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.STRING:
self.value = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.STRUCT:
self.tput = TPut()
self.tput.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('checkAndPut_args')
if self.table is not None:
oprot.writeFieldBegin('table', TType.STRING, 1)
oprot.writeString(self.table)
oprot.writeFieldEnd()
if self.row is not None:
oprot.writeFieldBegin('row', TType.STRING, 2)
oprot.writeString(self.row)
oprot.writeFieldEnd()
if self.family is not None:
oprot.writeFieldBegin('family', TType.STRING, 3)
oprot.writeString(self.family)
oprot.writeFieldEnd()
if self.qualifier is not None:
oprot.writeFieldBegin('qualifier', TType.STRING, 4)
oprot.writeString(self.qualifier)
oprot.writeFieldEnd()
if self.value is not None:
oprot.writeFieldBegin('value', TType.STRING, 5)
oprot.writeString(self.value)
oprot.writeFieldEnd()
if self.tput is not None:
oprot.writeFieldBegin('tput', TType.STRUCT, 6)
self.tput.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.table is None:
raise TProtocol.TProtocolException(message='Required field table is unset!')
if self.row is None:
raise TProtocol.TProtocolException(message='Required field row is unset!')
if self.family is None:
raise TProtocol.TProtocolException(message='Required field family is unset!')
if self.qualifier is None:
raise TProtocol.TProtocolException(message='Required field qualifier is unset!')
if self.tput is None:
raise TProtocol.TProtocolException(message='Required field tput is unset!')
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.table)
value = (value * 31) ^ hash(self.row)
value = (value * 31) ^ hash(self.family)
value = (value * 31) ^ hash(self.qualifier)
value = (value * 31) ^ hash(self.value)
value = (value * 31) ^ hash(self.tput)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
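# Hedged illustration of how these args map onto a call (the checkAndPut
# client method is defined earlier in this module; the table, row, column,
# and 'tput' values are made up): passing value=None asks the server to
# apply tput only if the checked column is absent.
#
#   applied = client.checkAndPut('t1', 'row1', 'cf', 'q', None, tput)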
class checkAndPut_result:
"""
Attributes:
- success
- io
"""
thrift_spec = (
(0, TType.BOOL, 'success', None, None, ), # 0
(1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
)
def __init__(self, success=None, io=None,):
self.success = success
self.io = io
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.BOOL:
self.success = iprot.readBool()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.io = TIOError()
self.io.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('checkAndPut_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.BOOL, 0)
oprot.writeBool(self.success)
oprot.writeFieldEnd()
if self.io is not None:
oprot.writeFieldBegin('io', TType.STRUCT, 1)
self.io.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.io)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class putMultiple_args:
"""
Attributes:
- table: the table to put data in
- tputs: a list of TPuts to commit
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'table', None, None, ), # 1
(2, TType.LIST, 'tputs', (TType.STRUCT,(TPut, TPut.thrift_spec)), None, ), # 2
)
def __init__(self, table=None, tputs=None,):
self.table = table
self.tputs = tputs
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.table = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.tputs = []
(_etype134, _size131) = iprot.readListBegin()
for _i135 in xrange(_size131):
_elem136 = TPut()
_elem136.read(iprot)
self.tputs.append(_elem136)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('putMultiple_args')
if self.table is not None:
oprot.writeFieldBegin('table', TType.STRING, 1)
oprot.writeString(self.table)
oprot.writeFieldEnd()
if self.tputs is not None:
oprot.writeFieldBegin('tputs', TType.LIST, 2)
oprot.writeListBegin(TType.STRUCT, len(self.tputs))
for iter137 in self.tputs:
iter137.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.table is None:
raise TProtocol.TProtocolException(message='Required field table is unset!')
if self.tputs is None:
raise TProtocol.TProtocolException(message='Required field tputs is unset!')
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.table)
value = (value * 31) ^ hash(self.tputs)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class putMultiple_result:
"""
Attributes:
- io
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
)
def __init__(self, io=None,):
self.io = io
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.io = TIOError()
self.io.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('putMultiple_result')
if self.io is not None:
oprot.writeFieldBegin('io', TType.STRUCT, 1)
self.io.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.io)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class deleteSingle_args:
"""
Attributes:
- table: the table to delete from
- tdelete: the TDelete to delete
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'table', None, None, ), # 1
(2, TType.STRUCT, 'tdelete', (TDelete, TDelete.thrift_spec), None, ), # 2
)
def __init__(self, table=None, tdelete=None,):
self.table = table
self.tdelete = tdelete
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.table = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.tdelete = TDelete()
self.tdelete.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('deleteSingle_args')
if self.table is not None:
oprot.writeFieldBegin('table', TType.STRING, 1)
oprot.writeString(self.table)
oprot.writeFieldEnd()
if self.tdelete is not None:
oprot.writeFieldBegin('tdelete', TType.STRUCT, 2)
self.tdelete.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.table is None:
raise TProtocol.TProtocolException(message='Required field table is unset!')
if self.tdelete is None:
raise TProtocol.TProtocolException(message='Required field tdelete is unset!')
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.table)
value = (value * 31) ^ hash(self.tdelete)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class deleteSingle_result:
"""
Attributes:
- io
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
)
def __init__(self, io=None,):
self.io = io
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.io = TIOError()
self.io.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('deleteSingle_result')
if self.io is not None:
oprot.writeFieldBegin('io', TType.STRUCT, 1)
self.io.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.io)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class deleteMultiple_args:
"""
Attributes:
- table: the table to delete from
- tdeletes: list of TDeletes to delete
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'table', None, None, ), # 1
(2, TType.LIST, 'tdeletes', (TType.STRUCT,(TDelete, TDelete.thrift_spec)), None, ), # 2
)
def __init__(self, table=None, tdeletes=None,):
self.table = table
self.tdeletes = tdeletes
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.table = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.LIST:
self.tdeletes = []
(_etype141, _size138) = iprot.readListBegin()
for _i142 in xrange(_size138):
_elem143 = TDelete()
_elem143.read(iprot)
self.tdeletes.append(_elem143)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('deleteMultiple_args')
if self.table is not None:
oprot.writeFieldBegin('table', TType.STRING, 1)
oprot.writeString(self.table)
oprot.writeFieldEnd()
if self.tdeletes is not None:
oprot.writeFieldBegin('tdeletes', TType.LIST, 2)
oprot.writeListBegin(TType.STRUCT, len(self.tdeletes))
for iter144 in self.tdeletes:
iter144.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.table is None:
raise TProtocol.TProtocolException(message='Required field table is unset!')
if self.tdeletes is None:
raise TProtocol.TProtocolException(message='Required field tdeletes is unset!')
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.table)
value = (value * 31) ^ hash(self.tdeletes)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class deleteMultiple_result:
"""
Attributes:
- success
- io
"""
thrift_spec = (
(0, TType.LIST, 'success', (TType.STRUCT,(TDelete, TDelete.thrift_spec)), None, ), # 0
(1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
)
def __init__(self, success=None, io=None,):
self.success = success
self.io = io
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.LIST:
self.success = []
(_etype148, _size145) = iprot.readListBegin()
for _i149 in xrange(_size145):
_elem150 = TDelete()
_elem150.read(iprot)
self.success.append(_elem150)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.io = TIOError()
self.io.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('deleteMultiple_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.STRUCT, len(self.success))
for iter151 in self.success:
iter151.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.io is not None:
oprot.writeFieldBegin('io', TType.STRUCT, 1)
self.io.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.io)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class checkAndDelete_args:
"""
Attributes:
- table: to check in and delete from
- row: row to check
- family: column family to check
- qualifier: column qualifier to check
- value: the expected value; if not provided, the
check is for the non-existence of the
column in question
- tdelete: the TDelete to execute if the check succeeds
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'table', None, None, ), # 1
(2, TType.STRING, 'row', None, None, ), # 2
(3, TType.STRING, 'family', None, None, ), # 3
(4, TType.STRING, 'qualifier', None, None, ), # 4
(5, TType.STRING, 'value', None, None, ), # 5
(6, TType.STRUCT, 'tdelete', (TDelete, TDelete.thrift_spec), None, ), # 6
)
def __init__(self, table=None, row=None, family=None, qualifier=None, value=None, tdelete=None,):
self.table = table
self.row = row
self.family = family
self.qualifier = qualifier
self.value = value
self.tdelete = tdelete
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.table = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.row = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.family = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRING:
self.qualifier = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.STRING:
self.value = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 6:
if ftype == TType.STRUCT:
self.tdelete = TDelete()
self.tdelete.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('checkAndDelete_args')
if self.table is not None:
oprot.writeFieldBegin('table', TType.STRING, 1)
oprot.writeString(self.table)
oprot.writeFieldEnd()
if self.row is not None:
oprot.writeFieldBegin('row', TType.STRING, 2)
oprot.writeString(self.row)
oprot.writeFieldEnd()
if self.family is not None:
oprot.writeFieldBegin('family', TType.STRING, 3)
oprot.writeString(self.family)
oprot.writeFieldEnd()
if self.qualifier is not None:
oprot.writeFieldBegin('qualifier', TType.STRING, 4)
oprot.writeString(self.qualifier)
oprot.writeFieldEnd()
if self.value is not None:
oprot.writeFieldBegin('value', TType.STRING, 5)
oprot.writeString(self.value)
oprot.writeFieldEnd()
if self.tdelete is not None:
oprot.writeFieldBegin('tdelete', TType.STRUCT, 6)
self.tdelete.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.table is None:
raise TProtocol.TProtocolException(message='Required field table is unset!')
if self.row is None:
raise TProtocol.TProtocolException(message='Required field row is unset!')
if self.family is None:
raise TProtocol.TProtocolException(message='Required field family is unset!')
if self.qualifier is None:
raise TProtocol.TProtocolException(message='Required field qualifier is unset!')
if self.tdelete is None:
raise TProtocol.TProtocolException(message='Required field tdelete is unset!')
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.table)
value = (value * 31) ^ hash(self.row)
value = (value * 31) ^ hash(self.family)
value = (value * 31) ^ hash(self.qualifier)
value = (value * 31) ^ hash(self.value)
value = (value * 31) ^ hash(self.tdelete)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class checkAndDelete_result:
"""
Attributes:
- success
- io
"""
thrift_spec = (
(0, TType.BOOL, 'success', None, None, ), # 0
(1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
)
def __init__(self, success=None, io=None,):
self.success = success
self.io = io
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.BOOL:
self.success = iprot.readBool()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.io = TIOError()
self.io.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('checkAndDelete_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.BOOL, 0)
oprot.writeBool(self.success)
oprot.writeFieldEnd()
if self.io is not None:
oprot.writeFieldBegin('io', TType.STRUCT, 1)
self.io.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.io)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class increment_args:
"""
Attributes:
- table: the table to increment the value on
- tincrement: the TIncrement to increment
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'table', None, None, ), # 1
(2, TType.STRUCT, 'tincrement', (TIncrement, TIncrement.thrift_spec), None, ), # 2
)
def __init__(self, table=None, tincrement=None,):
self.table = table
self.tincrement = tincrement
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.table = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.tincrement = TIncrement()
self.tincrement.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('increment_args')
if self.table is not None:
oprot.writeFieldBegin('table', TType.STRING, 1)
oprot.writeString(self.table)
oprot.writeFieldEnd()
if self.tincrement is not None:
oprot.writeFieldBegin('tincrement', TType.STRUCT, 2)
self.tincrement.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.table is None:
raise TProtocol.TProtocolException(message='Required field table is unset!')
if self.tincrement is None:
raise TProtocol.TProtocolException(message='Required field tincrement is unset!')
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.table)
value = (value * 31) ^ hash(self.tincrement)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class increment_result:
"""
Attributes:
- success
- io
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (TResult, TResult.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
)
def __init__(self, success=None, io=None,):
self.success = success
self.io = io
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TResult()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.io = TIOError()
self.io.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('increment_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.io is not None:
oprot.writeFieldBegin('io', TType.STRUCT, 1)
self.io.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.io)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class append_args:
"""
Attributes:
- table: the table to append the value on
- tappend: the TAppend to append
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'table', None, None, ), # 1
(2, TType.STRUCT, 'tappend', (TAppend, TAppend.thrift_spec), None, ), # 2
)
def __init__(self, table=None, tappend=None,):
self.table = table
self.tappend = tappend
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.table = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.tappend = TAppend()
self.tappend.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('append_args')
if self.table is not None:
oprot.writeFieldBegin('table', TType.STRING, 1)
oprot.writeString(self.table)
oprot.writeFieldEnd()
if self.tappend is not None:
oprot.writeFieldBegin('tappend', TType.STRUCT, 2)
self.tappend.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.table is None:
raise TProtocol.TProtocolException(message='Required field table is unset!')
if self.tappend is None:
raise TProtocol.TProtocolException(message='Required field tappend is unset!')
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.table)
value = (value * 31) ^ hash(self.tappend)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class append_result:
"""
Attributes:
- success
- io
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (TResult, TResult.thrift_spec), None, ), # 0
(1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
)
def __init__(self, success=None, io=None,):
self.success = success
self.io = io
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TResult()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.io = TIOError()
self.io.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('append_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.io is not None:
oprot.writeFieldBegin('io', TType.STRUCT, 1)
self.io.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.io)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class openScanner_args:
"""
Attributes:
- table: the table to get the Scanner for
- tscan: the scan object to get a Scanner for
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'table', None, None, ), # 1
(2, TType.STRUCT, 'tscan', (TScan, TScan.thrift_spec), None, ), # 2
)
def __init__(self, table=None, tscan=None,):
self.table = table
self.tscan = tscan
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.table = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.tscan = TScan()
self.tscan.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('openScanner_args')
if self.table is not None:
oprot.writeFieldBegin('table', TType.STRING, 1)
oprot.writeString(self.table)
oprot.writeFieldEnd()
if self.tscan is not None:
oprot.writeFieldBegin('tscan', TType.STRUCT, 2)
self.tscan.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.table is None:
raise TProtocol.TProtocolException(message='Required field table is unset!')
if self.tscan is None:
raise TProtocol.TProtocolException(message='Required field tscan is unset!')
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.table)
value = (value * 31) ^ hash(self.tscan)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class openScanner_result:
"""
Attributes:
- success
- io
"""
thrift_spec = (
(0, TType.I32, 'success', None, None, ), # 0
(1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
)
def __init__(self, success=None, io=None,):
self.success = success
self.io = io
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.I32:
self.success = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.io = TIOError()
self.io.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('openScanner_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.I32, 0)
oprot.writeI32(self.success)
oprot.writeFieldEnd()
if self.io is not None:
oprot.writeFieldBegin('io', TType.STRUCT, 1)
self.io.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.io)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getScannerRows_args:
"""
Attributes:
- scannerId: the Id of the Scanner to return rows from. This is an Id returned from the openScanner function.
- numRows: number of rows to return
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'scannerId', None, None, ), # 1
(2, TType.I32, 'numRows', None, 1, ), # 2
)
def __init__(self, scannerId=None, numRows=thrift_spec[2][4],):
self.scannerId = scannerId
self.numRows = numRows
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.scannerId = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.numRows = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getScannerRows_args')
if self.scannerId is not None:
oprot.writeFieldBegin('scannerId', TType.I32, 1)
oprot.writeI32(self.scannerId)
oprot.writeFieldEnd()
if self.numRows is not None:
oprot.writeFieldBegin('numRows', TType.I32, 2)
oprot.writeI32(self.numRows)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.scannerId is None:
raise TProtocol.TProtocolException(message='Required field scannerId is unset!')
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.scannerId)
value = (value * 31) ^ hash(self.numRows)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getScannerRows_result:
"""
Attributes:
- success
- io
- ia: if the scannerId is invalid
"""
thrift_spec = (
(0, TType.LIST, 'success', (TType.STRUCT,(TResult, TResult.thrift_spec)), None, ), # 0
(1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'ia', (TIllegalArgument, TIllegalArgument.thrift_spec), None, ), # 2
)
def __init__(self, success=None, io=None, ia=None,):
self.success = success
self.io = io
self.ia = ia
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.LIST:
self.success = []
(_etype155, _size152) = iprot.readListBegin()
for _i156 in xrange(_size152):
_elem157 = TResult()
_elem157.read(iprot)
self.success.append(_elem157)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.io = TIOError()
self.io.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.ia = TIllegalArgument()
self.ia.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getScannerRows_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.STRUCT, len(self.success))
for iter158 in self.success:
iter158.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.io is not None:
oprot.writeFieldBegin('io', TType.STRUCT, 1)
self.io.write(oprot)
oprot.writeFieldEnd()
if self.ia is not None:
oprot.writeFieldBegin('ia', TType.STRUCT, 2)
self.ia.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.io)
value = (value * 31) ^ hash(self.ia)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class closeScanner_args:
"""
Attributes:
- scannerId: the Id of the Scanner to close *
"""
thrift_spec = (
None, # 0
(1, TType.I32, 'scannerId', None, None, ), # 1
)
def __init__(self, scannerId=None,):
self.scannerId = scannerId
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.I32:
self.scannerId = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('closeScanner_args')
if self.scannerId is not None:
oprot.writeFieldBegin('scannerId', TType.I32, 1)
oprot.writeI32(self.scannerId)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.scannerId is None:
raise TProtocol.TProtocolException(message='Required field scannerId is unset!')
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.scannerId)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class closeScanner_result:
"""
Attributes:
- io
- ia: if the scannerId is invalid
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
(2, TType.STRUCT, 'ia', (TIllegalArgument, TIllegalArgument.thrift_spec), None, ), # 2
)
def __init__(self, io=None, ia=None,):
self.io = io
self.ia = ia
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.io = TIOError()
self.io.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.ia = TIllegalArgument()
self.ia.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('closeScanner_result')
if self.io is not None:
oprot.writeFieldBegin('io', TType.STRUCT, 1)
self.io.write(oprot)
oprot.writeFieldEnd()
if self.ia is not None:
oprot.writeFieldBegin('ia', TType.STRUCT, 2)
self.ia.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.io)
value = (value * 31) ^ hash(self.ia)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class mutateRow_args:
"""
Attributes:
- table: table to apply the mutations
- trowMutations: mutations to apply
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'table', None, None, ), # 1
(2, TType.STRUCT, 'trowMutations', (TRowMutations, TRowMutations.thrift_spec), None, ), # 2
)
def __init__(self, table=None, trowMutations=None,):
self.table = table
self.trowMutations = trowMutations
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.table = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.trowMutations = TRowMutations()
self.trowMutations.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('mutateRow_args')
if self.table is not None:
oprot.writeFieldBegin('table', TType.STRING, 1)
oprot.writeString(self.table)
oprot.writeFieldEnd()
if self.trowMutations is not None:
oprot.writeFieldBegin('trowMutations', TType.STRUCT, 2)
self.trowMutations.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.table is None:
raise TProtocol.TProtocolException(message='Required field table is unset!')
if self.trowMutations is None:
raise TProtocol.TProtocolException(message='Required field trowMutations is unset!')
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.table)
value = (value * 31) ^ hash(self.trowMutations)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class mutateRow_result:
"""
Attributes:
- io
"""
thrift_spec = (
None, # 0
(1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
)
def __init__(self, io=None,):
self.io = io
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.io = TIOError()
self.io.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('mutateRow_result')
if self.io is not None:
oprot.writeFieldBegin('io', TType.STRUCT, 1)
self.io.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.io)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getScannerResults_args:
"""
Attributes:
- table: the table to get the Scanner for
- tscan: the scan object to get a Scanner for
- numRows: number of rows to return
"""
thrift_spec = (
None, # 0
(1, TType.STRING, 'table', None, None, ), # 1
(2, TType.STRUCT, 'tscan', (TScan, TScan.thrift_spec), None, ), # 2
(3, TType.I32, 'numRows', None, 1, ), # 3
)
def __init__(self, table=None, tscan=None, numRows=thrift_spec[3][4],):
self.table = table
self.tscan = tscan
self.numRows = numRows
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.table = iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.tscan = TScan()
self.tscan.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I32:
self.numRows = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getScannerResults_args')
if self.table is not None:
oprot.writeFieldBegin('table', TType.STRING, 1)
oprot.writeString(self.table)
oprot.writeFieldEnd()
if self.tscan is not None:
oprot.writeFieldBegin('tscan', TType.STRUCT, 2)
self.tscan.write(oprot)
oprot.writeFieldEnd()
if self.numRows is not None:
oprot.writeFieldBegin('numRows', TType.I32, 3)
oprot.writeI32(self.numRows)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
if self.table is None:
raise TProtocol.TProtocolException(message='Required field table is unset!')
if self.tscan is None:
raise TProtocol.TProtocolException(message='Required field tscan is unset!')
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.table)
value = (value * 31) ^ hash(self.tscan)
value = (value * 31) ^ hash(self.numRows)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class getScannerResults_result:
"""
Attributes:
- success
- io
"""
thrift_spec = (
(0, TType.LIST, 'success', (TType.STRUCT,(TResult, TResult.thrift_spec)), None, ), # 0
(1, TType.STRUCT, 'io', (TIOError, TIOError.thrift_spec), None, ), # 1
)
def __init__(self, success=None, io=None,):
self.success = success
self.io = io
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.LIST:
self.success = []
(_etype162, _size159) = iprot.readListBegin()
for _i163 in xrange(_size159):
_elem164 = TResult()
_elem164.read(iprot)
self.success.append(_elem164)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.io = TIOError()
self.io.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('getScannerResults_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.STRUCT, len(self.success))
for iter165 in self.success:
iter165.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.io is not None:
oprot.writeFieldBegin('io', TType.STRUCT, 1)
self.io.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __hash__(self):
value = 17
value = (value * 31) ^ hash(self.success)
value = (value * 31) ^ hash(self.io)
return value
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
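The structs above are Thrift-generated wrappers: each RPC gets an *_args struct and an *_result struct that can serialize themselves over any Thrift protocol. The following is a minimal round-trip sketch, assuming the generated module exposes increment_args as defined above and that TIncrement comes from the matching generated types module; the transport and protocol classes are the standard Thrift Python runtime.
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol

# Serialize an increment_args into an in-memory buffer ...
write_buf = TTransport.TMemoryBuffer()
args = increment_args(table='t1', tincrement=TIncrement())  # assumes both names are importable here
args.validate()   # raises TProtocolException when a required field is unset
args.write(TBinaryProtocol.TBinaryProtocol(write_buf))

# ... and read it back; the generated __eq__ compares the field dicts.
read_buf = TTransport.TMemoryBuffer(write_buf.getvalue())
decoded = increment_args()
decoded.read(TBinaryProtocol.TBinaryProtocol(read_buf))
assert decoded == args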
| 31.294442
| 188
| 0.659501
| 14,923
| 124,990
| 5.343095
| 0.023655
| 0.01436
| 0.025622
| 0.022801
| 0.909212
| 0.88769
| 0.879789
| 0.868339
| 0.859735
| 0.854669
| 0
| 0.007778
| 0.230578
| 124,990
| 3,993
| 189
| 31.302279
| 0.821327
| 0.083687
| 0
| 0.861066
| 1
| 0
| 0.034089
| 0.000966
| 0
| 0
| 0
| 0
| 0
| 1
| 0.110821
| false
| 0.00523
| 0.002288
| 0.026152
| 0.217718
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
42a8efd89bd1ebe6ae22861be27f53913a7f291c
| 5,810
|
py
|
Python
|
tests/commands/test_iotslice.py
|
juanmagal/iot-slice-orchestrator
|
9fc0df4f74b2788ea116549c001bcb8c6b663280
|
[
"Apache-2.0"
] | 2
|
2019-08-16T13:05:39.000Z
|
2019-12-24T16:57:29.000Z
|
tests/commands/test_iotslice.py
|
juanmagal/iot-slice-orchestrator
|
9fc0df4f74b2788ea116549c001bcb8c6b663280
|
[
"Apache-2.0"
] | null | null | null |
tests/commands/test_iotslice.py
|
juanmagal/iot-slice-orchestrator
|
9fc0df4f74b2788ea116549c001bcb8c6b663280
|
[
"Apache-2.0"
] | null | null | null |
"""Tests for our `iotorch slice` subcommand."""
from subprocess import PIPE, Popen as popen
from unittest import TestCase
class TestSlice(TestCase):
def test_returns_iotslice_get(self):
name='test'
operation='get'
configfile='./tests/conf/iotorch.toml'
output = popen(['iotorch', 'iotslice', operation, '--name='+name, '--configfile='+configfile], stdout=PIPE).communicate()[0]
self.assertTrue(name.encode('utf-8') in output)
def test_returns_iotslice_get_file_does_not_exist(self):
name='test'
operation='get'
text='Nothing to get'
configfile='./tests/conf/iotorch_not_exist.toml'
output = popen(['iotorch', 'iotslice', operation, '--name='+name, '--configfile='+configfile], stdout=PIPE).communicate()[0]
self.assertTrue(text.encode('utf-8') in output)
def test_returns_iotslice_create(self):
name='slice1'
edge='test1'
server='test1'
operation='create'
configfile='./tests/conf/iotorch.toml'
text= "IoT Slice " + name + " created"
output = popen(['iotorch', 'iotslice', operation, '--name='+name, '--edge='+edge,'--cloud='+server, '--configfile='+configfile], stdout=PIPE).communicate()[0]
self.assertTrue(text.encode('utf-8') in output)
operation='get'
output = popen(['iotorch', 'iotslice', operation, '--name='+name, '--configfile='+configfile], stdout=PIPE).communicate()[0]
self.assertTrue(edge.encode('utf-8') in output)
def test_returns_iotslice_create_file_does_not_exist(self):
name='slice2'
edge='test1'
server='test1'
operation='create'
configfile='./tests/conf/ghost.toml'
text='Clusters do not exist'
output = popen(['iotorch', 'iotslice', operation, '--name='+name, '--edge='+edge,'--cloud='+server, '--configfile='+configfile], stdout=PIPE).communicate()[0]
self.assertTrue(text.encode('utf-8') in output)
operation='get'
text='Nothing to get'
output = popen(['iotorch', 'iotslice', operation, '--name='+name, '--configfile='+configfile], stdout=PIPE).communicate()[0]
self.assertTrue(text.encode('utf-8') in output)
def test_returns_iotslice_create_edge_cluster_does_not_exist(self):
name='slice3'
edge='ghost'
server='test1'
operation='create'
configfile='./tests/conf/iotorch.toml'
text='Edge cluster does not exist'
output = popen(['iotorch', 'iotslice', operation, '--name='+name, '--edge='+edge,'--cloud='+server, '--configfile='+configfile], stdout=PIPE).communicate()[0]
self.assertTrue(text.encode('utf-8') in output)
operation='get'
text='Nothing to get'
output = popen(['iotorch', 'iotslice', operation, '--name='+name, '--configfile='+configfile], stdout=PIPE).communicate()[0]
print(output)
self.assertTrue(text.encode('utf-8') in output)
def test_returns_iotslice_create_cloud_cluster_does_not_exist(self):
name='slice4'
edge='test1'
server='ghost'
operation='create'
configfile='./tests/conf/iotorch.toml'
text='Cloud cluster does not exist'
output = popen(['iotorch', 'iotslice', operation, '--name='+name, '--edge='+edge,'--cloud='+server, '--configfile='+configfile], stdout=PIPE).communicate()[0]
self.assertTrue(text.encode('utf-8') in output)
operation='get'
text='Nothing to get'
output = popen(['iotorch', 'iotslice', operation, '--name='+name, '--configfile='+configfile], stdout=PIPE).communicate()[0]
print(output)
self.assertTrue(text.encode('utf-8') in output)
def test_returns_iotslice_delete(self):
name='test1'
operation='delete'
text= "IoT Slice " + name + " deleted"
configfile='./tests/conf/iotorch.toml'
output = popen(['iotorch', 'iotslice', operation, '--name='+name, '--configfile='+configfile], stdout=PIPE).communicate()[0]
self.assertTrue(text.encode('utf-8') in output)
operation='get'
text='Nothing to get'
output = popen(['iotorch', 'iotslice', operation, '--name='+name, '--configfile='+configfile], stdout=PIPE).communicate()[0]
self.assertTrue(text.encode('utf-8') in output)
def test_returns_iotslice_delete_slice_does_not_exist(self):
name='ghost'
operation='delete'
text='Nothing to delete'
configfile='./tests/conf/iotorch.toml'
output = popen(['iotorch', 'iotslice', operation, '--name='+name, '--configfile='+configfile], stdout=PIPE).communicate()[0]
self.assertTrue(text.encode('utf-8') in output)
def test_returns_iotslice_delete_file_does_not_exist(self):
name='server1'
operation='delete'
text='Nothing to delete'
configfile='./tests/conf/iotorch_not_exist.toml'
output = popen(['iotorch', 'iotslice', operation, '--name='+name, '--configfile='+configfile], stdout=PIPE).communicate()[0]
self.assertTrue(text.encode('utf-8') in output)
def test_returns_iotslice_list(self):
name='test'
operation='list'
configfile='./tests/conf/iotorch.toml'
output = popen(['iotorch', 'iotslice', operation, '--configfile='+configfile], stdout=PIPE).communicate()[0]
self.assertTrue(name.encode('utf-8') in output)
def test_returns_iotslice_list_file_does_not_exist(self):
name='test'
operation='list'
configfile='./tests/conf/iotorch_not_exist.toml'
text='Nothing to list'
output = popen(['iotorch', 'iotslice', operation, '--configfile='+configfile], stdout=PIPE).communicate()[0]
self.assertTrue(text.encode('utf-8') in output)
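The tests above shell out via Popen and compare raw bytes, hence the .encode('utf-8') on every expected string. On Python 3.5+ the same check reads more directly with subprocess.run; this is an equivalent sketch using the same CLI flags and config paths as the tests, not a change to the command under test.
import subprocess

def iotorch_output(*cli_args):
    """Run the iotorch CLI and return its stdout as bytes."""
    result = subprocess.run(['iotorch'] + list(cli_args), stdout=subprocess.PIPE)
    return result.stdout

out = iotorch_output('iotslice', 'get', '--name=test',
                     '--configfile=./tests/conf/iotorch.toml')
assert b'test' in out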
| 46.111111
| 166
| 0.633391
| 658
| 5,810
| 5.487842
| 0.098784
| 0.04874
| 0.079756
| 0.115204
| 0.909997
| 0.88175
| 0.841318
| 0.837995
| 0.824425
| 0.785932
| 0
| 0.009426
| 0.196558
| 5,810
| 125
| 167
| 46.48
| 0.764139
| 0.007057
| 0
| 0.71028
| 0
| 0
| 0.240889
| 0.052586
| 0
| 0
| 0
| 0
| 0.149533
| 1
| 0.102804
| false
| 0
| 0.018692
| 0
| 0.130841
| 0.018692
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
35f16df50af99a1674b0dd78a0c55223156755e6
| 3,678
|
py
|
Python
|
tests/factor_par_marginal_tests.py
|
petermlm/ProbPy
|
efb55962283e1c6c2422de812ec8689ffb9dbf16
|
[
"MIT"
] | 16
|
2015-01-05T19:14:24.000Z
|
2021-08-19T22:25:04.000Z
|
tests/factor_par_marginal_tests.py
|
petermlm/ProbPy
|
efb55962283e1c6c2422de812ec8689ffb9dbf16
|
[
"MIT"
] | null | null | null |
tests/factor_par_marginal_tests.py
|
petermlm/ProbPy
|
efb55962283e1c6c2422de812ec8689ffb9dbf16
|
[
"MIT"
] | 7
|
2015-04-10T18:24:58.000Z
|
2018-01-26T23:54:59.000Z
|
from nose.tools import with_setup, nottest
from tests.par_test_base import ParTestBase
class TestParMarginalMult(ParTestBase):
def marginal_test_0(self):
"""
X, f(X)
"""
res = self.X_factor.marginal(self.X)
par_res = self.X_par_factor.marginal(self.X)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def marginal_test_1(self):
"""
X, f(Y)
"""
res = self.Y_factor.marginal(self.X)
par_res = self.Y_par_factor.marginal(self.X)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def marginal_test_2(self):
"""
X, f(X, Y)
"""
res = self.XY_factor.marginal(self.X)
par_res = self.XY_par_factor.marginal(self.X)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def marginal_test_3(self):
"""
Y, f(X, Y)
"""
res = self.XY_factor.marginal(self.Y)
par_res = self.XY_par_factor.marginal(self.Y)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def marginal_test_4(self):
"""
X, Y, f(X, Y)
"""
res = self.XY_factor.marginal([self.X, self.Y])
par_res = self.XY_par_factor.marginal([self.X, self.Y])
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def marginal_test_5(self):
"""
Z, f(X, Y)
"""
res = self.XY_factor.marginal(self.Z)
par_res = self.XY_par_factor.marginal(self.Z)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def marginal_test_6(self):
"""
X, f(X, Y, Z)
"""
res = self.XYZ_factor.marginal(self.X)
par_res = self.XYZ_par_factor.marginal(self.X)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def marginal_test_7(self):
"""
Y, f(X, Y, Z)
"""
res = self.XYZ_factor.marginal(self.Y)
par_res = self.XYZ_par_factor.marginal(self.Y)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def marginal_test_8(self):
"""
Z, f(X, Y, Z)
"""
res = self.XYZ_factor.marginal(self.Z)
par_res = self.XYZ_par_factor.marginal(self.Z)
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def marginal_test_9(self):
"""
X, Y, f(X, Y, Z)
"""
res = self.XYZ_factor.marginal([self.X, self.Y])
par_res = self.XYZ_par_factor.marginal([self.X, self.Y])
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def marginal_test_10(self):
"""
X, Z, f(X, Y, Z)
"""
res = self.XYZ_factor.marginal([self.X, self.Z])
par_res = self.XYZ_par_factor.marginal([self.X, self.Z])
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def marginal_test_11(self):
"""
Y, Z, f(X, Y, Z)
"""
res = self.XYZ_factor.marginal([self.Y, self.Z])
par_res = self.XYZ_par_factor.marginal([self.Y, self.Z])
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
def marginal_test_12(self):
"""
X, Y, Z, f(X, Y, Z)
"""
res = self.XYZ_factor.marginal([self.X, self.Y, self.Z])
par_res = self.XYZ_par_factor.marginal([self.X, self.Y, self.Z])
assert res.rand_vars == par_res.rand_vars and res.values == par_res.values
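Every test above asserts that the parallel marginal agrees with the serial one on both the variable list and the value table. The operation itself is simple on a discrete factor: sum the joint table over the axes of the variables being dropped. A plain numpy sketch of that idea follows; it is illustrative only and is not ProbPy's internal representation.
import numpy as np

f_xy = np.array([[0.1, 0.2],   # f(X, Y) with X indexing rows, Y indexing columns
                 [0.3, 0.4]])

marginal_x = f_xy.sum(axis=1)  # keep X, sum out Y -> [0.3, 0.7]
marginal_y = f_xy.sum(axis=0)  # keep Y, sum out X -> [0.4, 0.6]
assert np.isclose(marginal_x.sum(), 1.0)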
| 29.902439
| 82
| 0.589179
| 563
| 3,678
| 3.612789
| 0.076377
| 0.115044
| 0.230089
| 0.168142
| 0.918387
| 0.905113
| 0.900197
| 0.900197
| 0.900197
| 0.887906
| 0
| 0.006004
| 0.275421
| 3,678
| 122
| 83
| 30.147541
| 0.757223
| 0.04758
| 0
| 0.381818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.236364
| 1
| 0.236364
| false
| 0
| 0.036364
| 0
| 0.290909
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
35fd8b8af87cd28a4ab5ca7d2f16fdf9c2bfcabe
| 488
|
gyp
|
Python
|
library/boost-mpl/1.57.0.gyp
|
KjellSchubert/bru
|
dd70b721d07fbd27c57c845cc3a29cd8f2dfc587
|
[
"MIT"
] | 3
|
2015-01-06T15:22:16.000Z
|
2015-11-27T18:13:04.000Z
|
library/boost-mpl/1.57.0.gyp
|
KjellSchubert/bru
|
dd70b721d07fbd27c57c845cc3a29cd8f2dfc587
|
[
"MIT"
] | 7
|
2015-02-10T15:13:38.000Z
|
2021-05-30T07:51:13.000Z
|
library/boost-mpl/1.57.0.gyp
|
KjellSchubert/bru
|
dd70b721d07fbd27c57c845cc3a29cd8f2dfc587
|
[
"MIT"
] | 3
|
2015-01-29T17:19:53.000Z
|
2016-01-06T12:50:06.000Z
|
{
"targets": [
{
"target_name": "boost-mpl",
"type": "none",
"dependencies": [
"../boost-mpl-type_traits-typeof-utility/boost-mpl-type_traits-typeof-utility.gyp:boost-mpl-type_traits-typeof-utility"
],
"export_dependent_settings": [
"../boost-mpl-type_traits-typeof-utility/boost-mpl-type_traits-typeof-utility.gyp:boost-mpl-type_traits-typeof-utility"
]
}
]
}
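The boost-mpl target has type "none": it builds nothing itself and exists only to forward its dependency, with export_dependent_settings making that dependency's exported settings (include directories, defines) visible transitively. A hypothetical consumer target might look like the sketch below; the target name and source file are made up for illustration, and the dependency path would be adjusted for the consumer's own location.
{
  "targets": [
    {
      "target_name": "my_app",        # hypothetical consumer
      "type": "executable",
      "sources": ["main.cpp"],
      "dependencies": [
        "library/boost-mpl/1.57.0.gyp:boost-mpl"
      ]
    }
  ]
}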
| 34.857143
| 135
| 0.55123
| 49
| 488
| 5.306122
| 0.306122
| 0.215385
| 0.323077
| 0.415385
| 0.738462
| 0.738462
| 0.738462
| 0.738462
| 0.738462
| 0.738462
| 0
| 0
| 0.29918
| 488
| 14
| 136
| 34.857143
| 0.760234
| 0
| 0
| 0.142857
| 0
| 0.142857
| 0.625767
| 0.529652
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
c421ed6cf752d3f7c4694097deb758e614f5b2ef
| 7,202
|
py
|
Python
|
Testes_automatizados/test_cifra_de_cesar.py
|
GregorioFornetti/CriPython
|
12b5bc64188de3a7c81d8f9d0a392f6edc7ac827
|
[
"MIT"
] | 1
|
2020-05-17T03:00:18.000Z
|
2020-05-17T03:00:18.000Z
|
Testes_automatizados/test_cifra_de_cesar.py
|
GregorioFornetti/Cripythongrafia
|
12b5bc64188de3a7c81d8f9d0a392f6edc7ac827
|
[
"MIT"
] | 1
|
2020-05-17T15:59:26.000Z
|
2020-05-17T15:59:26.000Z
|
Testes_automatizados/test_cifra_de_cesar.py
|
GregorioFornetti/Cripythongrafia
|
12b5bc64188de3a7c81d8f9d0a392f6edc7ac827
|
[
"MIT"
] | null | null | null |
from Cifras import cifra_de_cesar
import dicionarios
# Option: LETTERS ONLY (the apenas_letras mode)
def test_cifra_de_cesar_apenas_letras_chave_1():
assert cifra_de_cesar.encriptar_modo_apenas_letras(['1'],'abc') == 'bcd'
assert cifra_de_cesar.traduzir_modo_apenas_letras(['1'], 'bcd') == 'abc'
def test_cifra_de_cesar_apenas_letras_chave_invalida_vazia():
assert cifra_de_cesar.encriptar_modo_apenas_letras([''], 'abc') == dicionarios.retorna_erro_chave()
assert cifra_de_cesar.traduzir_modo_apenas_letras([''], 'abc') == dicionarios.retorna_erro_chave()
def test_cifra_de_cesar_apenas_letras_chave_invalida_negativa():
assert cifra_de_cesar.encriptar_modo_apenas_letras(['-1'], 'a') == dicionarios.retorna_erro_chave()
assert cifra_de_cesar.traduzir_modo_apenas_letras(['-1'], 'a') == dicionarios.retorna_erro_chave()
def test_cifra_de_cesar_apenas_letras_chave_invalida_texto():
assert cifra_de_cesar.encriptar_modo_apenas_letras(['texto'], 'a') == dicionarios.retorna_erro_chave()
assert cifra_de_cesar.traduzir_modo_apenas_letras(['texto'], 'a') == dicionarios.retorna_erro_chave()
def test_cifra_de_cesar_apenas_letras_chave_invalida_float():
assert cifra_de_cesar.encriptar_modo_apenas_letras(['1.2'], 'a') == dicionarios.retorna_erro_chave()
assert cifra_de_cesar.traduzir_modo_apenas_letras(['2.4'], 'a') == dicionarios.retorna_erro_chave()
def test_cifra_de_cesar_apenas_letras_mensagem_invalida():
assert cifra_de_cesar.encriptar_modo_apenas_letras(['1'], '') == dicionarios.retorna_erro_mensagem()
assert cifra_de_cesar.traduzir_modo_apenas_letras(['1'], '') == dicionarios.retorna_erro_mensagem()
def test_cifra_de_cesar_apenas_letras_volta_alfabeto():
assert cifra_de_cesar.encriptar_modo_apenas_letras(['1'], 'z') == 'a'
assert cifra_de_cesar.traduzir_modo_apenas_letras(['1'], 'a') == 'z'
def test_cifra_de_cesar_apenas_letras_maiusc_minusc():
assert cifra_de_cesar.encriptar_modo_apenas_letras(['1'], 'aAbBcCdD') == 'bBcCdDeE'
assert cifra_de_cesar.traduzir_modo_apenas_letras(['1'], 'bBcCdDeE') == 'aAbBcCdD'
def test_cifra_de_cesar_apenas_letras_caracteres_especiais():
assert cifra_de_cesar.encriptar_modo_apenas_letras(['1'], 'áéíóú!? aeiou') == 'áéíóú!? bfjpv'
assert cifra_de_cesar.traduzir_modo_apenas_letras(['1'], 'áéíóú!? bfjpv') == 'áéíóú!? aeiou'
def test_cifra_de_cesar_apenas_letras_chave_maior():
assert cifra_de_cesar.encriptar_modo_apenas_letras(['10'], 'az') == 'kj'
assert cifra_de_cesar.traduzir_modo_apenas_letras(['10'], 'kj') == 'az'
def test_cifra_de_cesar_apenas_letras_texto_grande_1():
assert cifra_de_cesar.encriptar_modo_apenas_letras(['1'],
'abcdefghijklmnopqrstuvwxyz') == 'bcdefghijklmnopqrstuvwxyza'
assert cifra_de_cesar.traduzir_modo_apenas_letras(['1'],
'bcdefghijklmnopqrstuvwxyza') == 'abcdefghijklmnopqrstuvwxyz'
def test_cifra_de_cesar_apenas_letras_texto_grande_2():
assert cifra_de_cesar.encriptar_modo_apenas_letras(['7'],
'Bom dia, Boa tarde, Boa noite!') == 'Ivt kph, Ivh ahykl, Ivh uvpal!'
assert cifra_de_cesar.traduzir_modo_apenas_letras(['7'],
'Ivt kph, Ivh ahykl, Ivh uvpal!') == 'Bom dia, Boa tarde, Boa noite!'
# OPTION: MULTIPLE CHARACTERS (the varios_caracteres mode)
def test_cifra_de_cesar_varios_caracteres_chave_1():
assert cifra_de_cesar.encriptar_modo_varios_caracteres(['1'], 'a') == 'b'
assert cifra_de_cesar.traduzir_modo_varios_caracteres(['1'], 'b') == 'a'
def test_cifra_de_cesar_varios_caracteres_chave_invalida_vazia():
assert cifra_de_cesar.encriptar_modo_varios_caracteres([''], 'abc') == dicionarios.retorna_erro_chave()
assert cifra_de_cesar.traduzir_modo_varios_caracteres([''], 'abc') == dicionarios.retorna_erro_chave()
def test_cifra_de_cesar_varios_caracteres_chave_invalida_negativa():
assert cifra_de_cesar.encriptar_modo_varios_caracteres(['-1'], 'a') == dicionarios.retorna_erro_chave()
assert cifra_de_cesar.traduzir_modo_varios_caracteres(['-1'], 'a') == dicionarios.retorna_erro_chave()
def test_cifra_de_cesar_varios_caracteres_chave_invalida_texto():
assert cifra_de_cesar.encriptar_modo_varios_caracteres(['texto'], 'a') == dicionarios.retorna_erro_chave()
assert cifra_de_cesar.traduzir_modo_varios_caracteres(['texto'], 'a') == dicionarios.retorna_erro_chave()
def test_cifra_de_cesar_varios_caracteres_chave_invalida_float():
assert cifra_de_cesar.encriptar_modo_varios_caracteres(['1.2'], 'a') == dicionarios.retorna_erro_chave()
assert cifra_de_cesar.traduzir_modo_varios_caracteres(['1.2'], 'a') == dicionarios.retorna_erro_chave()
def test_cifra_de_cesar_varios_caracteres_mensagem_invalida():
assert cifra_de_cesar.encriptar_modo_varios_caracteres(['1'], '') == dicionarios.retorna_erro_mensagem()
assert cifra_de_cesar.traduzir_modo_varios_caracteres(['1'], '') == dicionarios.retorna_erro_mensagem()
def test_cifra_de_cesar_varios_caracteres_volta():
assert cifra_de_cesar.encriptar_modo_varios_caracteres(['1'], '˞˝') == ' ˞'
assert cifra_de_cesar.traduzir_modo_varios_caracteres(['1'],' ˞') == '˞˝'
def test_cifra_de_cesar_varios_caracteres_maiusc_minus():
assert cifra_de_cesar.encriptar_modo_varios_caracteres(['1'], 'aAbBcCdD') == 'bBcCdDeE'
assert cifra_de_cesar.traduzir_modo_varios_caracteres(['1'], 'bBcCdDeE') == 'aAbBcCdD'
def test_cifra_de_cesar_varios_caracteres_caracteres_especiais():
assert cifra_de_cesar.encriptar_modo_varios_caracteres(['1'], 'áéíóú!? abc') == 'âêîôû"@!bcd'
assert cifra_de_cesar.traduzir_modo_varios_caracteres(['1'], 'âêîôû"@!bcd') == 'áéíóú!? abc'
def test_cifra_de_cesar_varios_caracteres_acima_do_limite():
assert cifra_de_cesar.encriptar_modo_varios_caracteres(['1'], '˟') == '˟'
assert cifra_de_cesar.traduzir_modo_varios_caracteres(['1'], '˟') == '˟'
def test_cifra_de_cesar_varios_caracteres_chave_maior():
assert cifra_de_cesar.encriptar_modo_varios_caracteres(['123'], 'a') == 'ÿ'
assert cifra_de_cesar.traduzir_modo_varios_caracteres(['123'], 'ÿ') == 'a'
def test_cifra_de_cesar_varios_caracteres_texto_grande_1():
assert cifra_de_cesar.encriptar_modo_varios_caracteres(['123'],
'Olá ! Será que troca letras com acentos também ? E espaços ? Vamos testar agora !'
) == 'íĊŜ¾¿¾ñăĐŜ¾ďēă¾ĒĐčāÿ¾ĊăĒĐÿđ¾āčċ¾ÿāăČĒčđ¾ĒÿċĀŤċ¾Ý¾ã¾ăđĎÿŢčđ¾Ý¾ôÿċčđ¾ĒăđĒÿĐ¾ÿąčĐÿ¾¿'
assert cifra_de_cesar.traduzir_modo_varios_caracteres(['123'],
'íĊŜ¾¿¾ñăĐŜ¾ďēă¾ĒĐčāÿ¾ĊăĒĐÿđ¾āčċ¾ÿāăČĒčđ¾ĒÿċĀŤċ¾Ý¾ã¾ăđĎÿŢčđ¾Ý¾ôÿċčđ¾ĒăđĒÿĐ¾ÿąčĐÿ¾¿'
) == 'Olá ! Será que troca letras com acentos também ? E espaços ? Vamos testar agora !'
def test_cifra_de_cesar_varios_caracteres_texto_grande_2():
assert cifra_de_cesar.encriptar_modo_varios_caracteres(['321'],
'Legal ! Parece que está tudo funcionando corretamente, vamos ver como o texto fica movendo mais ainda !!!'
) == 'ưljNjDžǐƄƅƄƴDžǖljLJljƄǕǙljƄljǗǘȢƄǘǙLjǓƄNJǙǒLJǍǓǒDžǒLjǓƄLJǓǖǖljǘDžǑljǒǘljƐƄǚDžǑǓǗƄǚljǖƄLJǓǑǓƄǓƄǘljǜǘǓƄNJǍLJDžƄǑǓǚljǒLjǓƄǑDžǍǗƄDžǍǒLjDžƄƅƅƅ'
assert cifra_de_cesar.traduzir_modo_varios_caracteres(['321'],
'ưljNjDžǐƄƅƄƴDžǖljLJljƄǕǙljƄljǗǘȢƄǘǙLjǓƄNJǙǒLJǍǓǒDžǒLjǓƄLJǓǖǖljǘDžǑljǒǘljƐƄǚDžǑǓǗƄǚljǖƄLJǓǑǓƄǓƄǘljǜǘǓƄNJǍLJDžƄǑǓǚljǒLjǓƄǑDžǍǗƄDžǍǒLjDžƄƅƅƅ'
) == 'Legal ! Parece que está tudo funcionando corretamente, vamos ver como o texto fica movendo mais ainda !!!'
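The letters-only assertions above pin down the behavior precisely: shift a-z and A-Z by the key with wrap-around, and leave everything else (accents, punctuation, spaces) untouched. Here is a minimal sketch consistent with those assertions; it is a reconstruction of the behavior under test, not the project's cifra_de_cesar implementation.
def caesar_letters_only(key, text):
    # Shift ASCII letters by `key` with wrap-around; pass everything else through.
    out = []
    for ch in text:
        if 'a' <= ch <= 'z':
            out.append(chr((ord(ch) - ord('a') + key) % 26 + ord('a')))
        elif 'A' <= ch <= 'Z':
            out.append(chr((ord(ch) - ord('A') + key) % 26 + ord('A')))
        else:
            out.append(ch)  # 'á', '!', ' ' pass through unchanged
    return ''.join(out)

assert caesar_letters_only(1, 'abc') == 'bcd'
assert caesar_letters_only(1, 'z') == 'a'
assert caesar_letters_only(1, 'áéíóú!? aeiou') == 'áéíóú!? bfjpv'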
| 61.033898
| 116
| 0.784088
| 1,012
| 7,202
| 5.246047
| 0.117589
| 0.100207
| 0.171784
| 0.169523
| 0.883782
| 0.881522
| 0.863063
| 0.826521
| 0.719533
| 0.407986
| 0
| 0.010129
| 0.095251
| 7,202
| 117
| 117
| 61.555556
| 0.777931
| 0.006248
| 0
| 0
| 0
| 0
| 0.17948
| 0.066536
| 0
| 0
| 0
| 0
| 0.561798
| 1
| 0.280899
| true
| 0
| 0.022472
| 0
| 0.303371
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
c4871856b6fb7c31d8827cf04c9a0a8b46d46229
| 82
|
py
|
Python
|
vg/compat/v2.py
|
lace/vx
|
33134cae43d7729b6128b198119e1593035066ae
|
[
"BSD-2-Clause"
] | 100
|
2019-01-18T05:08:34.000Z
|
2022-03-24T09:59:11.000Z
|
vg/compat/v2.py
|
lace/vg
|
bece5191756b43378e882fd1fdf0ffa45a06e467
|
[
"BSD-2-Clause"
] | 153
|
2018-11-16T17:44:28.000Z
|
2022-03-10T23:33:50.000Z
|
vg/compat/v2.py
|
lace/vx
|
33134cae43d7729b6128b198119e1593035066ae
|
[
"BSD-2-Clause"
] | 14
|
2019-05-17T15:05:52.000Z
|
2022-03-09T08:42:53.000Z
|
from .. import shape # noqa: F401, F403
from ..core import * # noqa: F401, F403
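This shim re-exports the package's core names behind a versioned module path so callers can pin an API revision. A usage sketch, assuming normalize is among the re-exported core names:
import numpy as np
from vg.compat import v2 as vg  # the module above: core names behind a versioned path

unit = vg.normalize(np.array([3.0, 4.0, 0.0]))  # normalize assumed to be part of vg's core
assert np.allclose(unit, [0.6, 0.8, 0.0])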
| 27.333333
| 40
| 0.646341
| 12
| 82
| 4.416667
| 0.583333
| 0.301887
| 0.45283
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1875
| 0.219512
| 82
| 2
| 41
| 41
| 0.640625
| 0.402439
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
67393e54cdc9e8f6a0c8e35f2fbdb2ca0c85c994
| 3,745
|
py
|
Python
|
models/link_prediction/hole.py
|
Minys233/GCN-BMP
|
21b64a3c8cc9bc33718ae09c65aa917e575132eb
|
[
"MIT"
] | null | null | null |
models/link_prediction/hole.py
|
Minys233/GCN-BMP
|
21b64a3c8cc9bc33718ae09c65aa917e575132eb
|
[
"MIT"
] | null | null | null |
models/link_prediction/hole.py
|
Minys233/GCN-BMP
|
21b64a3c8cc9bc33718ae09c65aa917e575132eb
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 3/14/2019 4:03 PM
# @Author : chinshin
# @FileName: hole.py
import chainer
from chainer import links
from chainer import functions
from chainer.functions import relu
class HOLE(chainer.Chain):
def __init__(self, out_dim, hidden_dims=(32, 16), activation=relu):
super(HOLE, self).__init__()
hidden_layers = [links.Linear(None, hidden_dim) for hidden_dim in hidden_dims]
with self.init_scope():
self.hidden_layers = chainer.ChainList(*hidden_layers)
self.l_out = links.Linear(None, out_dim)
self.activation = activation
def __call__(self, left_x, right_x):
h = self.circular_correlation(left_x, right_x)
for l in self.hidden_layers:
h = self.activation(l(h))
h = self.l_out(h)
return h
def circular_correlation(self, left_x, right_x):
"""
Computes the circular correlation of two vectors a and b via their fast fourier transforms
In python code, ifft(np.conj(fft(a)) * fft(b)).real
:param left_x:
:param right_x:
(a - j * b) * (c + j * d) = (ac + bd) + j * (ad - bc)
:return:
"""
left_x_real = left_x
left_x_imag = chainer.as_variable(self.xp.zeros_like(left_x_real, dtype=self.xp.float32))
left_x_fft_real, left_x_fft_imag = functions.fft((left_x_real, left_x_imag))
right_x_real = right_x
right_x_imag = chainer.as_variable(self.xp.zeros_like(right_x_real, dtype=self.xp.float32))
right_x_fft_real, right_x_fft_imag = functions.fft((right_x_real, right_x_imag))
prod_fft_real = left_x_fft_real * right_x_fft_real + left_x_fft_imag * right_x_fft_imag
prod_fft_imag = left_x_fft_real * right_x_fft_imag - left_x_fft_imag * right_x_fft_real
ifft_real, _ = functions.ifft((prod_fft_real, prod_fft_imag))
return ifft_real
class HolE(chainer.Chain):
def __init__(self, out_dim, hidden_dims=(32, 16), activation=relu):
super(HolE, self).__init__()
hidden_layers = [links.Linear(None, hidden_dim) for hidden_dim in hidden_dims]
with self.init_scope():
self.hidden_layers = chainer.ChainList(*hidden_layers)
self.l_out = links.Linear(None, out_dim)
self.activation = activation
def __call__(self, left_x, right_x):
h = self.circular_correlation(left_x, right_x)
for l in self.hidden_layers:
h = self.activation(l(h))
h = self.l_out(h)
return h
def circular_correlation(self, left_x, right_x):
"""
Computes the circular correlation of two vectors a and b via their fast fourier transforms
In python code, ifft(np.conj(fft(a)) * fft(b)).real
:param left_x:
:param right_x:
(a - j * b) * (c + j * d) = (ac + bd) + j * (ad - bc)
:return:
"""
left_x_real = left_x
left_x_imag = chainer.as_variable(self.xp.zeros_like(left_x_real, dtype=self.xp.float32))
left_x_fft_real, left_x_fft_imag = functions.fft((left_x_real, left_x_imag))
right_x_real = right_x
right_x_imag = chainer.as_variable(self.xp.zeros_like(right_x_real, dtype=self.xp.float32))
right_x_fft_real, right_x_fft_imag = functions.fft((right_x_real, right_x_imag))
prod_fft_real = left_x_fft_real * right_x_fft_real + left_x_fft_imag * right_x_fft_imag
prod_fft_imag = left_x_fft_real * right_x_fft_imag - left_x_fft_imag * right_x_fft_real
ifft_real, _ = functions.ifft((prod_fft_real, prod_fft_imag))
return ifft_real
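The docstring's identity can be checked directly in numpy: the real/imaginary bookkeeping in circular_correlation above is exactly the complex product (a - jb)(c + jd) = (ac + bd) + j(ad - bc) written out by hand. A short verification that the FFT route equals the direct circular correlation sum_k a[k] * b[(k + i) mod n]:
import numpy as np

a = np.array([1.0, 2.0, 3.0, 4.0])
b = np.array([5.0, 6.0, 7.0, 8.0])

via_fft = np.fft.ifft(np.conj(np.fft.fft(a)) * np.fft.fft(b)).real
direct = np.array([np.dot(a, np.roll(b, -i)) for i in range(len(a))])
assert np.allclose(via_fft, direct)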
| 41.153846
| 100
| 0.644059
| 566
| 3,745
| 3.879859
| 0.160777
| 0.07286
| 0.043716
| 0.030055
| 0.92714
| 0.92714
| 0.92714
| 0.92714
| 0.92714
| 0.92714
| 0
| 0.009691
| 0.256075
| 3,745
| 91
| 101
| 41.153846
| 0.778536
| 0.156475
| 0
| 0.851852
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.074074
| 0
| 0.296296
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
675c1866604784da69a34384349a264596a506e7
| 4,784
|
py
|
Python
|
sandbox/grist/test_twowaymap.py
|
nataliemisasi/grist-core
|
52d3f6320339b23ed0155009f45ff7121d90e3b8
|
[
"Apache-2.0"
] | 2,667
|
2020-10-30T16:25:06.000Z
|
2022-03-31T15:27:37.000Z
|
sandbox/grist/test_twowaymap.py
|
nataliemisasi/grist-core
|
52d3f6320339b23ed0155009f45ff7121d90e3b8
|
[
"Apache-2.0"
] | 137
|
2020-12-04T08:14:09.000Z
|
2022-03-31T22:36:13.000Z
|
sandbox/grist/test_twowaymap.py
|
nataliemisasi/grist-core
|
52d3f6320339b23ed0155009f45ff7121d90e3b8
|
[
"Apache-2.0"
] | 103
|
2020-10-30T15:17:51.000Z
|
2022-03-28T17:02:04.000Z
|
import unittest
import twowaymap
class TestTwoWayMap(unittest.TestCase):
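# Helper assertion: renders the map as (left -> values, right -> keys) dicts and compares both views at once.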
def assertTwoWayMap(self, twmap, forward, reverse):
map_repr = (
{ k: twmap.lookup_left(k) for k in twmap.left_all() },
{ k: twmap.lookup_right(k) for k in twmap.right_all() }
)
self.assertEqual(map_repr, (forward, reverse))
def test_set_list(self):
tmap = twowaymap.TwoWayMap(left=set, right=list)
self.assertFalse(tmap)
tmap.insert(1, "a")
self.assertTrue(tmap)
self.assertTwoWayMap(tmap, {1: ["a"]}, {"a": {1}})
tmap.insert(1, "a") # should be a no-op, since this pair already exists
tmap.insert(1, "b")
tmap.insert(2, "a")
self.assertTwoWayMap(tmap, {1: ["a", "b"], 2: ["a"]}, {"a": {1,2}, "b": {1}})
tmap.insert(1, "b")
tmap.insert(2, "b")
self.assertTwoWayMap(tmap, {1: ["a", "b"], 2: ["a", "b"]}, {"a": {1,2}, "b": {1,2}})
tmap.remove(1, "b")
tmap.remove(2, "b")
self.assertTwoWayMap(tmap, {1: ["a"], 2: ["a"]}, {"a": {1,2}})
tmap.insert(1, "b")
tmap.insert(2, "b")
tmap.remove_left(1)
self.assertTwoWayMap(tmap, {2: ["a", "b"]}, {"a": {2}, "b": {2}})
tmap.insert(1, "a")
tmap.insert(2, "b")
tmap.remove_right("b")
self.assertTwoWayMap(tmap, {1: ["a"], 2: ["a"]}, {"a": {1,2}})
self.assertTrue(tmap)
tmap.clear()
self.assertTwoWayMap(tmap, {}, {})
self.assertFalse(tmap)
def test_set_single(self):
tmap = twowaymap.TwoWayMap(left=set, right="single")
self.assertFalse(tmap)
tmap.insert(1, "a")
self.assertTrue(tmap)
self.assertTwoWayMap(tmap, {1: "a"}, {"a": {1}})
tmap.insert(1, "a") # should be a no-op, since this pair already exists
tmap.insert(1, "b")
tmap.insert(2, "a")
self.assertTwoWayMap(tmap, {1: "b", 2: "a"}, {"a": {2}, "b": {1}})
tmap.insert(1, "b")
tmap.insert(2, "b")
self.assertTwoWayMap(tmap, {1: "b", 2: "b"}, {"b": {1,2}})
tmap.remove(1, "b")
self.assertTwoWayMap(tmap, {2: "b"}, {"b": {2}})
tmap.remove(2, "b")
self.assertTwoWayMap(tmap, {}, {})
tmap.insert(1, "b")
tmap.insert(2, "b")
self.assertTwoWayMap(tmap, {1: "b", 2: "b"}, {"b": {1,2}})
tmap.remove_left(1)
self.assertTwoWayMap(tmap, {2: "b"}, {"b": {2}})
tmap.insert(1, "a")
tmap.insert(2, "b")
tmap.remove_right("b")
self.assertTwoWayMap(tmap, {1: "a"}, {"a": {1}})
self.assertTrue(tmap)
tmap.clear()
self.assertTwoWayMap(tmap, {}, {})
self.assertFalse(tmap)
def test_strict_list(self):
tmap = twowaymap.TwoWayMap(left="strict", right=list)
self.assertFalse(tmap)
tmap.insert(1, "a")
self.assertTrue(tmap)
self.assertTwoWayMap(tmap, {1: ["a"]}, {"a": 1})
tmap.insert(1, "a") # should be a no-op, since this pair already exists
tmap.insert(1, "b")
with self.assertRaises(ValueError):
tmap.insert(2, "a")
self.assertTwoWayMap(tmap, {1: ["a", "b"]}, {"a": 1, "b": 1})
tmap.insert(1, "b")
with self.assertRaises(ValueError):
tmap.insert(2, "b")
tmap.insert(2, "c")
self.assertTwoWayMap(tmap, {1: ["a", "b"], 2: ["c"]}, {"a": 1, "b": 1, "c": 2})
tmap.remove(1, "b")
self.assertTwoWayMap(tmap, {1: ["a"], 2: ["c"]}, {"a": 1, "c": 2})
tmap.remove(2, "b") # no-op: this pair doesn't exist, so the map is unchanged
self.assertTwoWayMap(tmap, {1: ["a"], 2: ["c"]}, {"a": 1, "c": 2})
tmap.insert(1, "b")
with self.assertRaises(ValueError):
tmap.insert(2, "b")
self.assertTwoWayMap(tmap, {1: ["a", "b"], 2: ["c"]}, {"a": 1, "b": 1, "c": 2})
tmap.remove_left(1)
self.assertTwoWayMap(tmap, {2: ["c"]}, {"c": 2})
tmap.insert(1, "a")
tmap.insert(2, "b")
tmap.remove_right("b")
self.assertTwoWayMap(tmap, {1: ["a"], 2: ["c"]}, {"a": 1, "c": 2})
self.assertTrue(tmap)
tmap.clear()
self.assertTwoWayMap(tmap, {}, {})
self.assertFalse(tmap)
def test_strict_single(self):
tmap = twowaymap.TwoWayMap(left="strict", right="single")
tmap.insert(1, "a")
tmap.insert(2, "b")
tmap.insert(2, "c")
self.assertTwoWayMap(tmap, {1: "a", 2: "c"}, {"a": 1, "c": 2})
with self.assertRaises(ValueError):
tmap.insert(2, "a")
tmap.insert(2, "c") # This pair already exists, so not an error.
self.assertTwoWayMap(tmap, {1: "a", 2: "c"}, {"a": 1, "c": 2})
def test_nonhashable(self):
# Test that we don't get into an inconsistent state if we attempt to use a non-hashable value.
tmap = twowaymap.TwoWayMap(left=list, right=list)
tmap.insert(1, "a")
self.assertTwoWayMap(tmap, {1: ["a"]}, {"a": [1]})
with self.assertRaises(TypeError):
tmap.insert(1, {})
with self.assertRaises(TypeError):
tmap.insert({}, "a")
self.assertTwoWayMap(tmap, {1: ["a"]}, {"a": [1]})
if __name__ == "__main__":
unittest.main()
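# ---------------------------------------------------------------------------
# A minimal sketch of the insert/lookup semantics the tests above rely on.
# This is a hypothetical reimplementation for illustration, NOT grist's actual
# twowaymap module. `left` and `right` choose how each side stores its bound
# values: a container type (set or list), "single" (a new pair silently
# replaces the old binding), or "strict" (a conflicting pair raises
# ValueError). Removal methods are omitted for brevity.
class TwoWayMapSketch(object):
    def __init__(self, left, right):
        self._fwd = {}              # left key -> right value(s), shaped by `right`
        self._rev = {}              # right key -> left value(s), shaped by `left`
        self._left_policy = left
        self._right_policy = right

    def __bool__(self):
        return bool(self._fwd)

    def lookup_left(self, key):
        return self._fwd.get(key)

    def lookup_right(self, key):
        return self._rev.get(key)

    def insert(self, lval, rval):
        # "strict" forbids rebinding a key to a different value; check both
        # sides up front so a ValueError leaves the map unchanged.
        if self._right_policy == "strict" and self._fwd.get(lval, rval) != rval:
            raise ValueError("%r is already bound" % (lval,))
        if self._left_policy == "strict" and self._rev.get(rval, lval) != lval:
            raise ValueError("%r is already bound" % (rval,))
        # "single" silently replaces: first unlink the displaced pair's entry
        # on the opposite side.
        if self._right_policy == "single" and self._fwd.get(lval, rval) != rval:
            self._unbind(self._rev, self._left_policy, self._fwd[lval], lval)
        if self._left_policy == "single" and self._rev.get(rval, lval) != lval:
            self._unbind(self._fwd, self._right_policy, self._rev[rval], rval)
        self._bind(self._fwd, self._right_policy, lval, rval)
        self._bind(self._rev, self._left_policy, rval, lval)

    @staticmethod
    def _bind(mapping, policy, key, value):
        if policy in ("single", "strict"):
            mapping[key] = value
        elif policy is set:
            mapping.setdefault(key, set()).add(value)
        else:                       # list: keep insertion order, no duplicates
            bucket = mapping.setdefault(key, [])
            if value not in bucket:
                bucket.append(value)

    @staticmethod
    def _unbind(mapping, policy, key, value):
        if policy in ("single", "strict"):
            del mapping[key]
        else:
            mapping[key].remove(value)  # works for both set and list buckets
            if not mapping[key]:
                del mapping[key]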
| 30.864516 | 98 | 0.562918 | 690 | 4,784 | 3.86087 | 0.105797 | 0.146396 | 0.250375 | 0.189189 | 0.838213 | 0.822823 | 0.813814 | 0.722222 | 0.635886 | 0.60961 | 0 | 0.034573 | 0.201923 | 4,784 | 154 | 99 | 31.064935 | 0.663174 | 0.059574 | 0 | 0.745902 | 0 | 0 | 0.036501 | 0 | 0 | 0 | 0 | 0 | 0.401639 | 1 | 0.04918 | false | 0 | 0.016393 | 0 | 0.07377 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
| 67819aea68f1999e10d3dbf8774060a0d5a49e40 | 49,395 | py | Python | dlkit/manager_impls/commenting/managers.py | UOC/dlkit | a9d265db67e81b9e0f405457464e762e2c03f769 | ["MIT"] | 2 | 2018-02-23T12:16:11.000Z | 2020-10-08T17:54:24.000Z | dlkit/manager_impls/commenting/managers.py | UOC/dlkit | a9d265db67e81b9e0f405457464e762e2c03f769 | ["MIT"] | 87 | 2017-04-21T18:57:15.000Z | 2021-12-13T19:43:57.000Z | dlkit/manager_impls/commenting/managers.py | UOC/dlkit | a9d265db67e81b9e0f405457464e762e2c03f769 | ["MIT"] | 1 | 2018-03-01T16:44:25.000Z | 2018-03-01T16:44:25.000Z |
"""Manager utility implementations of commenting managers."""
# pylint: disable=no-init
# Numerous classes don't require __init__.
# pylint: disable=too-many-public-methods
# Number of methods are defined in specification
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
from ..osid import managers as osid_managers
from ..osid.osid_errors import NullArgument
from ..osid.osid_errors import Unimplemented
from ..type.objects import TypeList
from dlkit.abstract_osid.commenting import managers as abc_commenting_managers
class CommentingProfile(abc_commenting_managers.CommentingProfile, osid_managers.OsidProfile):
"""The commenting profile describes the interoperability among commenting services."""
def supports_visible_federation(self):
"""Tests if any book federation is exposed.
Federation is exposed when a specific book may be identified,
selected and used to create a lookup or admin session.
Federation is not exposed when a set of books appears as a
single book.
return: (boolean) - ``true`` if visible federation is supported,
``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_comment_lookup(self):
"""Tests for the availability of a comment lookup service.
return: (boolean) - ``true`` if comment lookup is available,
``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_rating_lookup(self):
"""Tests for the availability of a rating lookup service.
return: (boolean) - ``true`` if rating lookup is available,
``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_comment_query(self):
"""Tests if querying comments is available.
return: (boolean) - ``true`` if comment query is available,
``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_comment_search(self):
"""Tests if searching for comments is available.
return: (boolean) - ``true`` if comment search is available,
``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_comment_admin(self):
"""Tests if managing comments is available.
return: (boolean) - ``true`` if comment admin is available,
``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_comment_notification(self):
"""Tests if comment notification is available.
return: (boolean) - ``true`` if comment notification is
available, ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_comment_book(self):
"""Tests if a comment to book lookup session is available.
return: (boolean) - ``true`` if comment book lookup session is
supported, ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_comment_book_assignment(self):
"""Tests if a comment to book assignment session is available.
return: (boolean) - ``true`` if comment book assignment is
supported, ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_comment_smart_book(self):
"""Tests if a comment smart booking session is available.
return: (boolean) - ``true`` if comment smart booking is
supported, ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_book_lookup(self):
"""Tests for the availability of an book lookup service.
return: (boolean) - ``true`` if book lookup is available,
``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_book_query(self):
"""Tests if querying books is available.
return: (boolean) - ``true`` if book query is available,
``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_book_search(self):
"""Tests if searching for books is available.
return: (boolean) - ``true`` if book search is available,
``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_book_admin(self):
"""Tests for the availability of a book administrative service for creating and deleting books.
return: (boolean) - ``true`` if book administration is
available, ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_book_notification(self):
"""Tests for the availability of a book notification service.
return: (boolean) - ``true`` if book notification is available,
``false`` otherwise
*compliance: mandatory -- This method must be implemented in all
providers.*
"""
return False
def supports_book_hierarchy(self):
"""Tests for the availability of a book hierarchy traversal service.
return: (boolean) - ``true`` if book hierarchy traversal is
available, ``false`` otherwise
*compliance: mandatory -- This method must be implemented.*
"""
return False
def supports_book_hierarchy_design(self):
"""Tests for the availability of a book hierarchy design service.
return: (boolean) - ``true`` if book hierarchy design is
available, ``false`` otherwise
*compliance: mandatory -- This method must be implemented in all
providers.*
"""
return False
def supports_commenting_batch(self):
"""Tests for the availability of a commenting batch service.
return: (boolean) - ``true`` if commenting batch service is
available, ``false`` otherwise
*compliance: mandatory -- This method must be implemented in all
providers.*
"""
return False
def get_comment_record_types(self):
"""Gets the supported ``Comment`` record types.
return: (osid.type.TypeList) - a list containing the supported
comment record types
*compliance: mandatory -- This method must be implemented.*
"""
return TypeList([])
comment_record_types = property(fget=get_comment_record_types)
def supports_comment_record_type(self, comment_record_type=None):
"""Tests if the given ``Comment`` record type is supported.
arg: comment_record_type (osid.type.Type): a ``Type``
indicating a ``Comment`` record type
return: (boolean) - ``true`` if the given ``Type`` is supported,
``false`` otherwise
raise: NullArgument - ``comment_record_type`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
if comment_record_type is None:
raise NullArgument()
return False
def get_comment_search_record_types(self):
"""Gets the supported comment search record types.
return: (osid.type.TypeList) - a list containing the supported
comment search record types
*compliance: mandatory -- This method must be implemented.*
"""
return TypeList([])
comment_search_record_types = property(fget=get_comment_search_record_types)
def supports_comment_search_record_type(self, comment_search_record_type=None):
"""Tests if the given comment search record type is supported.
arg: comment_search_record_type (osid.type.Type): a ``Type``
indicating a comment record type
return: (boolean) - ``true`` if the given ``Type`` is supported,
``false`` otherwise
raise: NullArgument - ``comment_search_record_type`` is
``null``
*compliance: mandatory -- This method must be implemented.*
"""
if comment_search_record_type is None:
raise NullArgument()
return False
def get_book_record_types(self):
"""Gets the supported ``Book`` record types.
return: (osid.type.TypeList) - a list containing the supported
book record types
*compliance: mandatory -- This method must be implemented.*
"""
return TypeList([])
book_record_types = property(fget=get_book_record_types)
def supports_book_record_type(self, book_record_type=None):
"""Tests if the given ``Book`` record type is supported.
arg: book_record_type (osid.type.Type): a ``Type`` indicating
a ``Book`` record type
return: (boolean) - ``true`` if the given ``Type`` is supported,
``false`` otherwise
raise: NullArgument - ``book_record_type`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
if book_record_type is None:
raise NullArgument()
return False
def get_book_search_record_types(self):
"""Gets the supported book search record types.
return: (osid.type.TypeList) - a list containing the supported
book search record types
*compliance: mandatory -- This method must be implemented.*
"""
return TypeList([])
book_search_record_types = property(fget=get_book_search_record_types)
def supports_book_search_record_type(self, book_search_record_type=None):
"""Tests if the given book search record type is supported.
arg: book_search_record_type (osid.type.Type): a ``Type``
indicating a book record type
return: (boolean) - ``true`` if the given ``Type`` is supported,
``false`` otherwise
raise: NullArgument - ``book_search_record_type`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
if book_search_record_type is None:
raise NullArgument()
return False
class CommentingManager(abc_commenting_managers.CommentingManager, osid_managers.OsidManager, CommentingProfile):
"""The commenting manager provides access to commenting sessions and provides interoperability tests for various aspects of this service.
The sessions included in this manager are:
* ``CommentLookupSession:`` a session to lookup comments
* ``RatingLookupSession:`` a session to lookup comments
* ``CommentQuerySession:`` a session to query comments
* ``CommentSearchSession:`` a session to search comments
* ``CommentAdminSession:`` a session to manage comments
* ``CommentNotificationSession:`` a session to subscribe to
notifications of comment changes
* ``CommentBookSession:`` a session for looking up comment and
book mappings
* ``CommentBookAssignmentSession:`` a session for managing comment
and book mappings
* ``CommentSmartBookSession:`` a session to manage dynamic comment
books
* ``BookLookupSession:`` a session to retrieve books
* ``BookQuerySession:`` a session to query books
* ``BookSearchSession:`` a session to search for books
* ``BookAdminSession:`` a session to create, update and delete
books
* ``BookNotificationSession:`` a session to receive notifications
for changes in books
* ``BookHierarchyTraversalSession:`` a session to traverse
hierarchies of books
* ``BookHierarchyDesignSession:`` a session to manage hierarchies
of books
The commenting manager also provides a profile for determining the
search types supported by this service.
"""
def get_comment_lookup_session(self):
"""Gets the ``OsidSession`` associated with the comment lookup service.
return: (osid.commenting.CommentLookupSession) - a
``CommentLookupSession``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_lookup()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_comment_lookup()`` is ``true``.*
"""
raise Unimplemented()
comment_lookup_session = property(fget=get_comment_lookup_session)
def get_comment_lookup_session_for_book(self, book_id=None):
"""Gets the ``OsidSession`` associated with the comment lookup service for the given book.
arg: book_id (osid.id.Id): the ``Id`` of the ``Book``
return: (osid.commenting.CommentLookupSession) - a
``CommentLookupSession``
raise: NotFound - no ``Book`` found by the given ``Id``
raise: NullArgument - ``book_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_lookup()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_comment_lookup()`` and
``supports_visible_federation()`` are ``true``*
"""
if book_id is None:
raise NullArgument
raise Unimplemented()
def get_rating_lookup_session(self):
"""Gets the ``OsidSession`` associated with the rating lookup service.
return: (osid.commenting.RatingLookupSession) - a
``RatingLookupSession``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_rating_lookup()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_rating_lookup()`` is ``true``.*
"""
raise Unimplemented()
rating_lookup_session = property(fget=get_rating_lookup_session)
def get_rating_lookup_session_for_book(self, book_id=None):
"""Gets the ``OsidSession`` associated with the rating lookup service for the given book.
arg: book_id (osid.id.Id): the ``Id`` of the ``Book``
return: (osid.commenting.RatingLookupSession) - a
``RatingLookupSession``
raise: NotFound - no ``Book`` found by the given ``Id``
raise: NullArgument - ``book_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_rating_lookup()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_rating_lookup()`` and
``supports_visible_federation()`` are ``true``*
"""
if book_id is None:
raise NullArgument
raise Unimplemented()
def get_comment_query_session(self):
"""Gets the ``OsidSession`` associated with the comment query service.
return: (osid.commenting.CommentQuerySession) - a
``CommentQuerySession``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_query()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_comment_query()`` is ``true``.*
"""
raise Unimplemented()
comment_query_session = property(fget=get_comment_query_session)
def get_comment_query_session_for_book(self, book_id=None):
"""Gets the ``OsidSession`` associated with the comment query service for the given book.
arg: book_id (osid.id.Id): the ``Id`` of the ``Book``
return: (osid.commenting.CommentQuerySession) - a
``CommentQuerySession``
raise: NotFound - no ``Book`` found by the given ``Id``
raise: NullArgument - ``book_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_query()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_comment_query()`` and
``supports_visible_federation()`` are ``true``*
"""
if book_id is None:
raise NullArgument
raise Unimplemented()
def get_comment_search_session(self):
"""Gets the ``OsidSession`` associated with the comment search service.
return: (osid.commenting.CommentSearchSession) - a
``CommentSearchSession``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_search()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_comment_search()`` is ``true``.*
"""
raise Unimplemented()
comment_search_session = property(fget=get_comment_search_session)
def get_comment_search_session_for_book(self, book_id=None):
"""Gets the ``OsidSession`` associated with the comment search service for the given book.
arg: book_id (osid.id.Id): the ``Id`` of the ``Book``
return: (osid.commenting.CommentSearchSession) - a
``CommentSearchSession``
raise: NotFound - no ``Book`` found by the given ``Id``
raise: NullArgument - ``book_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_search()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_comment_search()`` and
``supports_visible_federation()`` are ``true``*
"""
if book_id is None:
raise NullArgument
raise Unimplemented()
def get_comment_admin_session(self):
"""Gets the ``OsidSession`` associated with the comment administration service.
return: (osid.commenting.CommentAdminSession) - a
``CommentAdminSession``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_admin()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_comment_admin()`` is ``true``.*
"""
raise Unimplemented()
comment_admin_session = property(fget=get_comment_admin_session)
def get_comment_admin_session_for_book(self, book_id=None):
"""Gets the ``OsidSession`` associated with the comment administration service for the given book.
arg: book_id (osid.id.Id): the ``Id`` of the ``Book``
return: (osid.commenting.CommentAdminSession) - a
``CommentAdminSession``
raise: NotFound - no ``Book`` found by the given ``Id``
raise: NullArgument - ``book_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_admin()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_comment_admin()`` and
``supports_visible_federation()`` are ``true``*
"""
if book_id is None:
raise NullArgument
raise Unimplemented()
def get_comment_notification_session(self, comment_receiver=None):
"""Gets the ``OsidSession`` associated with the comment notification service.
arg: comment_receiver (osid.commenting.CommentReceiver): the
receiver
return: (osid.commenting.CommentNotificationSession) - a
``CommentNotificationSession``
raise: NullArgument - ``comment_receiver`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_notification()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_comment_notification()`` is ``true``.*
"""
if comment_receiver is None:
raise NullArgument()
raise Unimplemented()
def get_comment_notification_session_for_book(self, comment_receiver=None, book_id=None):
"""Gets the ``OsidSession`` associated with the comment notification service for the given book.
arg: comment_receiver (osid.commenting.CommentReceiver): the
receiver
arg: book_id (osid.id.Id): the ``Id`` of the ``Book``
return: (osid.commenting.CommentNotificationSession) - a
``CommentNotificationSession``
raise: NotFound - no ``Book`` found by the given ``Id``
raise: NullArgument - ``comment_receiver`` or ``book_id`` is
``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_notification()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_comment_notification()`` and
``supports_visible_federation()`` are ``true``*
"""
if comment_receiver is None:
raise NullArgument
if book_id is None:
raise NullArgument
raise Unimplemented()
def get_comment_book_session(self):
"""Gets the session for retrieving comment to book mappings.
return: (osid.commenting.CommentBookSession) - a
``CommentBookSession``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_book()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_comment_book()`` is ``true``.*
"""
raise Unimplemented()
comment_book_session = property(fget=get_comment_book_session)
def get_comment_book_assignment_session(self):
"""Gets the session for assigning comment to book mappings.
return: (osid.commenting.CommentBookAssignmentSession) - a
``CommentBookAssignmentSession``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_book_assignment()``
is ``false``
*compliance: optional -- This method must be implemented if
``supports_comment_book_assignment()`` is ``true``.*
"""
raise Unimplemented()
comment_book_assignment_session = property(fget=get_comment_book_assignment_session)
def get_comment_smart_book_session(self, book_id=None):
"""Gets the session associated with the comment smart book for the given book.
arg: book_id (osid.id.Id): the ``Id`` of the book
return: (osid.commenting.CommentSmartBookSession) - a
``CommentSmartBookSession``
raise: NotFound - ``book_id`` not found
raise: NullArgument - ``book_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_smart_book()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_comment_smart_book()`` is ``true``.*
"""
if book_id is None:
raise NullArgument
raise Unimplemented()
def get_book_lookup_session(self):
"""Gets the ``OsidSession`` associated with the book lookup service.
return: (osid.commenting.BookLookupSession) - a
``BookLookupSession``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_book_lookup()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_book_lookup()`` is ``true``.*
"""
raise Unimplemented()
book_lookup_session = property(fget=get_book_lookup_session)
def get_book_query_session(self):
"""Gets the ``OsidSession`` associated with the book query service.
return: (osid.commenting.BookQuerySession) - a
``BookQuerySession``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_book_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_book_query()`` is ``true``.*
"""
raise Unimplemented()
book_query_session = property(fget=get_book_query_session)
def get_book_search_session(self):
"""Gets the ``OsidSession`` associated with the book search service.
return: (osid.commenting.BookSearchSession) - a
``BookSearchSession``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_book_search()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_book_search()`` is ``true``.*
"""
raise Unimplemented()
book_search_session = property(fget=get_book_search_session)
def get_book_admin_session(self):
"""Gets the ``OsidSession`` associated with the book administrative service.
return: (osid.commenting.BookAdminSession) - a
``BookAdminSession``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_book_admin()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_book_admin()`` is ``true``.*
"""
raise Unimplemented()
book_admin_session = property(fget=get_book_admin_session)
def get_book_notification_session(self, book_receiver=None):
"""Gets the ``OsidSession`` associated with the book notification service.
arg: book_receiver (osid.commenting.BookReceiver): the
receiver
return: (osid.commenting.BookNotificationSession) - a
``BookNotificationSession``
raise: NullArgument - ``book_receiver`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_book_notification()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_book_notification()`` is ``true``.*
"""
if book_receiver is None:
raise NullArgument()
raise Unimplemented()
def get_book_hierarchy_session(self):
"""Gets the ``OsidSession`` associated with the book hierarchy service.
return: (osid.commenting.BookHierarchySession) - a
``BookHierarchySession``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_book_hierarchy()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_book_hierarchy()`` is ``true``.*
"""
raise Unimplemented()
book_hierarchy_session = property(fget=get_book_hierarchy_session)
def get_book_hierarchy_design_session(self):
"""Gets the ``OsidSession`` associated with the book hierarchy design service.
return: (osid.commenting.BookHierarchyDesignSession) - a
``BookHierarchyDesignSession``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_book_hierarchy_design()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_book_hierarchy_design()`` is ``true``.*
"""
raise Unimplemented()
book_hierarchy_design_session = property(fget=get_book_hierarchy_design_session)
def get_commenting_batch_manager(self):
"""Gets a ``CommentingBatchManager``.
return: (osid.commenting.batch.CommentingBatchManager) - a
``CommentingBatchManager``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_commenting_batch()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_commenting_batch()`` is ``true``.*
"""
raise Unimplemented()
commenting_batch_manager = property(fget=get_commenting_batch_manager)
class CommentingProxyManager(abc_commenting_managers.CommentingProxyManager, osid_managers.OsidProxyManager, CommentingProfile):
"""The commenting manager provides access to commenting sessions and provides interoperability tests for various aspects of this service.
Methods in this manager accept a ``Proxy`` for passing information
from a server environment. The sessions included in this manager
are:
* ``CommentLookupSession:`` a session to lookup comments
* ``RatingLookupSession:`` a session to lookup comments
* ``CommentQuerySession:`` a session to query comments
* ``CommentSearchSession:`` a session to search comments
* ``CommentAdminSession:`` a session to manage comments
* ``CommentNotificationSession:`` a session to subscribe to
notifications of comment changes
* ``CommentBookSession:`` a session for looking up comment and
book mappings
* ``CommentBookAssignmentSession:`` a session for managing comment
and book mappings
* ``CommentSmartBookSession:`` a session to manage dynamic comment
books
* ``BookLookupSession:`` a session to retrieve books
* ``BookQuerySession:`` a session to query books
* ``BookSearchSession:`` a session to search for books
* ``BookAdminSession:`` a session to create, update and delete
books
* ``BookNotificationSession:`` a session to receive notifications
for changes in books
* ``BookHierarchyTraversalSession:`` a session to traverse
hierarchies of books
* ``BookHierarchyDesignSession:`` a session to manage hierarchies
of books
The commenting manager also provides a profile for determining the
search types supported by this service.
"""
def get_comment_lookup_session(self, proxy=None):
"""Gets the ``OsidSession`` associated with the comment lookup service.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.commenting.CommentLookupSession) - a
``CommentLookupSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_lookup()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_comment_lookup()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_comment_lookup_session_for_book(self, book_id=None, proxy=None):
"""Gets the ``OsidSession`` associated with the comment lookup service for the given book.
arg: book_id (osid.id.Id): the ``Id`` of the ``Book``
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.commenting.CommentLookupSession) - a
``CommentLookupSession``
raise: NotFound - no ``Book`` found by the given ``Id``
raise: NullArgument - ``book_id`` or ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_lookup()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_comment_lookup()`` and
``supports_visible_federation()`` are ``true``*
"""
if book_id is None or proxy is None:
raise NullArgument
raise Unimplemented()
def get_rating_lookup_session(self, proxy=None):
"""Gets the ``OsidSession`` associated with the rating lookup service.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.commenting.RatingLookupSession) - a
``RatingLookupSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_rating_lookup()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_rating_lookup()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_rating_lookup_session_for_book(self, book_id=None, proxy=None):
"""Gets the ``OsidSession`` associated with the rating lookup service for the given book.
arg: book_id (osid.id.Id): the ``Id`` of the ``Book``
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.commenting.RatingLookupSession) - a
``RatingLookupSession``
raise: NotFound - no ``Book`` found by the given ``Id``
raise: NullArgument - ``book_id`` or ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_rating_lookup()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_rating_lookup()`` and
``supports_visible_federation()`` are ``true``*
"""
if book_id is None or proxy is None:
raise NullArgument
raise Unimplemented()
def get_comment_query_session(self, proxy=None):
"""Gets the ``OsidSession`` associated with the comment query service.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.commenting.CommentQuerySession) - a
``CommentQuerySession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_query()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_comment_query()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_comment_query_session_for_book(self, book_id=None, proxy=None):
"""Gets the ``OsidSession`` associated with the comment query service for the given book.
arg: book_id (osid.id.Id): the ``Id`` of the ``Book``
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.commenting.CommentQuerySession) - a
``CommentQuerySession``
raise: NotFound - no ``Comment`` found by the given ``Id``
raise: NullArgument - ``book_id`` or ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_query()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_comment_query()`` and
``supports_visible_federation()`` are ``true``*
"""
if book_id is None or proxy is None:
raise NullArgument
raise Unimplemented()
def get_comment_search_session(self, proxy=None):
"""Gets the ``OsidSession`` associated with the comment search service.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.commenting.CommentSearchSession) - a
``CommentSearchSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_search()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_comment_search()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_comment_search_session_for_book(self, book_id=None, proxy=None):
"""Gets the ``OsidSession`` associated with the comment search service for the given book.
arg: book_id (osid.id.Id): the ``Id`` of the ``Book``
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.commenting.CommentSearchSession) - a
``CommentSearchSession``
raise: NotFound - no ``Comment`` found by the given ``Id``
raise: NullArgument - ``book_id`` or ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_search()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_comment_search()`` and
``supports_visible_federation()`` are ``true``*
"""
if book_id is None or proxy is None:
raise NullArgument
raise Unimplemented()
def get_comment_admin_session(self, proxy=None):
"""Gets the ``OsidSession`` associated with the comment administration service.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.commenting.CommentAdminSession) - a
``CommentAdminSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_admin()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_comment_admin()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_comment_admin_session_for_book(self, book_id=None, proxy=None):
"""Gets the ``OsidSession`` associated with the comment administration service for the given book.
arg: book_id (osid.id.Id): the ``Id`` of the ``Book``
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.commenting.CommentAdminSession) - a
``CommentAdminSession``
raise: NotFound - no ``Comment`` found by the given ``Id``
raise: NullArgument - ``book_id`` or ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_admin()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_comment_admin()`` and
``supports_visible_federation()`` are ``true``*
"""
if book_id is None or proxy is None:
raise NullArgument
raise Unimplemented()
def get_comment_notification_session(self, comment_receiver=None, proxy=None):
"""Gets the ``OsidSession`` associated with the comment notification service.
arg: comment_receiver (osid.commenting.CommentReceiver): the
receiver
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.commenting.CommentNotificationSession) - a
``CommentNotificationSession``
raise: NullArgument - ``comment_receiver`` or ``proxy`` is
``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_notification()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_comment_notification()`` is ``true``.*
"""
if comment_receiver is None or proxy is None:
raise NullArgument()
raise Unimplemented()
def get_comment_notification_session_for_book(self, comment_receiver=None, book_id=None, proxy=None):
"""Gets the ``OsidSession`` associated with the comment notification service for the given book.
arg: comment_receiver (osid.commenting.CommentReceiver): the
receiver
arg: book_id (osid.id.Id): the ``Id`` of the ``Book``
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.commenting.CommentNotificationSession) - a
``CommentNotificationSession``
raise: NotFound - no ``Comment`` found by the given ``Id``
raise: NullArgument - ``comment_receiver, book_id`` or
``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_notification()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_comment_notification()`` and
``supports_visible_federation()`` are ``true``*
"""
if comment_receiver is None or book_id is None or proxy is None:
raise NullArgument
raise Unimplemented()
def get_comment_book_session(self, proxy=None):
"""Gets the session for retrieving comment to book mappings.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.commenting.CommentBookSession) - a
``CommentBookSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_book()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_comment_book()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_comment_book_assignment_session(self, proxy=None):
"""Gets the session for assigning comment to book mappings.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.commenting.CommentBookAssignmentSession) - a
``CommentBookAssignmentSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_book_assignment()``
is ``false``
*compliance: optional -- This method must be implemented if
``supports_comment_book_assignment()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_comment_smart_book_session(self, book_id=None, proxy=None):
"""Gets the session for managing dynamic comment books for the given book.
arg: book_id (osid.id.Id): the ``Id`` of a book
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.commenting.CommentSmartBookSession) - a
``CommentSmartBookSession``
raise: NotFound - ``book_id`` not found
raise: NullArgument - ``book_id`` or ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_comment_smart_book()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_comment_smart_book()`` is ``true``.*
"""
if book_id is None or proxy is None:
raise NullArgument()
raise Unimplemented()
def get_book_lookup_session(self, proxy=None):
"""Gets the ``OsidSession`` associated with the book lookup service.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.commenting.BookLookupSession) - a
``BookLookupSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_book_lookup()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_book_lookup()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_book_query_session(self, proxy=None):
"""Gets the ``OsidSession`` associated with the book query service.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.commenting.BookQuerySession) - a
``BookQuerySession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_book_query()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_book_query()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_book_search_session(self, proxy=None):
"""Gets the ``OsidSession`` associated with the book search service.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.commenting.BookSearchSession) - a
``BookSearchSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_book_search()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_book_search()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_book_admin_session(self, proxy=None):
"""Gets the ``OsidSession`` associated with the book administrative service.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.commenting.BookAdminSession) - a
``BookAdminSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_book_admin()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_book_admin()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_book_notification_session(self, book_receiver=None, proxy=None):
"""Gets the ``OsidSession`` associated with the book notification service.
arg: book_receiver (osid.commenting.BookReceiver): the
receiver
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.commenting.BookNotificationSession) - a
``BookNotificationSession``
raise: NullArgument - ``book_receiver`` or ``proxy`` is
``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_book_notification()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_book_notification()`` is ``true``.*
"""
if book_receiver is None or proxy is None:
raise NullArgument()
raise Unimplemented()
def get_book_hierarchy_session(self, proxy=None):
"""Gets the ``OsidSession`` associated with the book hierarchy service.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.commenting.BookHierarchySession) - a
``BookHierarchySession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_book_hierarchy()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_book_hierarchy()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_book_hierarchy_design_session(self, proxy=None):
"""Gets the ``OsidSession`` associated with the book hierarchy design service.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.commenting.BookHierarchyDesignSession) - a
``BookHierarchyDesignSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_book_hierarchy_design()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_book_hierarchy_design()`` is ``true``.*
"""
if proxy is None:
raise NullArgument()
raise Unimplemented()
def get_commenting_batch_proxy_manager(self):
"""Gets a ``CommentingBatchProxyManager``.
return: (osid.commenting.batch.CommentingBatchProxyManager) - a
``CommentingBatchProxyManager``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_commenting_batch()`` is
``false``
*compliance: optional -- This method must be implemented if
``supports_commenting_batch()`` is ``true``.*
"""
raise Unimplemented()
commenting_batch_proxy_manager = property(fget=get_commenting_batch_proxy_manager)
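# A hedged usage sketch (hypothetical provider code, not part of dlkit itself): a concrete
# manager inherits these utility implementations and overrides only the capabilities it
# actually provides, so everything else keeps the default `return False` /
# `raise Unimplemented()` behavior. `MyCommentLookupSession` is an assumed class name.
class MyCommentingManager(CommentingManager):
    def supports_comment_lookup(self):
        return True  # advertise the one session this provider implements

    def get_comment_lookup_session(self):
        return MyCommentLookupSession()  # hypothetical session implementation

# Consumers are expected to probe the profile before requesting a session:
# mgr = MyCommentingManager()
# if mgr.supports_comment_lookup():
#     session = mgr.get_comment_lookup_session()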
| 40.256724 | 141 | 0.630874 | 5,136 | 49,395 | 5.926986 | 0.039136 | 0.0544 | 0.033113 | 0.037844 | 0.930817 | 0.9002 | 0.871358 | 0.819027 | 0.798364 | 0.758089 | 0 | 0 | 0.269379 | 49,395 | 1,226 | 142 | 40.28956 | 0.843498 | 0.670695 | 0 | 0.564315 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.298755 | false | 0 | 0.020747 | 0 | 0.518672 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 8 |
| 6782111a2f8f82a7564b456f55112c712e628efe | 3,351 | py | Python | exchangelib/folders/__init__.py | mishmashclone/ecederstrand-exchangelib | 1bbae0e527dc82a45bf3b5946b438d69de96c20f | ["BSD-2-Clause"] | 1,006 | 2016-07-18T16:42:55.000Z | 2022-03-31T10:43:50.000Z | exchangelib/folders/__init__.py | mishmashclone/ecederstrand-exchangelib | 1bbae0e527dc82a45bf3b5946b438d69de96c20f | ["BSD-2-Clause"] | 966 | 2016-05-13T18:55:43.000Z | 2022-03-31T15:24:56.000Z | exchangelib/folders/__init__.py | mishmashclone/ecederstrand-exchangelib | 1bbae0e527dc82a45bf3b5946b438d69de96c20f | ["BSD-2-Clause"] | 272 | 2016-04-05T02:17:10.000Z | 2022-03-24T08:15:57.000Z |
from .base import BaseFolder, Folder
from .collections import FolderCollection
from .known_folders import AdminAuditLogs, AllContacts, AllItems, ArchiveDeletedItems, ArchiveInbox, \
ArchiveMsgFolderRoot, ArchiveRecoverableItemsDeletions, ArchiveRecoverableItemsPurges, \
ArchiveRecoverableItemsRoot, ArchiveRecoverableItemsVersions, Audits, Calendar, CalendarLogging, CommonViews, \
Conflicts, Contacts, ConversationHistory, ConversationSettings, DefaultFoldersChangeHistory, DeferredAction, \
DeletedItems, Directory, Drafts, ExchangeSyncData, Favorites, Files, FreebusyData, Friends, GALContacts, \
GraphAnalytics, IMContactList, Inbox, Journal, JunkEmail, LocalFailures, Location, MailboxAssociations, Messages, \
MsgFolderRoot, MyContacts, MyContactsExtended, NonDeletableFolderMixin, Notes, Outbox, ParkedMessages, \
PassThroughSearchResults, PdpProfileV2Secured, PeopleConnect, QuickContacts, RSSFeeds, RecipientCache, \
RecoverableItemsDeletions, RecoverableItemsPurges, RecoverableItemsRoot, RecoverableItemsVersions, Reminders, \
Schedule, SearchFolders, SentItems, ServerFailures, Sharing, Shortcuts, Signal, SmsAndChatsSync, SpoolerQueue, \
SyncIssues, System, Tasks, TemporarySaves, ToDoSearch, Views, VoiceMail, WellknownFolder, WorkingSet, \
Companies, OrganizationalContacts, PeopleCentricConversationBuddies, NON_DELETABLE_FOLDERS
from .queryset import FolderQuerySet, SingleFolderQuerySet, FOLDER_TRAVERSAL_CHOICES, SHALLOW, DEEP, SOFT_DELETED
from .roots import Root, ArchiveRoot, PublicFoldersRoot, RootOfHierarchy
from ..properties import FolderId, DistinguishedFolderId
__all__ = [
'FolderId', 'DistinguishedFolderId',
'FolderCollection',
'BaseFolder', 'Folder',
'AdminAuditLogs', 'AllContacts', 'AllItems', 'ArchiveDeletedItems', 'ArchiveInbox', 'ArchiveMsgFolderRoot',
'ArchiveRecoverableItemsDeletions', 'ArchiveRecoverableItemsPurges', 'ArchiveRecoverableItemsRoot',
'ArchiveRecoverableItemsVersions', 'Audits', 'Calendar', 'CalendarLogging', 'CommonViews', 'Conflicts',
'Contacts', 'ConversationHistory', 'ConversationSettings', 'DefaultFoldersChangeHistory', 'DeferredAction',
'DeletedItems', 'Directory', 'Drafts', 'ExchangeSyncData', 'Favorites', 'Files', 'FreebusyData', 'Friends',
'GALContacts', 'GraphAnalytics', 'IMContactList', 'Inbox', 'Journal', 'JunkEmail', 'LocalFailures',
'Location', 'MailboxAssociations', 'Messages', 'MsgFolderRoot', 'MyContacts', 'MyContactsExtended',
'NonDeletableFolderMixin', 'Notes', 'Outbox', 'ParkedMessages', 'PassThroughSearchResults',
'PdpProfileV2Secured', 'PeopleConnect', 'QuickContacts', 'RSSFeeds', 'RecipientCache',
'RecoverableItemsDeletions', 'RecoverableItemsPurges', 'RecoverableItemsRoot', 'RecoverableItemsVersions',
'Reminders', 'Schedule', 'SearchFolders', 'SentItems', 'ServerFailures', 'Sharing', 'Shortcuts', 'Signal',
'SmsAndChatsSync', 'SpoolerQueue', 'SyncIssues', 'System', 'Tasks', 'TemporarySaves', 'ToDoSearch', 'Views',
'VoiceMail', 'WellknownFolder', 'WorkingSet', 'Companies', 'OrganizationalContacts',
'PeopleCentricConversationBuddies', 'NON_DELETABLE_FOLDERS',
'FolderQuerySet', 'SingleFolderQuerySet', 'FOLDER_TRAVERSAL_CHOICES', 'SHALLOW', 'DEEP', 'SOFT_DELETED',
'Root', 'ArchiveRoot', 'PublicFoldersRoot', 'RootOfHierarchy',
]
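# Usage note (illustrative, not part of the original module): the re-exports above let
# callers import everything folder-related from the `exchangelib.folders` package root
# rather than from its submodules, e.g.:
#
#   from exchangelib.folders import Inbox, FolderCollection, SingleFolderQuerySet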
| 83.775 | 119 | 0.78305 | 216 | 3,351 | 12.078704 | 0.490741 | 0.012265 | 0.025297 | 0.039862 | 0.873898 | 0.873898 | 0.873898 | 0.873898 | 0.873898 | 0.814105 | 0 | 0.000664 | 0.101462 | 3,351 | 39 | 120 | 85.923077 | 0.865825 | 0 | 0 | 0 | 0 | 0 | 0.373918 | 0.114593 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.052632 | 0.157895 | 0 | 0.157895 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
| 67920da59a5aa733273efe3f586d1283de9c4dd3 | 99,720 | py | Python | tests/cpu/test_jit.py | Manny27nyc/intel-extension-for-pytorch | b40faedf6b00d520f6483d519d2e82bce0a6c0d1 | ["Apache-2.0"] | null | null | null | tests/cpu/test_jit.py | Manny27nyc/intel-extension-for-pytorch | b40faedf6b00d520f6483d519d2e82bce0a6c0d1 | ["Apache-2.0"] | null | null | null | tests/cpu/test_jit.py | Manny27nyc/intel-extension-for-pytorch | b40faedf6b00d520f6483d519d2e82bce0a6c0d1 | ["Apache-2.0"] | null | null | null |
from __future__ import division
from __future__ import print_function
'''
From PyTorch:
Copyright (c) 2016- Facebook, Inc (Adam Paszke)
Copyright (c) 2014- Facebook, Inc (Soumith Chintala)
Copyright (c) 2011-2014 Idiap Research Institute (Ronan Collobert)
Copyright (c) 2012-2014 Deepmind Technologies (Koray Kavukcuoglu)
Copyright (c) 2011-2012 NEC Laboratories America (Koray Kavukcuoglu)
Copyright (c) 2011-2013 NYU (Clement Farabet)
Copyright (c) 2006-2010 NEC Laboratories America (Ronan Collobert, Leon Bottou, Iain Melvin, Jason Weston)
Copyright (c) 2006 Idiap Research Institute (Samy Bengio)
Copyright (c) 2001-2004 Idiap Research Institute (Ronan Collobert, Samy Bengio, Johnny Mariethoz)
From Caffe2:
Copyright (c) 2016-present, Facebook Inc. All rights reserved.
All contributions by Facebook:
Copyright (c) 2016 Facebook Inc.
All contributions by Google:
Copyright (c) 2015 Google Inc.
All rights reserved.
All contributions by Yangqing Jia:
Copyright (c) 2015 Yangqing Jia
All rights reserved.
All contributions from Caffe:
Copyright(c) 2013, 2014, 2015, the respective contributors
All rights reserved.
All other contributions:
Copyright(c) 2015, 2016 the respective contributors
All rights reserved.
Caffe2 uses a copyright model similar to Caffe: each contributor holds
copyright over their contributions to Caffe2. The project versioning records
all such contribution and copyright details. If a contributor wants to further
mark their specific copyright on a particular contribution, they should
indicate their copyright solely in the commit message of the change when it is
committed.
All rights reserved.
'''
"""Tests for rn50."""
import math
import random
import unittest
from functools import reduce
import warnings
import itertools
import torch
import torch.nn as nn
from torch.jit._recursive import wrap_cpp_module
import torch.fx.experimental.optimization as optimization
import copy
import intel_extension_for_pytorch as ipex
import intel_extension_for_pytorch._C as core
import torch.backends.cudnn as cudnn
from torch.nn import Parameter
import torch.nn.functional as F
from torch.autograd import gradcheck
from torch.autograd.gradcheck import gradgradcheck
from torch._six import inf, nan
from common_utils import TestCase, iter_indices, TEST_NUMPY, TEST_SCIPY, TEST_MKL, \
TEST_LIBROSA, run_tests, download_file, skipIfNoLapack, suppress_warnings, \
IS_WINDOWS, PY3, NO_MULTIPROCESSING_SPAWN, do_test_dtypes, do_test_empty_full, \
IS_SANDCASTLE, load_tests, brute_pdist, brute_cdist, slowTest, \
skipCUDANonDefaultStreamIf, skipCUDAMemoryLeakCheckIf
device = 'cpu:0'
SIZE = 100
conv_module = {2 : torch.nn.Conv2d, 3 : torch.nn.Conv3d}
bn_module = {2 : torch.nn.BatchNorm2d, 3 : torch.nn.BatchNorm3d}
class ConvBatchNorm_Fixed(nn.Module):
def __init__(self, dim, in_channels, out_channels, **kwargs):
super(ConvBatchNorm_Fixed, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.conv = conv_module[dim](in_channels, out_channels, bias=False, **kwargs)
self.bn = bn_module[dim](out_channels, eps=0.001)
def forward(self, x):
return self.bn(self.conv(x))
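# Hedged aside (not part of the original test file): fixtures like ConvBatchNorm_Fixed exist
# so JIT fusion output can be compared against eager mode. The conv+bn folding being tested
# is plain algebra -- in eval mode, bn(conv(x)) equals a conv with rescaled parameters:
#   W' = W * gamma / sqrt(var + eps),  b' = (b - mean) * gamma / sqrt(var + eps) + beta
# A standalone sketch of that fold:
def fold_conv_bn(conv, bn):
    """Return (weight, bias) of a conv equivalent to bn(conv(x)) in eval mode."""
    std = (bn.running_var + bn.eps).sqrt()
    scale = bn.weight / std                               # gamma / sqrt(var + eps)
    shape = [-1] + [1] * (conv.weight.dim() - 1)          # broadcast over out-channels
    weight = conv.weight * scale.reshape(shape)
    bias = conv.bias if conv.bias is not None else torch.zeros_like(bn.running_mean)
    bias = (bias - bn.running_mean) * scale + bn.bias
    return weight, bias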
class BatchNormConv_Fixed(nn.Module):
def __init__(self, dim, in_channels, out_channels, **kwargs):
super(BatchNormConv_Fixed, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.conv = conv_module[dim](in_channels, out_channels, bias=False, **kwargs)
self.bn = bn_module[dim](in_channels, eps=0.001)
def forward(self, x):
return self.conv(self.bn(x))
class BatchNorm_Conv_BatchNorm(nn.Module):
def __init__(self, dim, in_channels, out_channels, **kwargs):
super(BatchNorm_Conv_BatchNorm, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.conv = conv_module[dim](in_channels, out_channels, bias=False, **kwargs)
self.bn1 = bn_module[dim](in_channels, eps=0.001)
self.bn2 = bn_module[dim](out_channels, eps=0.001)
def forward(self, x):
return self.bn2(self.conv(self.bn1(x)))
class ConvReshapeBatchNorm(nn.Module):
def __init__(self, dim, in_channels, out_channels, dest_shape, **kwargs):
super(ConvReshapeBatchNorm, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.dest_shape = dest_shape
self.conv = conv_module[dim](in_channels, out_channels, bias=False, **kwargs)
self.bn = bn_module[dim](dest_shape[1], eps=0.001)
def forward(self, x):
conv_output = self.conv(x)
return self.bn(torch.reshape(conv_output, self.dest_shape))
class Conv_Conv_Concat(nn.Module):
def __init__(self, dim, in_channels, out_channels, **kwargs):
super(Conv_Conv_Concat, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.conv1 = conv_module[dim](in_channels, out_channels, bias=False, **kwargs)
self.conv2 = conv_module[dim](in_channels, out_channels, bias=False, **kwargs)
def forward(self, x):
return torch.cat((self.conv1(x),self.conv2(x)))
class ConvRelu_Fixed(nn.Module):
def __init__(self, dim, in_channels, out_channels, **kwargs):
super(ConvRelu_Fixed, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.conv = conv_module[dim](in_channels, out_channels, bias=False, **kwargs)
def forward(self, x):
return F.relu(self.conv(x), inplace=True)
class Conv_Relu_Add(nn.Module):
def __init__(self, dim, in_channels, out_channels, **kwargs):
super(Conv_Relu_Add, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.conv1 = conv_module[dim](in_channels, out_channels, bias=False, **kwargs)
self.conv2 = conv_module[dim](in_channels, out_channels, bias=False, **kwargs)
def forward(self, x):
return torch.add(F.relu(self.conv1(x), inplace=True),self.conv2(x))
class Conv_Scalar_Binary(nn.Module):
def __init__(self, op, dim, in_channels, out_channels, **kwargs):
super(Conv_Scalar_Binary, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.conv = conv_module[dim](in_channels, out_channels, **kwargs)
self.op = op
def forward(self, x):
return self.op(self.conv(x), 2.0)
class Conv_Scalar_Binary_Add(nn.Module):
def __init__(self, op, dim, in_channels, out_channels, **kwargs):
super(Conv_Scalar_Binary_Add, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.conv1 = conv_module[dim](in_channels, out_channels, **kwargs)
self.conv2 = conv_module[dim](in_channels, out_channels, **kwargs)
self.op = op
def forward(self, x):
return torch.add(self.op(self.conv1(x), 2.0), self.op(self.conv2(x), 2.0))
class Conv_Tensor_Binary(nn.Module):
def __init__(self, op, dim, in_channels, out_channels, **kwargs):
super(Conv_Tensor_Binary, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.conv = conv_module[dim](in_channels, out_channels, **kwargs)
self.op = op
self.tensor = torch.randn([1, out_channels, 1, 1])
def forward(self, x):
return self.op(self.conv(x), self.tensor)
class Conv_Tensor_Binary_Add(nn.Module):
def __init__(self, op, dim, in_channels, out_channels, **kwargs):
super(Conv_Tensor_Binary_Add, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.conv1 = conv_module[dim](in_channels, out_channels, **kwargs)
self.conv2 = conv_module[dim](in_channels, out_channels, **kwargs)
self.op = op
self.tensor = torch.randn([1, out_channels, 1, 1])
def forward(self, x):
return torch.add(self.op(self.conv1(x), self.tensor), self.op(self.conv2(x), self.tensor))
class Conv_Bn_Relu(nn.Module):
def __init__(self, dim, in_channels, out_channels, **kwargs):
super(Conv_Bn_Relu, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.conv = conv_module[dim](in_channels, out_channels, bias=False, **kwargs)
self.bn = bn_module[dim](out_channels, eps=0.001)
def forward(self, x):
return F.relu(self.bn(self.conv(x)), inplace=True)
class ConvReshapeRelu(nn.Module):
def __init__(self, dim, in_channels, out_channels, dest_shape, **kwargs):
super(ConvReshapeRelu, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.dest_shape = dest_shape
self.conv = conv_module[dim](in_channels, out_channels, bias=False, **kwargs)
def forward(self, x):
return F.relu(torch.reshape(self.conv(x), self.dest_shape), inplace=True)
class ConvSum(nn.Module):
def __init__(self, dim, in_channels, out_channels, **kwargs):
super(ConvSum, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.conv = conv_module[dim](in_channels, out_channels, bias=False, **kwargs)
self.conv1 = conv_module[dim](in_channels, out_channels, bias=False, **kwargs)
def forward(self, x):
a = self.conv(x)
b = self.conv1(x)
return a+b
class ConvScalarSum(nn.Module):
def __init__(self, dim, in_channels, out_channels, **kwargs):
super(ConvScalarSum, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.conv = conv_module[dim](in_channels, out_channels, bias=False, **kwargs)
def forward(self, x):
b = self.conv(x)
return b+2
class ConvBroadcastSum(nn.Module):
def __init__(self, dim, in_channels, out_channels, **kwargs):
super(ConvBroadcastSum, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.conv = conv_module[dim](in_channels, out_channels, bias=False, **kwargs)
self.conv1 = conv_module[dim](in_channels, out_channels, bias=False, **kwargs)
def forward(self, x):
a = self.conv(x)
b = self.conv1(x)
return a[1:2].clone()+b
class ConvReshapeSum(nn.Module):
def __init__(self, dim, in_channels, out_channels, dest_shape, **kwargs):
super(ConvReshapeSum, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.dest_shape = dest_shape
self.conv1 = conv_module[dim](in_channels, out_channels, bias=False, **kwargs)
self.conv2 = conv_module[dim](in_channels, out_channels, bias=False, **kwargs)
def forward(self, x):
        a = torch.reshape(self.conv1(x), self.dest_shape)
        b = torch.reshape(self.conv2(x), self.dest_shape)
        return a + b
class CascadedConvBnSumRelu(nn.Module):
def __init__(self, dim, in_channels, mid_channels, out_channels, **kwargs):
super(CascadedConvBnSumRelu, self).__init__()
torch.manual_seed(2018)
self.conv = conv_module[dim](in_channels, mid_channels, bias=False, **kwargs)
self.conv1 = conv_module[dim](
mid_channels, out_channels, bias=False, padding=1, **kwargs)
self.conv2 = conv_module[dim](in_channels, out_channels, bias=False, **kwargs)
self.bn = bn_module[dim](mid_channels, eps=0.001)
self.bn1 = bn_module[dim](out_channels, eps=0.001)
self.bn2 = bn_module[dim](out_channels, eps=0.001)
def forward(self, x):
a = self.conv(x)
a = self.bn(a)
a = F.relu(a, inplace=True)
a = self.conv1(a)
a = self.bn1(a)
b = self.conv2(x)
b = self.bn2(b)
return F.relu(a.add_(b), inplace=True)
class Linear_Scalar_Binary(nn.Module):
def __init__(self, op, in_channels, out_channels, **kwargs):
super(Linear_Scalar_Binary, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.linear = nn.Linear(in_channels, out_channels, **kwargs)
self.op = op
def forward(self, x):
return self.op(self.linear(x), 2.0)
class Linear_Tensor_Binary(nn.Module):
def __init__(self, op, in_channels, out_channels, **kwargs):
super(Linear_Tensor_Binary, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.linear = nn.Linear(in_channels, out_channels, **kwargs)
self.op = op
self.tensor = torch.randn(out_channels)
def forward(self, x):
return self.op(self.linear(x), self.tensor)
class LinearRelu(nn.Module):
def __init__(self, in_channels, out_channels, **kwargs):
super(LinearRelu, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.linear = nn.Linear(in_channels, out_channels, **kwargs)
def forward(self, x):
return F.relu(self.linear(x), inplace=True)
class LinearGelu(nn.Module):
def __init__(self, in_channels, out_channels, **kwargs):
super(LinearGelu, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.linear = nn.Linear(in_channels, out_channels, **kwargs)
def forward(self, x):
return F.gelu(self.linear(x))
class LinearSigmoid(nn.Module):
def __init__(self, in_channels, out_channels, **kwargs):
super(LinearSigmoid, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.linear = nn.Linear(in_channels, out_channels, **kwargs)
def forward(self, x):
        return torch.sigmoid(self.linear(x))
class LinearSwish(nn.Module):
def __init__(self, in_channels, out_channels, **kwargs):
super(LinearSwish, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.linear = nn.Linear(in_channels, out_channels, **kwargs)
def forward(self, x):
linear_res = self.linear(x)
return F.silu(linear_res)
class LinearSwish_v1(nn.Module):
def __init__(self, in_channels, out_channels, **kwargs):
super(LinearSwish_v1, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.linear = nn.Linear(in_channels, out_channels, **kwargs)
def forward(self, x):
linear_res = self.linear(x)
        return torch.mul(linear_res, torch.sigmoid(linear_res))
class LinearAdd(nn.Module):
def __init__(self, in_channels, out_channels, **kwargs):
super(LinearAdd, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.linear = nn.Linear(in_channels, out_channels, **kwargs)
self.linear1 = nn.Linear(in_channels, out_channels, **kwargs)
def forward(self, x):
x1 = x.clone()
        return torch.add(self.linear(x), self.linear1(x1))
class Linear_Reshape_Relu(nn.Module):
    def __init__(self, in_channels, out_channels, dest_shape, **kwargs):
super(Linear_Reshape_Relu, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.linear = nn.Linear(in_channels, out_channels, **kwargs)
self.dest_shape = dest_shape
def forward(self, x):
        return F.relu(torch.reshape(self.linear(x), self.dest_shape), inplace=True)
class LinearSigmoid(nn.Module):
def __init__(self, in_channels, out_channels, **kwargs):
super(LinearSigmoid, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.linear = nn.Linear(in_channels, out_channels, **kwargs)
def forward(self, x):
return torch.sigmoid(self.linear(x))
class LinearBn(nn.Module):
    def __init__(self, dim, in_channels, out_channels, **kwargs):
super(LinearBn, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.linear = nn.Linear(in_channels, out_channels, **kwargs)
self.bn = bn_module[dim](1, eps=0.001)
def forward(self, x):
return self.bn(self.linear(x))
class Linear_Reshape_Bn(nn.Module):
    def __init__(self, dim, in_channels, out_channels, dest_shape, **kwargs):
super(Linear_Reshape_Bn, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.linear = nn.Linear(in_channels, out_channels, **kwargs)
self.bn = bn_module[dim](1, eps=0.001)
self.dest_shape = dest_shape
def forward(self, x):
        return self.bn(torch.reshape(self.linear(x), self.dest_shape))
class ConvSumInDiffBlock(nn.Module):
def __init__(self, dim, in_channels, out_channels, **kwargs):
super(ConvSumInDiffBlock, self).__init__()
seed = 2018
torch.manual_seed(seed)
self.pad = (0, 0) * dim
self.conv = conv_module[dim](in_channels, out_channels, bias=False, **kwargs)
def forward(self, x):
y = self.conv(x)
if y.size(1) != x.size(1):
            z = F.pad(x,
                      self.pad + (0, y.size(1) - x.size(1)), 'constant', 0.)
            y += z
else:
y += x
return y
class ConvSwishOutplace(nn.Module):
def __init__(self, dim, in_channels, out_channels, kernel_size, image_size):
super(ConvSwishOutplace, self).__init__()
self.conv = conv_module[dim](in_channels, out_channels, kernel_size, image_size)
def forward(self, x):
a1 = self.conv(x)
b1 = torch.sigmoid(a1)
c1 = torch.mul(a1, b1)
return c1
class ConvSwishInplace(nn.Module):
def __init__(self, dim, in_channels, out_channels, kernel_size, image_size):
super(ConvSwishInplace, self).__init__()
self.conv = conv_module[dim](in_channels, out_channels, kernel_size, image_size)
def forward(self, x):
a = self.conv(x)
b = torch.sigmoid(a)
res = a.mul_(b)
return res
class ConvSiluOutplace(nn.Module):
def __init__(self, dim, in_channels, out_channels, kernel_size, image_size):
super(ConvSiluOutplace, self).__init__()
self.conv = conv_module[dim](in_channels, out_channels, kernel_size, image_size)
self.silu = nn.SiLU()
def forward(self, x):
a1 = self.conv(x)
b1 = self.silu(a1)
return b1
class ConvSiluInplace(nn.Module):
def __init__(self, dim, in_channels, out_channels, kernel_size, image_size):
super(ConvSiluInplace, self).__init__()
self.conv = conv_module[dim](in_channels, out_channels, kernel_size, image_size)
self.silu = nn.SiLU(inplace=True)
def forward(self, x):
a1 = self.conv(x)
b1 = self.silu(a1)
return b1
class ConvSigmoidOutplace(nn.Module):
def __init__(self, dim, in_channels, out_channels, kernel_size, image_size):
super(ConvSigmoidOutplace, self).__init__()
self.conv = conv_module[dim](in_channels, out_channels, kernel_size, image_size)
def forward(self, x):
a = self.conv(x)
b = torch.sigmoid(a)
c = torch.add(b, b)
return c
class ConvSigmoidInplace(nn.Module):
def __init__(self, dim, in_channels, out_channels, kernel_size, image_size):
super(ConvSigmoidInplace, self).__init__()
self.conv = conv_module[dim](in_channels, out_channels, kernel_size, image_size)
def forward(self, x):
a = self.conv(x)
b = torch.sigmoid_(a)
c = torch.add(b, b)
return c
class ConvHardtanh(nn.Module):
def __init__(self, dim, in_channels, out_channels, kernel_size, image_size, inplace=False):
super(ConvHardtanh, self).__init__()
self.conv = conv_module[dim](in_channels, out_channels, kernel_size, image_size)
self.hardtanh = nn.Hardtanh(inplace=inplace)
def forward(self, x):
a = self.conv(x)
b = self.hardtanh(a)
c = torch.add(b, b)
return c
class ConvElu(nn.Module):
def __init__(self, dim, in_channels, out_channels, kernel_size, image_size, inplace=False):
super(ConvElu, self).__init__()
self.conv = conv_module[dim](in_channels, out_channels, kernel_size, image_size)
self.elu = nn.ELU(inplace=inplace)
def forward(self, x):
a = self.conv(x)
b = self.elu(a)
c = torch.add(b, b)
return c
class ConvTranspose2d(nn.Module):
def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0, output_padding=0, groups=1, bias=True, dilation=1):
super(ConvTranspose2d, self).__init__()
self.conv_transpose2d = nn.ConvTranspose2d(in_channels, out_channels, kernel_size, stride, padding, output_padding, groups, bias, dilation)
def forward(self, x):
x = self.conv_transpose2d(x)
return x
class ChannelShuffle(nn.Module):
def __init__(self, batchsize, num_channels, height, width, groups):
super(ChannelShuffle, self).__init__()
self.batchsize = batchsize
self.num_channels = num_channels
self.height = height
self.width = width
self.groups = groups
def forward(self, x):
channels_per_group = self.num_channels // self.groups
x = x.view(self.batchsize, self.groups, channels_per_group, self.height, self.width)
x = torch.transpose(x, 1, 2).contiguous()
x = x.view(self.batchsize, -1, self.height, self.width)
return x
class MatmulDiv(nn.Module):
def __init__(self, div_scalar=False, with_out=False):
super(MatmulDiv, self).__init__()
self.div_scalar = div_scalar
self.with_out = with_out
def forward(self, x):
mm_res = None
y = torch.transpose(x, 1, 2).contiguous()
mm_res_shape = x.size()[:-1] + (y.size()[-1:])
if self.with_out:
mm_res = torch.randn(mm_res_shape, dtype=x.dtype)
torch.matmul(x, y, out=mm_res)
else:
mm_res = torch.matmul(x, y)
if self.div_scalar:
return mm_res.div(2.0)
else:
            return mm_res.div(torch.ones(mm_res_shape, dtype=x.dtype) + 1)
class MHAScoresCalculation(nn.Module):
def __init__(self, dim_per_head, softmax_dim=-1):
super(MHAScoresCalculation, self).__init__()
self.softmax = nn.Softmax(dim=softmax_dim)
self.dim_per_head = dim_per_head
def forward(self, mat1, mat2, bias):
mat1 = mat1 / math.sqrt(self.dim_per_head)
qk = torch.matmul(mat1, mat2.transpose(2, 3))
scores = qk + bias
return self.softmax(scores)
class DistilMHAScoresCalculation_v1(nn.Module):
def __init__(self, dim_per_head, softmax_dim=-1):
super(DistilMHAScoresCalculation_v1, self).__init__()
self.softmax = nn.Softmax(dim=softmax_dim)
self.dim_per_head = dim_per_head
def forward(self, mat1, mat2, mask):
        mask_shape = [mat1.shape[0], 1, 1, mat1.shape[3]]
mat1 = mat1 / math.sqrt(self.dim_per_head)
qk = torch.matmul(mat1, mat2.transpose(2, 3))
mask = (mask == 0).view(mask_shape).expand_as(qk)
qk.masked_fill_(mask, -float("inf"))
return self.softmax(qk)
class DistilMHAScoresCalculation_v2(nn.Module):
def __init__(self, dim_per_head):
super(DistilMHAScoresCalculation_v2, self).__init__()
self.dim_per_head = dim_per_head
def forward(self, mat1, mat2, mask):
        mask_shape = [mat1.shape[0], 1, 1, mat1.shape[3]]
mat1 = mat1 / math.sqrt(self.dim_per_head)
qk = torch.matmul(mat1, mat2.transpose(2, 3))
mask = (mask == 0).view(mask_shape).expand_as(qk)
qk = qk.masked_fill(mask, -float("inf"))
return nn.functional.softmax(qk, dim=-1)
class AtenSoftmaxRepalce(nn.Module):
def __init__(self, dim=-1):
super(AtenSoftmaxRepalce, self).__init__()
self.softmax = torch.nn.Softmax(dim)
def forward(self, x):
return self.softmax(x)
class AtenBatchNormRepalce(nn.Module):
def __init__(self):
super(AtenBatchNormRepalce, self).__init__()
self.bn = torch.nn.BatchNorm2d(10)
def forward(self, x):
return self.bn(x)
class AddLayerNorm(torch.nn.Module):
def __init__(self, dim=32):
super(AddLayerNorm, self).__init__()
self.layernorm = torch.nn.LayerNorm(dim)
def forward(self, x, y):
        x = torch.add(x, y)
return self.layernorm(x)
class AddLayerNorm_v1(torch.nn.Module):
def __init__(self, dim=32):
super(AddLayerNorm_v1, self).__init__()
self.layernorm = torch.nn.LayerNorm(dim)
def forward(self, x, y, z):
x = x + y + z
return self.layernorm(x)
class ConcatBnRelu2d(torch.nn.Module):
def __init__(self):
super(ConcatBnRelu2d, self).__init__()
self.bn = torch.nn.BatchNorm2d(96)
self.relu = torch.nn.ReLU()
def forward(self, x1, x2, x3):
        x = torch.cat((x1, x2, x3), dim=1)
x = self.bn(x)
return self.relu(x)
class ConcatBnRelu2d_v1(torch.nn.Module):
def __init__(self):
super(ConcatBnRelu2d_v1, self).__init__()
self.bn = torch.nn.BatchNorm2d(32)
self.relu = torch.nn.ReLU()
def forward(self, x1, x2, x3):
        x = torch.cat((x1, x2, x3), dim=2)
x = self.bn(x)
return self.relu(x)
class ConcatBnRelu3d(torch.nn.Module):
def __init__(self):
super(ConcatBnRelu3d, self).__init__()
self.bn = torch.nn.BatchNorm3d(96)
self.relu = torch.nn.ReLU()
def forward(self, x1, x2, x3):
        x = torch.cat((x1, x2, x3), dim=1)
x = self.bn(x)
return self.relu(x)
class ModMultLinear(nn.Module):
def __init__(self, w1_dim, w2_dim):
super(ModMultLinear, self).__init__()
self.linear1 = nn.Linear(5, w1_dim)
self.linear2 = nn.Linear(5, w2_dim)
self.linear3 = nn.Linear(w1_dim, 5)
self.linear4 = nn.Linear(w1_dim, 5)
def forward(self, x):
res1 = self.linear1(x)
res2 = self.linear2(x)
res3 = self.linear3(res1)
res4 = self.linear4(res1)
return res1, res2, res3, res4
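# Context manager that snapshots the TensorExpr fuser state, disables the
# fuser on entry, and restores the saved state on exit. A hypothetical usage
# sketch:
#
#     with DisableTexprFuser():
#         y = traced_model(x)  # runs with the TE fuser turned off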
class DisableTexprFuser:
def __enter__(self):
self.saved = torch._C._jit_texpr_fuser_enabled()
torch._C._jit_set_texpr_fuser_enabled(False)
def __exit__(self, *args, **kwargs):
torch._C._jit_set_texpr_fuser_enabled(self.saved)
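# ResNet-style bottleneck blocks (1x1 -> 3x3 -> 1x1 convolutions plus a
# residual add) used to exercise the whole-block bottleneck fusion below.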
class Bottleneck_v1(nn.Module):
def __init__(self):
super(Bottleneck_v1, self).__init__()
self.conv1 = nn.Conv2d(64, 64, kernel_size=(1, 1), stride=(1, 1), bias=True)
self.conv2 = nn.Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=True)
self.conv3 = nn.Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=True)
self.downsample = nn.Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=True)
def forward(self, x):
y1 = self.conv1(x).relu_()
y2 = self.conv2(y1).relu_()
y3 = self.conv3(y2)
y3 += self.downsample(x)
return y3.relu_()
class Bottleneck_v2(nn.Module):
def __init__(self):
super(Bottleneck_v2, self).__init__()
self.conv = nn.Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=True)
self.conv1 = nn.Conv2d(256, 64, kernel_size=(1, 1), stride=(1, 1), bias=True)
self.conv2 = nn.Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1), bias=True)
self.conv3 = nn.Conv2d(64, 256, kernel_size=(1, 1), stride=(1, 1), bias=True)
def forward(self, x):
x = self.conv(x)
y1 = self.conv1(x).relu_()
y2 = self.conv2(y1).relu_()
y3 = self.conv3(y2)
y3 += x
return y3.relu_()
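# Test harness. The `_test_output`/`_test_output_bf16` helpers run a model
# eagerly, then trace and freeze it with torch.jit, compare the numerical
# results, and inspect the optimized graph: `kind_in_graph` must appear
# among the node kinds and `kind_not_in_graph` must not, which is how each
# fusion is verified.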
class Tester(TestCase):
def _test_output(self, model, x, kind_in_graph=None, kind_not_in_graph=None, prec=None, levels=['O0','O1'], use_channels_last=[True, False]):
modelName = model.__class__.__name__
options = itertools.product(levels, use_channels_last)
for level, use_channels_last in options:
ipex.enable_onednn_fusion(False)
model = model.eval()
            # TODO: remove once the JIT supports conv_bn folding natively.
            if level == 'O0':
                try:
                    model = optimization.fuse(model)
                except Exception:
                    warnings.warn("Conv BatchNorm folding failed.")
if x.dim() == 4 and use_channels_last:
x = x.to(memory_format=torch.channels_last)
model = model.to(memory_format=torch.channels_last)
if x.dim() == 5 and use_channels_last:
x = x.to(memory_format=torch.channels_last_3d)
model = model.to(memory_format=torch.channels_last_3d)
model = ipex.optimize(model, dtype=torch.float32, level=level)
with torch.no_grad():
result = model(x)
traced_model = torch.jit.trace(model, x).eval()
traced_model = torch.jit.freeze(traced_model)
tresult = traced_model(x)
self.assertEqual(result, tresult, prec=prec)
ipex.enable_onednn_fusion(True)
            with torch.no_grad():
                trace_fused_model = torch.jit.trace(model, x)
                trace_fused_model = torch.jit.freeze(trace_fused_model)
                # warm-up runs so the profiling executor applies the fusion
                # passes (conv+relu, conv+sum, conv+sum+relu, ...)
                y = trace_fused_model(x)
                fused_tresult = trace_fused_model(x)
                trace_graph = trace_fused_model.graph_for(x)
                fused_tresult = trace_fused_model(x)
                self.assertEqual(result, fused_tresult, prec=prec)
# check if the fused node exists in the graph
if kind_in_graph is not None:
self.assertTrue(any(n.kind() == kind_in_graph for n in trace_graph.nodes()))
# check if certain node does not exist in the graph
if kind_not_in_graph is not None:
self.assertTrue(all(n.kind() != kind_not_in_graph for n in trace_graph.nodes()))
def _test_output_bf16(self, model, x, kind_in_graph=None, kind_not_in_graph=None, prec=None, levels=['O0','O1'], use_channels_last=[True, False]):
modelName = model.__class__.__name__
options = itertools.product(levels, use_channels_last)
for level, use_channels_last in options:
ipex.enable_onednn_fusion(True)
model = model.eval()
            # TODO: remove once the JIT supports conv_bn folding natively.
            if level == 'O0':
                try:
                    model = optimization.fuse(model)
                except Exception:
                    warnings.warn("Conv BatchNorm folding failed.")
if x.dim() == 4 and use_channels_last:
x = x.to(memory_format=torch.channels_last)
model = model.to(memory_format=torch.channels_last)
if x.dim() == 5 and use_channels_last:
x = x.to(memory_format=torch.channels_last_3d)
model = model.to(memory_format=torch.channels_last_3d)
model = ipex.optimize(model, dtype=torch.bfloat16, level=level)
            x3 = x.clone()
            with torch.cpu.amp.autocast(enabled=True, dtype=torch.bfloat16), torch.no_grad():
                # bf16, native path
                result = model(x)
                # bf16, fused JIT path; warm-up runs let the fusion passes kick in
                trace_fused_model = torch.jit.trace(copy.deepcopy(model), x3)
                trace_fused_model = torch.jit.freeze(trace_fused_model)
                fused_tresult = trace_fused_model(x3)
                trace_graph = trace_fused_model.graph_for(x3)
                fused_tresult = trace_fused_model(x3)
self.assertEqual(fused_tresult, result, prec=prec)
self.assertEqual(fused_tresult.dtype, torch.bfloat16)
# check if the fused node exists in the graph
if kind_in_graph is not None:
self.assertTrue(any(n.kind() == kind_in_graph for n in trace_graph.nodes()))
# check if certain node does not exist in the graph
if kind_not_in_graph is not None:
self.assertTrue(all(n.kind() != kind_not_in_graph for n in trace_graph.nodes()))
def test_jit_freeze(self):
model = ConvBatchNorm_Fixed(2, 3, 32, kernel_size=3, stride=1).eval()
x = torch.randn(32, 3, 64, 64).to(memory_format=torch.channels_last)
model = model.to(memory_format=torch.channels_last)
model = ipex.optimize(model, dtype=torch.float32)
with torch.no_grad():
trace_model = torch.jit.trace(model, x).eval()
freeze_model = torch.jit.freeze(trace_model)
        with torch.no_grad():
            # warm-up runs to trigger the JIT optimization passes
            result1 = trace_model(x)
            result2 = freeze_model(x)
            trace_graph = trace_model.graph_for(x)
            freeze_graph = freeze_model.graph_for(x)
            node = "ipex_prepack::convolution_prepack"
            # freezing should constant-fold the prepack op away ...
            self.assertTrue(all(n.kind() != node for n in freeze_graph.nodes()))
            # ... while the non-frozen model must still contain it
            self.assertTrue(any(n.kind() == node for n in trace_graph.nodes()))
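    # Verifies the concat-linear optimization: linear1/linear2 share input x
    # and linear3/linear4 share input res1, so freezing should merge each
    # pair into one GEMM, dropping the linear-op count from 4 to 2 on both
    # the MKL and the oneDNN path.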
def test_concat_linear(self):
        def check_op_count(graph_str, op_names=()):
count = 0
node_list = graph_str.strip().split("\n")
for node in node_list:
for op_name in op_names:
if op_name in node:
count += 1
return count
origin_model = ModMultLinear(50, 60).eval()
test_val1 = torch.rand([50, 5])
        # MKL path (fp32)
        model = ipex.optimize(origin_model, dtype=torch.float32)
        ori_res = model(test_val1)
        model_jit = torch.jit.trace(model, test_val1)
graph_ori = str(model_jit.graph_for(test_val1))
linear_count_ori = check_op_count(graph_ori, ["aten::linear"])
self.assertEqual(linear_count_ori, 4)
model_jit = torch.jit.freeze(model_jit)
jit_res = model_jit(test_val1)
self.assertEqual(ori_res, jit_res)
graph_opt = str(model_jit.graph_for(test_val1))
linear_count_ori = check_op_count(graph_opt, ["aten::linear"])
self.assertEqual(linear_count_ori, 2)
        # oneDNN path (fp32)
        model = ipex.optimize(origin_model, dtype=torch.float32, auto_kernel_selection=True)
        ori_res = model(test_val1)
        model_jit = torch.jit.trace(model, test_val1)
graph_ori = str(model_jit.graph_for(test_val1))
linear_count_ori = check_op_count(graph_ori, ["ipex_prepack::linear_run"])
self.assertEqual(linear_count_ori, 4)
model_jit = torch.jit.freeze(model_jit)
jit_res = model_jit(test_val1)
self.assertEqual(ori_res, jit_res)
graph_opt = str(model_jit.graph_for(test_val1))
linear_count_ori = check_op_count(graph_opt, ["ipex_prepack::linear_run"])
self.assertEqual(linear_count_ori, 2)
        # oneDNN path (bf16)
        model = ipex.optimize(origin_model, dtype=torch.bfloat16)
test_val1 = test_val1.bfloat16()
with torch.cpu.amp.autocast(), torch.no_grad():
ori_res = model(test_val1)
            model_jit = torch.jit.trace(model, test_val1)
graph_ori = str(model_jit.graph_for(test_val1))
linear_count_ori = check_op_count(graph_ori, ["ipex_prepack::linear_run"])
self.assertEqual(linear_count_ori, 4)
model_jit = torch.jit.freeze(model_jit)
model_jit(test_val1)
graph_opt = str(model_jit.graph_for(test_val1))
jit_res = model_jit(test_val1)
self.assertEqual(ori_res[1], jit_res[1])
linear_count_ori = check_op_count(graph_opt, ["ipex_prepack::linear_run"])
self.assertEqual(linear_count_ori, 2)
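    # Verifies that an elementwise add feeding a LayerNorm is fused into a
    # single ipex::add_layernorm op, in fp32 and (with looser tolerance) bf16.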
def test_add_layernorm(self):
bs = 56
seq_len = 384
dim = 768
a = torch.randn(bs, seq_len, dim)
b = torch.randn(bs, seq_len, dim)
model = AddLayerNorm(dim)
        jit_model = torch.jit.trace(model, (a, b))
trace_graph = jit_model.graph_for(a, b)
jit_res = jit_model(a, b)
ori_res = model(a, b)
self.assertEqual(jit_res, ori_res)
node = "ipex::add_layernorm"
self.assertTrue(any(n.kind() == node for n in trace_graph.nodes()))
a_bf16 = a.to(torch.bfloat16)
b_bf16 = b.to(torch.bfloat16)
with torch.cpu.amp.autocast():
ori_res = model(a_bf16, b_bf16)
            jit_model = torch.jit.trace(model, (a, b))
trace_graph = jit_model.graph_for(a, b)
jit_res = jit_model(a_bf16, b_bf16)
node = "ipex::add_layernorm"
self.assertTrue(any(n.kind() == node for n in trace_graph.nodes()))
self.assertEqual(jit_res, ori_res, prec=5e-2)
model = AddLayerNorm_v1(dim)
c = torch.randn(bs, seq_len, dim)
        jit_model = torch.jit.trace(model, (a, b, c))
trace_graph = jit_model.graph_for(a, b, c)
jit_res = jit_model(a, b, c)
ori_res = model(a, b, c)
self.assertEqual(jit_res, ori_res)
node = "ipex::add_layernorm"
self.assertTrue(any(n.kind() == node for n in trace_graph.nodes()))
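    # Verifies the concat+batch_norm+relu fusion, then re-runs the frozen
    # model on shapes and memory formats the fused kernel presumably does not
    # cover (mismatched channel counts, non-channels-last inputs) to check
    # that the fallback path still produces correct results.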
def test_concat_bn_relu(self):
a1 = torch.randn(1, 32, 13, 24, dtype=torch.bfloat16).contiguous(memory_format=torch.channels_last)
a2 = torch.randn(1, 32, 13, 24, dtype=torch.bfloat16).contiguous(memory_format=torch.channels_last)
a3 = torch.randn(1, 32, 13, 24, dtype=torch.bfloat16).contiguous(memory_format=torch.channels_last)
model = ConcatBnRelu2d().eval().to(memory_format=torch.channels_last)
model = ipex.optimize(model, dtype=torch.bfloat16, level='O0')
with torch.no_grad():
jit_model = torch.jit.trace(model, (a1, a2, a3)).eval()
jit_model = torch.jit.freeze(jit_model)
            # warm-up runs
for _ in range(2):
jit_res = jit_model(a1, a2, a3)
ori_res = model(a1, a2, a3)
self.assertEqual(jit_res, ori_res)
a1 = torch.randn(1, 32, 13, 24, dtype=torch.float).contiguous(memory_format=torch.channels_last)
a2 = torch.randn(1, 32, 13, 24, dtype=torch.float).contiguous(memory_format=torch.channels_last)
a3 = torch.randn(1, 32, 13, 24, dtype=torch.float).contiguous(memory_format=torch.channels_last)
model = ConcatBnRelu2d_v1().eval().to(memory_format=torch.channels_last)
model = ipex.optimize(model, dtype=torch.float32, level='O0')
with torch.no_grad():
jit_model = torch.jit.trace(model, (a1, a2, a3)).eval()
jit_model = torch.jit.freeze(jit_model)
            # warm-up runs
for _ in range(2):
jit_res = jit_model(a1, a2, a3)
ori_res = model(a1, a2, a3)
self.assertEqual(jit_res, ori_res)
model = ConcatBnRelu2d().eval().to(memory_format=torch.channels_last)
model = ipex.optimize(model, dtype=torch.float32, level='O0')
with torch.no_grad():
jit_model = torch.jit.trace(model, (a1, a2, a3)).eval()
jit_model = torch.jit.freeze(jit_model)
            # warm-up runs
for _ in range(2):
jit_res = jit_model(a1, a2, a3)
ori_res = model(a1, a2, a3)
self.assertEqual(jit_res, ori_res)
a1 = torch.randn(1, 32, 18, 53, dtype=torch.float).contiguous(memory_format=torch.channels_last)
a2 = torch.randn(1, 32, 18, 53, dtype=torch.float).contiguous(memory_format=torch.channels_last)
a3 = torch.randn(1, 32, 18, 53, dtype=torch.float).contiguous(memory_format=torch.channels_last)
with torch.no_grad():
jit_res = jit_model(a1, a2, a3)
ori_res = model(a1, a2, a3)
self.assertEqual(jit_res, ori_res)
a1 = torch.randn(1, 16, 24, 116, dtype=torch.float).contiguous(memory_format=torch.channels_last)
a2 = torch.randn(1, 48, 24, 116, dtype=torch.float).contiguous(memory_format=torch.channels_last)
a3 = torch.randn(1, 32, 24, 116, dtype=torch.float).contiguous(memory_format=torch.channels_last)
with torch.no_grad():
jit_res = jit_model(a1, a2, a3)
ori_res = model(a1, a2, a3)
self.assertEqual(jit_res, ori_res)
a1 = torch.randn(1, 17, 15, 24, dtype=torch.float).contiguous(memory_format=torch.channels_last)
a2 = torch.randn(1, 47, 15, 24, dtype=torch.float).contiguous(memory_format=torch.channels_last)
a3 = torch.randn(1, 32, 15, 24, dtype=torch.float).contiguous(memory_format=torch.channels_last)
with torch.no_grad():
jit_res = jit_model(a1, a2, a3)
ori_res = model(a1, a2, a3)
self.assertEqual(jit_res, ori_res)
a1 = torch.randn(1, 32, 13, 24, dtype=torch.float)
a2 = torch.randn(1, 32, 13, 24, dtype=torch.float)
a3 = torch.randn(1, 32, 13, 24, dtype=torch.float)
with torch.no_grad():
jit_res = jit_model(a1, a2, a3)
ori_res = model(a1, a2, a3)
self.assertEqual(jit_res, ori_res)
a1 = torch.randn(1, 32, 13, 24, 33, dtype=torch.float).contiguous(memory_format=torch.channels_last_3d)
a2 = torch.randn(1, 32, 13, 24, 33, dtype=torch.float).contiguous(memory_format=torch.channels_last_3d)
a3 = torch.randn(1, 32, 13, 24, 33, dtype=torch.float).contiguous(memory_format=torch.channels_last_3d)
model = ConcatBnRelu3d().eval().to(memory_format=torch.channels_last_3d)
model = ipex.optimize(model, dtype=torch.float32, level='O0')
with torch.no_grad():
jit_model = torch.jit.trace(model, (a1, a2, a3)).eval()
jit_model = torch.jit.freeze(jit_model)
            # warm-up runs
for _ in range(2):
jit_res = jit_model(a1, a2, a3)
ori_res = model(a1, a2, a3)
self.assertEqual(jit_res, ori_res)
a1 = torch.randn(1, 16, 17, 14, 31, dtype=torch.float).contiguous(memory_format=torch.channels_last_3d)
a2 = torch.randn(1, 48, 17, 14, 31, dtype=torch.float).contiguous(memory_format=torch.channels_last_3d)
a3 = torch.randn(1, 32, 17, 14, 31, dtype=torch.float).contiguous(memory_format=torch.channels_last_3d)
with torch.no_grad():
jit_res = jit_model(a1, a2, a3)
ori_res = model(a1, a2, a3)
self.assertEqual(jit_res, ori_res)
a1 = torch.randn(1, 17, 13, 24, 33, dtype=torch.float).contiguous(memory_format=torch.channels_last_3d)
a2 = torch.randn(1, 47, 13, 24, 33, dtype=torch.float).contiguous(memory_format=torch.channels_last_3d)
a3 = torch.randn(1, 32, 13, 24, 33, dtype=torch.float).contiguous(memory_format=torch.channels_last_3d)
with torch.no_grad():
jit_res = jit_model(a1, a2, a3)
ori_res = model(a1, a2, a3)
self.assertEqual(jit_res, ori_res)
a1 = torch.randn(1, 32, 13, 24, 33, dtype=torch.float)
a2 = torch.randn(1, 32, 13, 24, 33, dtype=torch.float)
a3 = torch.randn(1, 32, 13, 24, 33, dtype=torch.float)
with torch.no_grad():
jit_res = jit_model(a1, a2, a3)
ori_res = model(a1, a2, a3)
self.assertEqual(jit_res, ori_res)
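    # Verifies the fused multi-head-attention score kernel: scaled matmul,
    # bias add, and softmax collapse into ipex::mha_scores_calc. The shape
    # sweep covers several sequence lengths and broadcastable bias layouts.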
def test_mha_scores_calculation(self):
        def _check_match_mha(trace_model, mat1, mat2, bias, node="ipex::mha_scores_calc"):
            graph = trace_model.graph_for(mat1, mat2, bias)
self.assertTrue(any(n.kind() == node for n in graph.nodes()))
def _test_pure_bf16(model, trace_model, mat1, mat2, bias, prec=3e-2):
mat1_bf16 = mat1.to(torch.bfloat16)
mat2_bf16 = mat2.to(torch.bfloat16)
bias_bf16 = bias.to(torch.bfloat16)
res_ref = model(mat1_bf16, mat2_bf16, bias_bf16)
res_jit = trace_model(mat1_bf16, mat2_bf16, bias_bf16)
self.assertEqual(res_ref, res_jit, prec=prec)
_check_match_mha(trace_model, mat1, mat2, bias)
mat1 = torch.randn(56, 16, 384, 384)
mat2 = torch.randn(56, 16, 384, 384)
bias = torch.randn(56, 16, 384, 384)
for softmax_dim in [0, 1, 2, -1]:
mha = MHAScoresCalculation(4, softmax_dim)
with torch.no_grad():
mha_jit = torch.jit.trace(mha, (mat1, mat2, bias))
mha_jit.eval()
res_ref = mha(mat1, mat2, bias)
res_jit = mha_jit(mat1, mat2, bias)
self.assertEqual(res_ref, res_jit)
_check_match_mha(mha_jit, mat1, mat2, bias)
_test_pure_bf16(mha, mha_jit, mat1, mat2, bias)
mat1 = torch.randn(1, 1, 2, 3)
mat2 = torch.randn(1, 1, 16, 3)
bias = torch.randn(1, 1, 2, 16)
res_ref = mha(mat1, mat2, bias)
res_jit = mha_jit(mat1, mat2, bias)
self.assertEqual(res_ref, res_jit)
_check_match_mha(mha_jit, mat1, mat2, bias)
_test_pure_bf16(mha, mha_jit, mat1, mat2, bias)
mat1 = torch.randn(1, 1, 2, 3)
mat2 = torch.randn(1, 1, 32, 3)
bias = torch.randn(1, 1, 2, 32)
res_ref = mha(mat1, mat2, bias)
res_jit = mha_jit(mat1, mat2, bias)
self.assertEqual(res_ref, res_jit)
_check_match_mha(mha_jit, mat1, mat2, bias)
_test_pure_bf16(mha, mha_jit, mat1, mat2, bias)
mat1 = torch.randn(1, 1, 2, 3)
mat2 = torch.randn(1, 1, 33, 3)
bias = torch.randn(1, 1, 2, 33)
res_ref = mha(mat1, mat2, bias)
res_jit = mha_jit(mat1, mat2, bias)
self.assertEqual(res_ref, res_jit)
_check_match_mha(mha_jit, mat1, mat2, bias)
_test_pure_bf16(mha, mha_jit, mat1, mat2, bias)
mat1 = torch.randn(2, 3, 4, 6)
mat2 = torch.randn(2, 3, 6, 6)
bias = torch.randn(2, 3, 4, 6)
res_ref = mha(mat1, mat2, bias)
res_jit = mha_jit(mat1, mat2, bias)
self.assertEqual(res_ref, res_jit)
_check_match_mha(mha_jit, mat1, mat2, bias)
_test_pure_bf16(mha, mha_jit, mat1, mat2, bias)
# Test broadcast
mat1 = torch.randn(2, 3, 4, 10)
mat2 = torch.randn(2, 3, 16, 10)
bias = torch.randn(1, 1, 1, 16)
self.assertEqual(mha(mat1, mat2, bias), mha_jit(mat1, mat2, bias))
_check_match_mha(mha_jit, mat1, mat2, bias)
_test_pure_bf16(mha, mha_jit, mat1, mat2, bias)
bias = torch.randn(4, 16)
self.assertEqual(mha(mat1, mat2, bias), mha_jit(mat1, mat2, bias))
_check_match_mha(mha_jit, mat1, mat2, bias)
_test_pure_bf16(mha, mha_jit, mat1, mat2, bias)
bias = torch.randn(3, 1, 1)
self.assertEqual(mha(mat1, mat2, bias), mha_jit(mat1, mat2, bias))
_check_match_mha(mha_jit, mat1, mat2, bias)
_test_pure_bf16(mha, mha_jit, mat1, mat2, bias)
bias = torch.randn(2, 1, 1, 1)
self.assertEqual(mha(mat1, mat2, bias), mha_jit(mat1, mat2, bias))
_check_match_mha(mha_jit, mat1, mat2, bias)
_test_pure_bf16(mha, mha_jit, mat1, mat2, bias)
bias = torch.randn(3, 4, 16)
self.assertEqual(mha(mat1, mat2, bias), mha_jit(mat1, mat2, bias))
_check_match_mha(mha_jit, mat1, mat2, bias)
_test_pure_bf16(mha, mha_jit, mat1, mat2, bias)
bias = torch.randn(2, 1, 1, 16)
self.assertEqual(mha(mat1, mat2, bias), mha_jit(mat1, mat2, bias))
_check_match_mha(mha_jit, mat1, mat2, bias)
_test_pure_bf16(mha, mha_jit, mat1, mat2, bias)
bias = torch.randn(2, 1, 4, 16)
self.assertEqual(mha(mat1, mat2, bias), mha_jit(mat1, mat2, bias))
_check_match_mha(mha_jit, mat1, mat2, bias)
_test_pure_bf16(mha, mha_jit, mat1, mat2, bias)
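    # Same idea for the DistilBERT-style variant, where a boolean mask plus
    # masked_fill feeds the softmax (ipex::distil_mha_scores_calc).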
def test_distil_mha_scores_calculation(self):
        def _check_match_mha(trace_model, mat1, mat2, mask, node="ipex::distil_mha_scores_calc"):
            graph = trace_model.graph_for(mat1, mat2, mask)
self.assertTrue(any(n.kind() == node for n in graph.nodes()))
def _test_pure_bf16(model, trace_model, mat1, mat2, mask, prec=3e-2):
mat1_bf16 = mat1.to(torch.bfloat16)
mat2_bf16 = mat2.to(torch.bfloat16)
bias_bf16 = mask.to(torch.bfloat16)
res_ref = model(mat1_bf16, mat2_bf16, bias_bf16)
res_jit = trace_model(mat1_bf16, mat2_bf16, bias_bf16)
self.assertEqual(res_ref, res_jit, prec=prec)
_check_match_mha(trace_model, mat1, mat2, mask)
mat1 = torch.randn(56, 12, 384, 384)
mat2 = torch.randn(56, 12, 384, 384)
mask = torch.randn(56, 384)
mask = (mask > 0.5)
mha_v1 = DistilMHAScoresCalculation_v1(4, -1)
with torch.no_grad():
mha_jit = torch.jit.trace(mha_v1, (mat1, mat2, mask))
mha_jit.eval()
res_ref = mha_v1(mat1, mat2, mask)
res_jit = mha_jit(mat1, mat2, mask)
self.assertEqual(res_ref, res_jit)
_check_match_mha(mha_jit, mat1, mat2, mask)
_test_pure_bf16(mha_v1, mha_jit, mat1, mat2, mask)
mha_v2 = DistilMHAScoresCalculation_v2(4)
with torch.no_grad():
mha_jit = torch.jit.trace(mha_v2, (mat1, mat2, mask))
mha_jit.eval()
res_ref = mha_v2(mat1, mat2, mask)
res_jit = mha_jit(mat1, mat2, mask)
self.assertEqual(res_ref, res_jit)
_check_match_mha(mha_jit, mat1, mat2, mask)
            _test_pure_bf16(mha_v2, mha_jit, mat1, mat2, mask)
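    # Sweeps 2d and 3d convolutions through the conv+eltwise fusions
    # (swish/SiLU, sigmoid, hardtanh, ELU), checking the fp32 and, where a
    # bf16 kernel exists, the bf16 path.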
def test_conv_fusion(self):
batch_size = 8
out_channels = 16
in_channels = 3
kernel_size = 3
image_size = 16
for dim in [2, 3]:
input_size = [batch_size, in_channels, image_size, image_size]
if dim == 3:
input_size.append(image_size)
x = torch.randn(input_size)
            # disabled: fp32 ConvSwishOutplace check; the call below predates
            # the `dim` argument and would need updating before re-enabling.
            # self._test_output(
            #     ConvSwishOutplace(in_channels, out_channels, kernel_size, image_size),
            #     torch.randn(batch_size, in_channels, image_size, image_size),
            #     kind_in_graph="ipex_prepack::convolution_swish_run")
self._test_output_bf16(
ConvSwishOutplace(dim, in_channels, out_channels, kernel_size, image_size),
x,
kind_in_graph="ipex_prepack::convolution_swish_run",
prec=0.02)
self._test_output(
ConvSwishInplace(dim, in_channels, out_channels, kernel_size, image_size),
x,
kind_in_graph="ipex_prepack::convolution_swish_run")
self._test_output_bf16(
ConvSwishInplace(dim, in_channels, out_channels, kernel_size, image_size),
x,
kind_in_graph="ipex_prepack::convolution_swish_run",
prec=0.02)
self._test_output(
ConvSiluOutplace(dim, in_channels, out_channels, kernel_size, image_size),
x,
kind_in_graph="ipex_prepack::convolution_swish_run")
self._test_output(
ConvSiluInplace(dim, in_channels, out_channels, kernel_size, image_size),
x,
kind_in_graph="ipex_prepack::convolution_swish_run")
self._test_output_bf16(
ConvSiluOutplace(dim, in_channels, out_channels, kernel_size, image_size),
x,
kind_in_graph="ipex_prepack::convolution_swish_run",
prec=0.02)
self._test_output_bf16(
ConvSiluInplace(dim, in_channels, out_channels, kernel_size, image_size),
x,
kind_in_graph="ipex_prepack::convolution_swish_run",
prec=0.02)
self._test_output(
ConvSigmoidOutplace(dim, in_channels, out_channels, kernel_size, image_size),
x,
kind_in_graph="ipex_prepack::convolution_sigmoid_run")
self._test_output_bf16(
ConvSigmoidOutplace(dim, in_channels, out_channels, kernel_size, image_size),
x,
kind_in_graph="ipex_prepack::convolution_sigmoid_run",
prec=0.02)
self._test_output(
ConvSigmoidInplace(dim, in_channels, out_channels, kernel_size, image_size),
x,
kind_in_graph="ipex_prepack::convolution_sigmoid_run")
self._test_output_bf16(
ConvSigmoidInplace(dim, in_channels, out_channels, kernel_size, image_size),
x,
kind_in_graph="ipex_prepack::convolution_sigmoid_run",
prec=0.02)
self._test_output(
ConvHardtanh(dim, in_channels, out_channels, kernel_size, image_size, True),
x,
kind_in_graph="ipex_prepack::convolution_hardtanh_run")
self._test_output_bf16(
ConvHardtanh(dim, in_channels, out_channels, kernel_size, image_size, True),
x,
kind_in_graph="ipex_prepack::convolution_hardtanh_run",
prec=0.02)
self._test_output(
ConvHardtanh(dim, in_channels, out_channels, kernel_size, image_size),
x,
kind_in_graph="ipex_prepack::convolution_hardtanh_run")
self._test_output_bf16(
ConvHardtanh(dim, in_channels, out_channels, kernel_size, image_size),
x,
kind_in_graph="ipex_prepack::convolution_hardtanh_run",
prec=0.02)
self._test_output(
ConvElu(dim, in_channels, out_channels, kernel_size, image_size, True),
x,
kind_in_graph="ipex_prepack::convolution_elu_run")
# self._test_output_bf16(
# ConvElu(in_channels, out_channels, kernel_size, image_size, True),
# torch.randn(batch_size, in_channels, image_size, image_size),
# kind_in_graph="ipex::conv2d_elu",
# prec=0.02)
self._test_output(
ConvElu(dim, in_channels, out_channels, kernel_size, image_size),
x,
kind_in_graph="ipex_prepack::convolution_elu_run")
# self._test_output_bf16(
# ConvElu(in_channels, out_channels, kernel_size, image_size),
# torch.randn(batch_size, in_channels, image_size, image_size),
# kind_in_graph="ipex::conv2d_elu",
# prec=0.02)
def test_output_conv_bn(self):
batch_size = 8
out_channels = 16
in_channels = 3
kernel_size = 3
image_size = 16
for dim in [2, 3]:
input_size = [batch_size, in_channels, image_size, image_size]
if dim == 3:
input_size.append(image_size)
x = torch.randn(input_size)
self._test_output(
ConvBatchNorm_Fixed(dim, in_channels, out_channels, kernel_size=kernel_size, stride=1),
x,
kind_in_graph="ipex_prepack::convolution_run",
kind_not_in_graph="ipex::batch_norm",
levels=['O1'])
self._test_output_bf16(
ConvBatchNorm_Fixed(dim, in_channels, out_channels, kernel_size=kernel_size, stride=1),
x,
kind_in_graph="ipex_prepack::convolution_run",
kind_not_in_graph="ipex::batch_norm",
prec=0.02,
levels=['O1'])
def test_output_bn_conv(self):
batch_size = 8
out_channels = 16
in_channels = 3
kernel_size = 3
image_size = 16
for dim in [2, 3]:
input_size = [batch_size, in_channels, image_size, image_size]
if dim == 3:
input_size.append(image_size)
x = torch.randn(input_size)
self._test_output(
BatchNormConv_Fixed(dim, in_channels, out_channels, kernel_size=kernel_size, stride=1),
x,
kind_in_graph="ipex::batch_norm",
kind_not_in_graph=None)
def test_output_bn_conv_bn(self):
batch_size = 8
out_channels = 16
in_channels = 3
kernel_size = 3
image_size = 16
for dim in [2, 3]:
input_size = [batch_size, in_channels, image_size, image_size]
if dim == 3:
input_size.append(image_size)
x = torch.randn(input_size)
self._test_output(
BatchNorm_Conv_BatchNorm(dim, in_channels, out_channels, kernel_size=kernel_size, stride=1),
x,
kind_in_graph="ipex::batch_norm",
kind_not_in_graph=None)
def test_output_conv_reshape_bn(self):
batch_size = 8
out_channels = 32
in_channels = 3
kernel_size = 3
image_size = 64
for dim in [2, 3]:
input_size = [batch_size, in_channels, image_size, image_size]
if dim == 3:
input_size.append(image_size)
x = torch.randn(input_size)
dst_shape = [16, 16, 62, 62]
if dim == 3:
dst_shape.append(62)
self._test_output(
ConvReshapeBatchNorm(dim, in_channels, out_channels, dst_shape, kernel_size=kernel_size, stride=1),
x,
kind_in_graph="ipex::batch_norm",
kind_not_in_graph=None)
def test_output_conv_conv_concate(self):
batch_size = 8
out_channels = 32
in_channels = 3
kernel_size = 3
image_size = 64
for dim in [2, 3]:
input_size = [batch_size, in_channels, image_size, image_size]
if dim == 3:
input_size.append(image_size)
x = torch.randn(input_size)
self._test_output(
Conv_Conv_Concat(dim, in_channels, out_channels, kernel_size=kernel_size, stride=1),
x,
kind_in_graph="ipex_prepack::convolution_run",
kind_not_in_graph=None)
def test_output_conv_relu_add(self):
batch_size = 8
out_channels = 32
in_channels = 3
kernel_size = 3
image_size = 64
for dim in [2, 3]:
input_size = [batch_size, in_channels, image_size, image_size]
if dim == 3:
input_size.append(image_size)
x = torch.randn(input_size)
self._test_output(
Conv_Relu_Add(dim, in_channels, out_channels, kernel_size=kernel_size, stride=1),
x,
kind_in_graph="ipex_prepack::convolution_relu_run")
def test_output_conv_scalar_binary(self):
for bias in [True, False]:
self._test_output(
Conv_Scalar_Binary(torch.add, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_run",
kind_not_in_graph="aten::add")
self._test_output(
Conv_Scalar_Binary(torch.sub, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_run",
kind_not_in_graph="aten::sub")
self._test_output(
Conv_Scalar_Binary(torch.mul, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_run",
kind_not_in_graph="aten::mul")
self._test_output(
Conv_Scalar_Binary(torch.div, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_run",
kind_not_in_graph="aten::div")
self._test_output_bf16(
Conv_Scalar_Binary(torch.add, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_run",
kind_not_in_graph="aten::add",
prec=0.1)
self._test_output_bf16(
Conv_Scalar_Binary(torch.sub, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_run",
kind_not_in_graph="aten::sub",
prec=0.1)
self._test_output_bf16(
Conv_Scalar_Binary(torch.mul, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_run",
kind_not_in_graph="aten::mul",
prec=0.1)
self._test_output_bf16(
Conv_Scalar_Binary(torch.div, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_run",
kind_not_in_graph="aten::div",
prec=0.1)
def test_output_conv_scalar_binary_add(self):
for bias in [True, False]:
self._test_output(
Conv_Scalar_Binary_Add(torch.add, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_add_run",
kind_not_in_graph="aten::add")
self._test_output(
Conv_Scalar_Binary_Add(torch.sub, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_add_run",
kind_not_in_graph="aten::sub")
self._test_output(
Conv_Scalar_Binary_Add(torch.mul, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_add_run",
kind_not_in_graph="aten::mul")
self._test_output(
Conv_Scalar_Binary_Add(torch.div, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_add_run",
kind_not_in_graph="aten::div")
self._test_output_bf16(
Conv_Scalar_Binary_Add(torch.add, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_add_run",
kind_not_in_graph="aten::add",
prec=0.1)
self._test_output_bf16(
Conv_Scalar_Binary_Add(torch.sub, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_add_run",
kind_not_in_graph="aten::sub",
prec=0.1)
self._test_output_bf16(
Conv_Scalar_Binary_Add(torch.mul, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_add_run",
kind_not_in_graph="aten::mul",
prec=0.1)
self._test_output_bf16(
Conv_Scalar_Binary_Add(torch.div, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_add_run",
kind_not_in_graph="aten::div",
prec=0.1)
def test_output_conv_tensor_binary(self):
for bias in [True, False]:
self._test_output(
Conv_Tensor_Binary(torch.add, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_run",
kind_not_in_graph="aten::add")
self._test_output(
Conv_Tensor_Binary(torch.sub, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_run",
kind_not_in_graph="aten::sub")
self._test_output(
Conv_Tensor_Binary(torch.mul, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_run",
kind_not_in_graph="aten::mul")
self._test_output(
Conv_Tensor_Binary(torch.div, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_run",
kind_not_in_graph="aten::div",
prec=2e-5)
self._test_output_bf16(
Conv_Tensor_Binary(torch.add, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_run",
kind_not_in_graph="aten::add",
prec=0.1)
self._test_output_bf16(
Conv_Tensor_Binary(torch.sub, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_run",
kind_not_in_graph="aten::sub",
prec=0.1)
self._test_output_bf16(
Conv_Tensor_Binary(torch.mul, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_run",
kind_not_in_graph="aten::mul",
prec=0.1)
self._test_output_bf16(
Conv_Tensor_Binary(torch.div, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_run",
kind_not_in_graph="aten::div",
prec=0.5)
def test_output_conv_tensor_binary_add(self):
for bias in [True, False]:
self._test_output(
Conv_Tensor_Binary_Add(torch.add, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_add_run",
kind_not_in_graph="aten::add")
self._test_output(
Conv_Tensor_Binary_Add(torch.sub, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_add_run",
kind_not_in_graph="aten::sub")
self._test_output(
Conv_Tensor_Binary_Add(torch.mul, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_add_run",
kind_not_in_graph="aten::mul")
self._test_output(
Conv_Tensor_Binary_Add(torch.div, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_add_run",
kind_not_in_graph="aten::div",
prec=2e-5)
self._test_output_bf16(
Conv_Tensor_Binary_Add(torch.add, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_add_run",
kind_not_in_graph="aten::add",
prec=0.1)
self._test_output_bf16(
Conv_Tensor_Binary_Add(torch.sub, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_add_run",
kind_not_in_graph="aten::sub",
prec=0.1)
self._test_output_bf16(
Conv_Tensor_Binary_Add(torch.mul, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_add_run",
kind_not_in_graph="aten::mul",
prec=0.1)
self._test_output_bf16(
Conv_Tensor_Binary_Add(torch.div, 2, 3, 32, kernel_size=3, stride=1, bias=bias),
torch.randn(32, 3, 64, 64),
kind_in_graph="ipex_prepack::convolution_add_run",
kind_not_in_graph="aten::div",
prec=0.5)
def test_output_conv_bn_relu(self):
batch_size = 8
out_channels = 32
in_channels = 3
kernel_size = 3
image_size = 64
for dim in [2, 3]:
input_size = [batch_size, in_channels, image_size, image_size]
if dim == 3:
input_size.append(image_size)
x = torch.randn(input_size)
self._test_output(
Conv_Bn_Relu(dim, in_channels, out_channels, kernel_size=kernel_size, stride=1),
x,
kind_in_graph="ipex_prepack::convolution_relu_run")
def test_output_conv_reshape_relu(self):
batch_size = 8
out_channels = 32
in_channels = 3
kernel_size = 3
image_size = 64
for dim in [2, 3]:
input_size = [batch_size, in_channels, image_size, image_size]
if dim == 3:
input_size.append(image_size)
x = torch.randn(input_size)
dst_shape = [16, 16, 62, 62]
if dim == 3:
dst_shape.append(62)
self._test_output(
ConvReshapeRelu(dim, in_channels, out_channels, dst_shape, kernel_size=kernel_size, stride=1),
x,
kind_in_graph="ipex_prepack::convolution_run",
kind_not_in_graph="ipex_prepack::convolution_relu_run")
def test_output_conv_reshape_sum(self):
batch_size = 8
out_channels = 32
in_channels = 3
kernel_size = 3
image_size = 64
for dim in [2, 3]:
input_size = [batch_size, in_channels, image_size, image_size]
if dim == 3:
input_size.append(image_size)
x = torch.randn(input_size)
dst_shape = [16, 16, 62, 62]
if dim == 3:
dst_shape.append(62)
self._test_output(
ConvReshapeSum(dim, in_channels, out_channels, dst_shape, kernel_size=kernel_size, stride=1),
x,
kind_in_graph="ipex_prepack::convolution_run",
kind_not_in_graph="ipex_prepack::convolution_add_run")
def test_output_conv_relu(self):
batch_size = 8
out_channels = 32
in_channels = 3
kernel_size = 3
image_size = 64
for dim in [2, 3]:
input_size = [batch_size, in_channels, image_size, image_size]
if dim == 3:
input_size.append(image_size)
x = torch.randn(input_size)
self._test_output(
ConvRelu_Fixed(dim, in_channels, out_channels, kernel_size=kernel_size, stride=1),
x,
kind_in_graph="ipex_prepack::convolution_relu_run")
self._test_output_bf16(
ConvRelu_Fixed(dim, in_channels, out_channels, kernel_size=kernel_size, stride=1),
x,
kind_in_graph="ipex_prepack::convolution_relu_run",
prec=0.02)
def test_output_conv_sum(self):
batch_size = 8
out_channels = 32
in_channels = 3
kernel_size = 3
image_size = 64
for dim in [2, 3]:
input_size = [batch_size, in_channels, image_size, image_size]
if dim == 3:
input_size.append(image_size)
x = torch.randn(input_size)
self._test_output(
ConvSum(dim, in_channels, out_channels, kernel_size=kernel_size, stride=1),
x,
kind_in_graph="ipex_prepack::convolution_add_run")
self._test_output_bf16(
ConvSum(dim, in_channels, out_channels, kernel_size=kernel_size, stride=1),
x,
kind_in_graph="ipex_prepack::convolution_add_run",
prec=0.1)
            # the two conv outputs intentionally carry different memory formats
            m = ConvSum(dim, in_channels, out_channels, kernel_size=kernel_size, stride=1).eval()
            if dim == 2:
                m.conv = m.conv.to(memory_format=torch.channels_last)
            else:
                m.conv = m.conv.to(memory_format=torch.channels_last_3d)
self._test_output(
m,
x,
kind_in_graph="ipex_prepack::convolution_add_run",
use_channels_last=[False])
m = ConvSum(dim, in_channels, out_channels, kernel_size=kernel_size, stride=1).eval()
if dim == 2:
m.conv = m.conv.to(memory_format=torch.channels_last)
else:
m.conv = m.conv.to(memory_format=torch.channels_last_3d)
self._test_output_bf16(
m,
x,
kind_in_graph="ipex_prepack::convolution_add_run",
prec=0.1,
use_channels_last=[False])
def test_output_conv_scalar_sum(self):
batch_size = 8
out_channels = 32
in_channels = 3
kernel_size = 3
image_size = 64
for dim in [2, 3]:
input_size = [batch_size, in_channels, image_size, image_size]
if dim == 3:
input_size.append(image_size)
x = torch.randn(input_size)
self._test_output(
ConvScalarSum(dim, in_channels, out_channels, kernel_size=kernel_size, stride=1),
x,
kind_in_graph="ipex_prepack::convolution_run",
kind_not_in_graph="ipex_prepack::convolution_add_run")
self._test_output_bf16(
ConvScalarSum(dim, in_channels, out_channels, kernel_size=kernel_size, stride=1),
x,
kind_in_graph="ipex_prepack::convolution_run",
kind_not_in_graph="ipex_prepack::convolution_add_run",
prec=0.1)
def test_output_conv_broadcast_sum(self):
batch_size = 8
out_channels = 32
in_channels = 3
kernel_size = 3
image_size = 64
for dim in [2, 3]:
input_size = [batch_size, in_channels, image_size, image_size]
if dim == 3:
input_size.append(image_size)
x = torch.randn(input_size)
self._test_output(
ConvBroadcastSum(dim, in_channels, out_channels, kernel_size=kernel_size, stride=1),
x,
kind_in_graph="ipex_prepack::convolution_run",
kind_not_in_graph="ipex_prepack::convolution_add_run")
self._test_output_bf16(
ConvBroadcastSum(dim, in_channels, out_channels, kernel_size=kernel_size, stride=1),
x,
kind_in_graph="ipex_prepack::convolution_run",
kind_not_in_graph="ipex_prepack::convolution_add_run",
prec=0.1)
def test_output_cascaded_conv_bn_sum_relu(self):
batch_size = 8
mid_channels = 64
out_channels = 32
in_channels = 3
kernel_size = 3
image_size = 64
for dim in [2, 3]:
input_size = [batch_size, in_channels, image_size, image_size]
if dim == 3:
input_size.append(image_size)
x = torch.randn(input_size)
self._test_output(
CascadedConvBnSumRelu(dim, in_channels, mid_channels, out_channels, kernel_size=kernel_size, stride=1),
x,
kind_in_graph="ipex_prepack::convolution_add_relu_run",
kind_not_in_graph="ipex::batch_norm")
self._test_output_bf16(
CascadedConvBnSumRelu(dim, in_channels, mid_channels, out_channels, kernel_size=kernel_size, stride=1),
x,
kind_in_graph="ipex_prepack::convolution_add_relu_run",
kind_not_in_graph="ipex::batch_norm",
prec=0.02)
def test_bottleneck_fusion(self):
x1 = torch.randn(1, 64, 56, 56)
self._test_output(
Bottleneck_v1(),
x1,
kind_in_graph="ipex_prepack::convolution_bottleneck_run",
use_channels_last=[True],
levels=['O1'])
self._test_output_bf16(
Bottleneck_v1(),
x1,
kind_in_graph="ipex_prepack::convolution_bottleneck_run",
prec=0.03,
use_channels_last=[True],
levels=['O1'])
self._test_output(
Bottleneck_v2(),
x1,
kind_in_graph="ipex_prepack::convolution_bottleneck_run",
use_channels_last=[True],
levels=['O1'])
self._test_output_bf16(
Bottleneck_v2(),
x1,
kind_in_graph="ipex_prepack::convolution_bottleneck_run",
prec=0.03,
use_channels_last=[True],
levels=['O1'])
def test_jit_conv_sum_in_diff_block(self):
batch_size = 8
out_channels = 32
in_channels = 3
kernel_size = 1
image_size = 64
for dim in [2, 3]:
input_size = [batch_size, in_channels, image_size, image_size]
if dim == 3:
input_size.append(image_size)
x = torch.randn(input_size)
self._test_output(
ConvSumInDiffBlock(dim, in_channels, out_channels, kernel_size=kernel_size, stride=1, padding=0),
x,
kind_not_in_graph="ipex_prepack::convolution_add_run")
self._test_output_bf16(
ConvSumInDiffBlock(dim, in_channels, out_channels, kernel_size=kernel_size, stride=1, padding=0),
x,
kind_not_in_graph="ipex_prepack::convolution_add_run")
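    # Sweeps ConvTranspose2d over stride/padding/output_padding/groups/
    # dilation combinations, skipping parameter sets PyTorch rejects
    # (output_padding must be smaller than stride or dilation, and the
    # computed output size must stay positive).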
    def test_output_conv_transpose2d(self):
        def _deconv_params_list():
            params_dict = {
                "input_height": [12],
                "input_width": [12],
                "input_depth": [12],
                "input_channel_per_group": [15],
                "output_channel_per_group": [3],
                "kernel_size": [3],
                "bias": [True, False],
                "stride": [1, 2],
                "padding": [1, 2],
                "output_padding": [0],  # TODO: fix output_padding > 1.
                "groups": [1, 2],
                "dilation": [1, 2],
            }
            params_list = []
            for key, value in params_dict.items():
                params_list.append(value)
            return params_list

        def _deconv_with_output_padding():
            params_dict = {
                "input_height": 8,
                "input_width": 8,
                "input_depth": 8,
                "input_channel_per_group": 10,
                "output_channel_per_group": 10,
                "kernel_size": 3,
                "bias": False,
                "stride": 2,
                "padding": 1,
                "output_padding": 2,
                "groups": 1,
                "dilation": 3,
            }
            params_list = []
            for key, value in params_dict.items():
                params_list.append(value)
            return params_list

        params_list = _deconv_params_list()
        for input_width, input_height, input_depth, input_channel_per_group, output_channel_per_group, kernel_size, bias, stride, padding, output_padding, groups, dilation in list(itertools.product(*params_list)) + [_deconv_with_output_padding()]:
            # skip parameter combinations that are invalid for ConvTranspose:
            # output_padding must be smaller than either stride or dilation,
            # and every spatial output size must come out positive
            if (output_padding < stride or output_padding < dilation) \
                    and ((input_height - 1) * stride - 2 * padding + dilation * (kernel_size - 1) + output_padding + 1 > 0) \
                    and ((input_width - 1) * stride - 2 * padding + dilation * (kernel_size - 1) + output_padding + 1 > 0) \
                    and ((input_depth - 1) * stride - 2 * padding + dilation * (kernel_size - 1) + output_padding + 1 > 0):
                ic = input_channel_per_group * groups
                oc = output_channel_per_group * groups
                x = torch.randn(2, ic, input_height, input_width)
                model = ConvTranspose2d(ic, oc, kernel_size, stride, padding, output_padding, groups, bias, dilation)
                self._test_output(
                    model,
                    x,
                    kind_in_graph="ipex_prepack::conv_transpose2d_run",
                    kind_not_in_graph="aten::conv_transpose2d",
                    levels=["O0"])
                self._test_output_bf16(
                    model,
                    x,
                    kind_in_graph="ipex_prepack::conv_transpose2d_run",
                    kind_not_in_graph="aten::conv_transpose2d",
                    levels=["O0"],
                    prec=0.02)
                self._test_output(
                    model,
                    x,
                    kind_in_graph="ipex_prepack::conv_transpose2d_run",
                    kind_not_in_graph="torch_ipex::conv_transpose2d",
                    levels=["O1"])
                self._test_output_bf16(
                    model,
                    x,
                    kind_in_graph="ipex_prepack::conv_transpose2d_run",
                    kind_not_in_graph="torch_ipex::conv_transpose2d",
                    levels=["O1"],
                    prec=0.02)
    def test_linear_auto_kernel_selection_fp32(self):
        x = torch.rand(32, 3)
        options = itertools.product(['O0', 'O1'], [True, False])
        for level, auto_select_kernel in options:
            model = LinearRelu(3, 32, bias=True).eval()
            model = ipex.optimize(model, dtype=torch.float32, level=level, auto_kernel_selection=auto_select_kernel)
            with torch.no_grad():
                traced_model = torch.jit.trace(model, x).eval()
                traced_model = torch.jit.freeze(traced_model)
                y = traced_model(x)
                trace_graph = traced_model.graph_for(x)

                if auto_select_kernel and level == 'O1':
                    # with auto_kernel_selection=True at level 'O1', the IPEX linear kernel is used
                    self.assertTrue(any(n.kind() == 'ipex_prepack::linear_relu_run' for n in trace_graph.nodes()))
                else:
                    # with auto_kernel_selection=False, or at level 'O0', the MKL linear kernel is used
                    self.assertTrue(any(n.kind() == 'aten::linear' for n in trace_graph.nodes()))

    def test_linear_auto_kernel_selection_bf16(self):
        x = torch.rand(32, 3)
        options = itertools.product(['O0', 'O1'], [True, False])
        for level, auto_select_kernel in options:
            model = LinearRelu(3, 32, bias=True).eval()
            model = ipex.optimize(model, dtype=torch.bfloat16, level=level, auto_kernel_selection=auto_select_kernel)
            with torch.cpu.amp.autocast(), torch.no_grad():
                traced_model = torch.jit.trace(model, x).eval()
                traced_model = torch.jit.freeze(traced_model)
                y = traced_model(x)
                trace_graph = traced_model.graph_for(x)

                # on the bfloat16 path the IPEX linear kernel is used for both 'O0' and 'O1'
                self.assertTrue(any(n.kind() == 'ipex_prepack::linear_relu_run' for n in trace_graph.nodes()))
    def test_output_linear_scalar_binary(self):
        for bias in [True, False]:
            self._test_output(
                Linear_Scalar_Binary(torch.add, 3, 32, bias=bias),
                torch.randn(52, 3),
                kind_in_graph="aten::linear",
                kind_not_in_graph="aten::add")
            self._test_output(
                Linear_Scalar_Binary(torch.sub, 3, 32, bias=bias),
                torch.randn(52, 3),
                kind_in_graph="aten::linear",
                kind_not_in_graph="aten::sub")
            self._test_output(
                Linear_Scalar_Binary(torch.mul, 3, 32, bias=bias),
                torch.randn(52, 3),
                kind_in_graph="aten::linear",
                kind_not_in_graph="aten::mul")
            self._test_output(
                Linear_Scalar_Binary(torch.div, 3, 32, bias=bias),
                torch.randn(52, 3),
                kind_in_graph="aten::linear",
                kind_not_in_graph="aten::div")
            self._test_output_bf16(
                Linear_Scalar_Binary(torch.add, 3, 32, bias=bias),
                torch.randn(52, 3),
                kind_in_graph="ipex_prepack::linear_run",
                kind_not_in_graph="aten::add",
                prec=0.1)
            self._test_output_bf16(
                Linear_Scalar_Binary(torch.sub, 3, 32, bias=bias),
                torch.randn(52, 3),
                kind_in_graph="ipex_prepack::linear_run",
                kind_not_in_graph="aten::sub",
                prec=0.1)
            self._test_output_bf16(
                Linear_Scalar_Binary(torch.mul, 3, 32, bias=bias),
                torch.randn(52, 3),
                kind_in_graph="ipex_prepack::linear_run",
                kind_not_in_graph="aten::mul",
                prec=0.1)
            self._test_output_bf16(
                Linear_Scalar_Binary(torch.div, 3, 32, bias=bias),
                torch.randn(52, 3),
                kind_in_graph="ipex_prepack::linear_run",
                kind_not_in_graph="aten::div",
                prec=0.1)
    def test_output_linear_tensor_binary(self):
        for bias in [True, False]:
            self._test_output(
                Linear_Tensor_Binary(torch.add, 3, 32, bias=bias),
                torch.randn(52, 3),
                kind_in_graph="aten::linear",
                kind_not_in_graph="aten::add")
            self._test_output(
                Linear_Tensor_Binary(torch.sub, 3, 32, bias=bias),
                torch.randn(52, 3),
                kind_in_graph="aten::linear",
                kind_not_in_graph="aten::sub")
            self._test_output(
                Linear_Tensor_Binary(torch.mul, 3, 32, bias=bias),
                torch.randn(52, 3),
                kind_in_graph="aten::linear",
                kind_not_in_graph="aten::mul")
            self._test_output(
                Linear_Tensor_Binary(torch.div, 3, 32, bias=bias),
                torch.randn(52, 3),
                kind_in_graph="aten::linear",
                kind_not_in_graph="aten::div")
            self._test_output_bf16(
                Linear_Tensor_Binary(torch.add, 3, 32, bias=bias),
                torch.randn(52, 3),
                kind_in_graph="ipex_prepack::linear_run",
                kind_not_in_graph="aten::add",
                prec=0.1)
            self._test_output_bf16(
                Linear_Tensor_Binary(torch.sub, 3, 32, bias=bias),
                torch.randn(52, 3),
                kind_in_graph="ipex_prepack::linear_run",
                kind_not_in_graph="aten::sub",
                prec=0.1)
            self._test_output_bf16(
                Linear_Tensor_Binary(torch.mul, 3, 32, bias=bias),
                torch.randn(52, 3),
                kind_in_graph="ipex_prepack::linear_run",
                kind_not_in_graph="aten::mul",
                prec=0.1)
            self._test_output_bf16(
                Linear_Tensor_Binary(torch.div, 3, 32, bias=bias),
                torch.randn(52, 3),
                kind_in_graph="ipex_prepack::linear_run",
                kind_not_in_graph="aten::div",
                prec=0.2)
    def test_output_linear_relu(self):
        self._test_output(
            LinearRelu(3, 32, bias=True),
            torch.rand(32, 3),
            kind_in_graph="aten::linear")
        self._test_output_bf16(
            LinearRelu(3, 32, bias=True),
            torch.rand(32, 3),
            kind_in_graph="ipex_prepack::linear_relu_run",
            prec=0.02)
        self._test_output(
            LinearRelu(3, 32, bias=False),
            torch.rand(32, 3),
            kind_in_graph="aten::linear")
        self._test_output_bf16(
            LinearRelu(3, 32, bias=False),
            torch.rand(32, 3),
            kind_in_graph="ipex_prepack::linear_relu_run",
            prec=0.02)

    def test_output_linear_add(self):
        self._test_output(
            LinearAdd(3, 32, bias=True),
            torch.rand(32, 3),
            kind_in_graph="aten::linear")
    def test_output_linear_reshape_relu(self):
        self._test_output(
            Linear_Reshape_Relu(3, 32, (64, 16), bias=True),
            torch.rand(32, 3),
            kind_in_graph="aten::linear")

    def test_output_linear_bn(self):
        self._test_output(
            LinearBn(2, 32, 32, bias=True),
            torch.rand(1, 1, 32, 32),
            kind_in_graph="aten::linear")

    def test_output_linear_reshape_bn(self):
        self._test_output(
            Linear_Reshape_Bn(2, 32, 32, (1, 1, 64, 16), bias=True),
            torch.rand(1, 1, 32, 32),
            kind_in_graph="aten::linear")
    def test_output_linear_gelu(self):
        self._test_output(
            LinearGelu(3, 32, bias=True),
            torch.rand(32, 3),
            kind_in_graph="aten::linear")
        self._test_output_bf16(
            LinearGelu(3, 32, bias=True),
            torch.rand(32, 3),
            kind_in_graph="ipex_prepack::linear_gelu_run",
            prec=5e-3)
        self._test_output(
            LinearGelu(3, 32, bias=False),
            torch.rand(32, 3),
            kind_in_graph="aten::linear")
        self._test_output_bf16(
            LinearGelu(3, 32, bias=False),
            torch.rand(32, 3),
            kind_in_graph="ipex_prepack::linear_gelu_run",
            prec=5e-3)
    def test_output_linear_swish(self):
        self._test_output(
            LinearSwish_v1(3, 32, bias=True),
            torch.rand(32, 3),
            kind_in_graph="aten::linear")
        self._test_output_bf16(
            LinearSwish_v1(3, 32, bias=True),
            torch.rand(32, 3),
            kind_in_graph="ipex_prepack::linear_swish_run",
            prec=5e-3)
        self._test_output(
            LinearSwish_v1(3, 32, bias=False),
            torch.rand(32, 3),
            kind_in_graph="aten::linear")
        self._test_output_bf16(
            LinearSwish_v1(3, 32, bias=False),
            torch.rand(32, 3),
            kind_in_graph="ipex_prepack::linear_swish_run",
            prec=5e-3)
        self._test_output(
            LinearSwish(3, 32, bias=True),
            torch.rand(32, 3),
            kind_in_graph="aten::linear")
        self._test_output_bf16(
            LinearSwish(3, 32, bias=True),
            torch.rand(32, 3),
            kind_in_graph="ipex_prepack::linear_swish_run",
            prec=5e-3)
        self._test_output(
            LinearSwish(3, 32, bias=False),
            torch.rand(32, 3),
            kind_in_graph="aten::linear")
        self._test_output_bf16(
            LinearSwish(3, 32, bias=False),
            torch.rand(32, 3),
            kind_in_graph="ipex_prepack::linear_swish_run",
            prec=5e-3)
    def test_output_linear_sigmoid(self):
        self._test_output(
            LinearSigmoid(3, 32, bias=True),
            torch.rand(32, 3),
            kind_in_graph="aten::linear")
        self._test_output_bf16(
            LinearSigmoid(3, 32, bias=True),
            torch.rand(32, 3),
            kind_in_graph="ipex_prepack::linear_sigmoid_run",
            prec=5e-3)
        self._test_output(
            LinearSigmoid(3, 32, bias=False),
            torch.rand(32, 3),
            kind_in_graph="aten::linear")
        self._test_output_bf16(
            LinearSigmoid(3, 32, bias=False),
            torch.rand(32, 3),
            kind_in_graph="ipex_prepack::linear_sigmoid_run",
            prec=5e-3)
    def test_channel_shuffle(self):
        self._test_output(
            ChannelShuffle(10, 16, 50, 50, 4),
            torch.rand(10, 16, 50, 50),
            kind_in_graph="ipex::shuffle_2d")

    def test_jit_function(self):
        # test that both trace and script work for a plain function
        def fn(input, weight, bias):
            return F.linear(input, weight, bias)

        input = torch.randn(2, 4)
        weight = torch.randn(5, 4)
        bias = torch.randn(5)
        result = fn(input, weight, bias)
        scripted_fn = torch.jit.script(fn)
        traced_fn = torch.jit.trace(fn, (input, weight, bias))
        self.assertEqual(scripted_fn(input, weight, bias), result)
        self.assertEqual(traced_fn(input, weight, bias), result)
    def test_matmul_div(self):
        self._test_output(
            MatmulDiv(div_scalar=True, with_out=True),
            torch.randn(10, 3, 4),
            kind_in_graph="ipex::matmul_div",
            kind_not_in_graph=None)
        self._test_output(
            MatmulDiv(div_scalar=True, with_out=False),
            torch.randn(10, 3, 4),
            kind_in_graph="ipex::matmul_div",
            kind_not_in_graph=None)
        self._test_output(
            MatmulDiv(div_scalar=False, with_out=False),
            torch.randn(10, 3, 4),
            kind_in_graph="ipex::matmul_div",
            kind_not_in_graph=None)
        self._test_output(
            MatmulDiv(div_scalar=False, with_out=True),
            torch.randn(10, 3, 4),
            kind_in_graph="ipex::matmul_div",
            kind_not_in_graph=None)
        self._test_output_bf16(
            MatmulDiv(div_scalar=True, with_out=True),
            torch.randn(10, 3, 4, dtype=torch.bfloat16),
            kind_in_graph="ipex::matmul_div",
            kind_not_in_graph=None,
            prec=5e-2)
        self._test_output_bf16(
            MatmulDiv(div_scalar=True, with_out=False),
            torch.randn(10, 3, 4, dtype=torch.bfloat16),
            kind_in_graph="ipex::matmul_div",
            kind_not_in_graph=None,
            prec=5e-2)
        self._test_output_bf16(
            MatmulDiv(div_scalar=False, with_out=True),
            torch.randn(10, 3, 4, dtype=torch.bfloat16),
            kind_in_graph="ipex::matmul_div",
            kind_not_in_graph=None,
            prec=5e-3)
        self._test_output_bf16(
            MatmulDiv(div_scalar=False, with_out=False),
            torch.randn(10, 3, 4, dtype=torch.bfloat16),
            kind_in_graph="ipex::matmul_div",
            kind_not_in_graph=None,
            prec=5e-3)
    def test_ipex_softmax(self):
        self._test_output(
            AtenSoftmaxRepalce(),
            torch.rand(3, 4, 4),
            kind_in_graph="ipex::softmax")
        self._test_output_bf16(
            AtenSoftmaxRepalce(),
            torch.rand(3, 4, 4, dtype=torch.bfloat16),
            kind_in_graph="ipex::softmax",
            prec=5e-3)

    def test_ipex_batch_norm(self):
        self._test_output(
            AtenBatchNormRepalce(),
            torch.rand(10, 10, 4, 4),
            kind_in_graph="ipex::batch_norm")
        self._test_output_bf16(
            AtenBatchNormRepalce(),
            torch.rand(10, 10, 4, 4, dtype=torch.bfloat16),
            kind_in_graph="ipex::batch_norm",
            prec=5e-3)
    def test_restore_and_enable_inplace(self):
        class M(nn.Module):
            def __init__(self, eltwise_fn, params_dict={}):
                super(M, self).__init__()
                self.conv = torch.nn.Conv2d(3, 5, 3, 3)
                self.eltwise = eltwise_fn
                self.params_dict = params_dict

            def forward(self, x):
                x = x * 3.1
                x = self.eltwise(x, **self.params_dict)
                x = self.conv(x)
                return x

        for eltwise in ['sigmoid', 'tanh', 'celu', 'elu', 'hardsigmoid', 'hardswish', 'hardtanh', 'leaky_relu', 'relu6', 'relu', 'rrelu', 'selu', 'silu', 'clamp']:
            eltwise_fn_name = eltwise + '_'
            if eltwise in ['sigmoid', 'tanh', 'celu', 'relu', 'rrelu', 'selu']:
                # the in-place variant lives on torch, e.g. torch.sigmoid_(x)
                eltwise_fn = getattr(torch, eltwise_fn_name)
                eltwise_fn_outplace = getattr(torch, eltwise)
                m = M(eltwise_fn)
                m_outplace = M(eltwise_fn_outplace)
            elif eltwise == 'clamp':
                eltwise_fn = getattr(torch, eltwise_fn_name)
                m = M(eltwise_fn, {"min": 0, "max": 2})
            else:
                # the functional variant takes an inplace flag, e.g. F.elu(x, inplace=True)
                eltwise_fn = getattr(F, eltwise)
                m = M(eltwise_fn, {"inplace": True})
                m_outplace = M(eltwise_fn)

            with torch.no_grad():
                m.eval()
                m_outplace.eval()
                x = torch.randn(1, 3, 16, 16)
                x_outplace = torch.randn(1, 3, 16, 16)

                # test restore inplace
                traced = torch.jit.trace(m, x)
                trace_graph = traced.graph_for(x)
                self.assertTrue(any(n.kind() == "aten::" + eltwise_fn_name for n in trace_graph.nodes()))
                y = m(x)
                traced_y = traced(x)
                self.assertEqual(y, traced_y)

                # test enable inplace
                if eltwise == 'clamp':
                    continue
                traced_outplace = torch.jit.trace(m_outplace, x_outplace)
                trace_graph_outplace = traced_outplace.graph_for(x_outplace)
                self.assertTrue(any(n.kind() == "aten::" + eltwise_fn_name for n in trace_graph_outplace.nodes()))
                y_outplace = m_outplace(x_outplace)
                traced_y_outplace = traced_outplace(x_outplace)
                self.assertEqual(y_outplace, traced_y_outplace)
    def test_remove_bailout(self):
        batch_size = 8
        out_channels = 32
        in_channels = 3
        kernel_size = 3
        image_size = 64
        input_size = [batch_size, in_channels, image_size, image_size]
        x = torch.randn(input_size)
        with DisableTexprFuser():
            self._test_output(
                ConvRelu_Fixed(2, in_channels, out_channels, kernel_size=kernel_size, stride=1),
                x,
                kind_in_graph="ipex_prepack::convolution_relu_run",
                kind_not_in_graph="prim::BailOut")
            self._test_output_bf16(
                ConvRelu_Fixed(2, in_channels, out_channels, kernel_size=kernel_size, stride=1),
                x,
                kind_in_graph="ipex_prepack::convolution_relu_run",
                kind_not_in_graph="prim::BailOut")


if __name__ == '__main__':
    torch.manual_seed(2020)
    test = unittest.main()
| 41.138614
| 247
| 0.590654
| 12,996
| 99,720
| 4.231302
| 0.041705
| 0.029405
| 0.029005
| 0.050027
| 0.836261
| 0.808674
| 0.784524
| 0.763848
| 0.752464
| 0.737752
| 0
| 0.042588
| 0.298536
| 99,720
| 2,423
| 248
| 41.155592
| 0.743545
| 0.015243
| 0
| 0.704846
| 0
| 0
| 0.056016
| 0.039315
| 0
| 0
| 0
| 0.000413
| 0.030837
| 1
| 0.08419
| false
| 0
| 0.011258
| 0.011747
| 0.153696
| 0.000489
| 0
| 0
| 0
| null
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 6796202f23cc11130b9c55c1409fe898a8449234
| 153
| py
| Python
| oapispec/__init__.py
| rayepps/swaggerf
| c83ee18be2d0024091a1f4ea461b81dc5d934aca
| ["BSD-3-Clause"]
| 2
| 2020-02-06T05:46:18.000Z
| 2021-05-19T19:19:45.000Z
| oapispec/__init__.py
| rayepps/swaggerf
| c83ee18be2d0024091a1f4ea461b81dc5d934aca
| ["BSD-3-Clause"]
| 13
| 2020-01-23T07:31:46.000Z
| 2021-05-25T05:39:49.000Z
| oapispec/__init__.py
| rayepps/swaggerf
| c83ee18be2d0024091a1f4ea461b81dc5d934aca
| ["BSD-3-Clause"]
| 1
| 2021-05-18T05:46:05.000Z
| 2021-05-18T05:46:05.000Z
|
from oapispec.version import VERSION
from oapispec import model
from oapispec.schema import schema
from oapispec import doc
from oapispec import fields
| 21.857143
| 36
| 0.849673
| 22
| 153
| 5.909091
| 0.363636
| 0.461538
| 0.415385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.137255
| 153
| 6
| 37
| 25.5
| 0.984848
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 7
| db1cef36fc7036e18722f31018f7760f224d100d
| 812
| py
| Python
| tests/unit/test_order.py
| nicoloridulfo/Order-Matching-Engine
| 32c9b4d03099d5baab0d71ad214206c7595086ba
| ["MIT"]
| 33
| 2020-03-17T19:23:21.000Z
| 2022-03-29T06:24:47.000Z
| tests/unit/test_order.py
| jiangtiantu/Order-Matching-Engine
| 011fd99bfd6802580f49c1e7067394c74d0e9516
| ["MIT"]
| 5
| 2020-03-24T06:45:18.000Z
| 2022-03-29T16:52:35.000Z
| tests/unit/test_order.py
| jiangtiantu/Order-Matching-Engine
| 011fd99bfd6802580f49c1e7067394c74d0e9516
| ["MIT"]
| 12
| 2020-03-18T15:43:49.000Z
| 2022-01-20T21:05:13.000Z
|
from OrderMatchingEngine import *


def test_initialStates():
    order = Order(1)
    assert isinstance(order.order_id, int)
    assert isinstance(order.time, int)
    assert order.order_id == 1

    order = CancelOrder(1)
    assert isinstance(order.order_id, int)
    assert isinstance(order.time, int)
    assert order.order_id == 1

    order = MarketOrder(1, Side.BUY, 10)
    assert isinstance(order.order_id, int)
    assert isinstance(order.time, int)
    assert order.order_id == 1
    assert order.side == Side.BUY
    assert order.size == 10

    order = LimitOrder(1, Side.BUY, 10, 100)
    assert isinstance(order.order_id, int)
    assert isinstance(order.time, int)
    assert order.order_id == 1
    assert order.side == Side.BUY
    assert order.size == 10
    assert order.price == 100
| 25.375
| 44
| 0.676108
| 111
| 812
| 4.864865
| 0.198198
| 0.166667
| 0.311111
| 0.192593
| 0.744444
| 0.744444
| 0.744444
| 0.744444
| 0.744444
| 0.744444
| 0
| 0.034755
| 0.220443
| 812
| 31
| 45
| 26.193548
| 0.818325
| 0
| 0
| 0.695652
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.73913
| 1
| 0.043478
| false
| 0
| 0.043478
| 0
| 0.086957
| 0
| 0
| 0
| 0
| null
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| db2e9a57cbbc51af0b539e6e575ca69b71a0dd4a
| 64
| py
| Python
| test/run/t246.py
| timmartin/skulpt
| 2e3a3fbbaccc12baa29094a717ceec491a8a6750
| ["MIT"]
| 2,671
| 2015-01-03T08:23:25.000Z
| 2022-03-31T06:15:48.000Z
| test/run/t246.py
| timmartin/skulpt
| 2e3a3fbbaccc12baa29094a717ceec491a8a6750
| ["MIT"]
| 972
| 2015-01-05T08:11:00.000Z
| 2022-03-29T13:47:15.000Z
| test/run/t246.py
| timmartin/skulpt
| 2e3a3fbbaccc12baa29094a717ceec491a8a6750
| ["MIT"]
| 845
| 2015-01-03T19:53:36.000Z
| 2022-03-29T18:34:22.000Z
|
print type(4)
print type(444444444444444444444)
print type(4.5)
| 16
| 33
| 0.796875
| 10
| 64
| 5.1
| 0.5
| 0.529412
| 0.392157
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.413793
| 0.09375
| 64
| 3
| 34
| 21.333333
| 0.465517
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null
| null
| 0
| 0
| null
| null
| 1
| 1
| 0
| 0
| null
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 8
| c02702f519137a6bd2ce02cac53c42860f77039b
| 13,407
| py
| Python
| tasks/im-hiding/flags_and_teams.py
| irdkwmnsb/lkshl-ctf
| e5c0200ddc8ba73df5f321b87b9763fb1bbaba57
| ["MIT"]
| 3
| 2021-03-30T06:27:58.000Z
| 2021-04-03T17:56:35.000Z
| tasks/im-hiding/flags_and_teams.py
| irdkwmnsb/lkshl-ctf
| e5c0200ddc8ba73df5f321b87b9763fb1bbaba57
| ["MIT"]
| null
| null
| null
| tasks/im-hiding/flags_and_teams.py
| irdkwmnsb/lkshl-ctf
| e5c0200ddc8ba73df5f321b87b9763fb1bbaba57
| ["MIT"]
| null
| null
| null
|
data = {'GoqMLRDAbGWn': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_lmpFmMCm}', 'FChsdhyFoJb7': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_OuEgWt8G}', 'cxjQDMhRaGEi': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_OnKirjt3}', '6R9gh04xijvL': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_KOodydBm}', '6XNGpyFKD8UI': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_E0c1sc1k}', 'z9U4w5M6iAhr': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_wJdqBIFZ}', 'zLxUq06hQZas': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_rW01tYwA}', 'W9V8ydNaojIj': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_UP544cZN}', 'MH8QOCHGUH7L': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_0Wigfnq7}', 'Y0dtX9sn2wBF': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_HLNDsdU4}', 'uZMpWdOzZhVh': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_F1uUl8Py}', 'h84K3u8RCCiV': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_SAlDd2T6}', 'c0DHKU8K7F0M': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_aNt3dzvV}', 'IpKWgxHCAwzD': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_vNNUFeqg}', '6M3F3BYLI9d6': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_qyU2oub8}', 'zIkPNBX1am9j': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_S23NJfYl}', 'NujQa1cM9kXr': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_r6sNDBRf}', 'C88xyVgLYYc4': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_riAP4fvJ}', 'tLnKRgxuFVHN': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_iUr3Spfm}', 'H7951eZqN8Vu': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_SW0pyvoc}', 'JcTFjJEuqVIJ': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_R28IyQD2}', 'rBg1DQWi64PL': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_aALdguGb}', 'njYmPguFUtgy': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_knxCGRXg}', 'nUs4QURaftXM': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_f7eM6CZ2}', 'Jm0SbxuPDgYJ': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_lCyWXrzS}', 'KBmlb3ZWJPTX': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_o742Txym}', 'VJxrqGrMDb9D': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_Kr7Xxxa7}', 'DSTinL4ZJpqy': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_H1SKtwlf}', 'j4yqLHYMYfK7': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_Fy2juCqm}', 'FusGT40nnN3V': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_K6m63lVi}', 'nnb8AfRSQcTh': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_fjWTHmOI}', 'O0qqOPvqjiWO': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_JEjLNLN2}', 'cwS6OMc6m9WY': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_OCVSIkhN}', 'vqsYMQGtepUU': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_I9mY53rY}', 'MMxktobusSAW': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_wcIzMw05}', 'A2H2U3T7pF5Q': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_oMs6xVZ5}', 'ygNHeAbA4xIN': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_sMWqRf3P}', '6zgpDwdSAOgd': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_M1zdtE7v}', 'BMfgwoceGm7v': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_nFvdHlZC}', 'QQxuYppa0zyh': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_PDEQnRgP}', 'BbZkcNoLdf1d': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_Un0j2ntL}', '27zSI81RxphZ': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_fOtv6NBu}', 'WQje8FOky6P4': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_V9Ibwof6}', 'ZlSztvmMIV9D': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_cFj2crjI}', 'Iyv0FYZIC6wv': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_2BHV1U9J}', 'PIX4pJ3072Ld': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_ly8WweWV}', 'cAInnUlE1Z1V': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_zZG2MRs5}', 'rMpD7g854F8l': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_Jp3msWlv}', 'kEa9fQTjBCDc': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_LzCS1sD0}', '96KZXZEDwcqW': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_rj4T3VEM}', '90C4no86uaN0': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_AkGlBlr1}', 'Fn7q0pBVYfz5': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_bhoSHuww}', 'OlAywRjHwbql': 
'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_J053FSCx}', 'jBx2lkhR73OV': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_iyE9u9RC}', 's65Rr43cWIfO': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_0KuOxW8i}', 'EFhxbTBynjHH': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_DOV9c9vg}', '0fl27x1jDhVw': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_WHmfprJx}', 'GodNiA1SKwRk': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_jPh0p3UQ}', '0QiydH14153S': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_UsbsoWcl}', 'AGy3ewA1W4fz': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_9RUzBz88}', 'Gi1dwhjlD5lp': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_dV5eh26Q}', '32ISB2G2rbQy': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_vGLgDmEO}', '3OoU3HUmuaLq': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_4t0aZoJ8}', 'lYODemC5joBR': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_HVcobM4l}', 't1VY3eHhlWn2': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_XRYMXBEB}', '2wI0rCj9Gc4X': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_jWas99sa}', 'eeD3Eq9D9qeC': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_YDLPnced}', 'zbue0FXBa4zl': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_mIGPkx2D}', 'z0ccfeYvjANQ': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_H1YDHX7e}', 'NdSbqerR24hx': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_R4i94UiN}', '0M4HOLz4ufSk': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_jC4DnejJ}', 'quwtODoE5uWj': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_tTE0VEjh}', 'U5MCOxvpoP1o': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_ohqMEKgh}', '3YYX7yoo1mcr': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_XVp78tBI}', 'WwgDMsqD5OcD': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_BPylqC8x}', '49d7ubGbdtBD': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_x4bZJjwK}', '9t27PWgdH65i': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_6kPAqnUM}', 'JVKhOMmPYtVd': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_SgiaeADQ}', 'yOLLPHXvGQro': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_VW2FnuQg}', 'QPX0zGRlcj4n': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_hQqp9xuF}', 'J2GFvqjd5tg7': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_35oJ14kS}', 'bCJmajSFPRw4': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_mG9MbaP4}', 'UeD2Am9VdmVV': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_WYVyxI8n}', 'OVTwjY7pB6Zi': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_P3DFL2pg}', 'FSmDEK59OgBc': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_KcbFJWfL}', 'Y14LGjESpaeQ': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_EK2Wwrfu}', '0hk9Qfr28pfm': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_NdZvFSO8}', 'ExJRFl92Iydb': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_L5Rco7vZ}', 'DNPmygOABptD': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_qvhwQPvS}', 'qiIp0rVEc5zM': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_1ZJQ3DPK}', 'p5X5zkhdvpr4': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_nASs2w9m}', 'wFULSfUyI3dE': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_jVU45hTT}', '17PtYpehq7KN': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_mYd37cH0}', 'zIOgSmsZZ1hV': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_DFE05fw9}', 'OEtTKKucMaHg': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_5Xebukq0}', 'gvgiDRR4l7PO': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_nWAl1P2H}', 'FC1Qy3EOJDmi': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_R25qtkWQ}', 'I1TTnu6CvQLg': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_2WZqftfo}', 'qBtYm4tlClOn': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_5DLEuA7x}', 'K1iWSLUA7TjU': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_noGLbQlN}', 'XDQfuzgYqvpD': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_9ApeptKU}', 'lucbamSPR6o6': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_LmSbT4hJ}', '7EHChnbyQkYA': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_HYEks4HJ}', 'sOL4BqCK7sFC': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_m6FEXo5e}', 'z9ek0EqjnblC': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_3gyWL6IK}', 'QDEW2hxzimEL': 
'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_0VEWsk88}', 'pRHnMMIZHT08': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_ow3UWKX6}', 'jptkzdK8jtD3': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_WJZNvZZS}', 'NXW9YE1OIXZr': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_trB5on4j}', 'm6BzhExlSQu8': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_mbrPTOHP}', 'NSJ9awoddIaD': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_hK2XMOMR}', 'YJRCRpRKzTYu': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_1nEUGAyR}', 'UHjM5dyheO1g': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_RF9HsQXH}', '7cgL2cRgZKsh': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_KCXpAfGZ}', 'EpnSD0nRTXcc': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_atIpjH1l}', '6xHLCKRKKjNd': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_4B55YTom}', 'bXbeVHQGemB6': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_r6Vh39f7}', '6Vt8UD1nJvCf': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_VKCQnQ0Z}', 'v3dGNGOxAzEK': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_1MKjebf1}', 'xV4CIkUQWfQy': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_A5lniKJC}', 'yo6MyFO37eg2': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_NNOR8N0B}', 'Z4IT3S6h6C41': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_zNwldW6p}', 'rc9Pj5sJK2aT': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_R2dNr6F8}', 'D7K2FZm8xblo': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_EAViBuc4}', 'BSRpVJV7oPJ7': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_HT8E8NMd}', '7llltu8bHSZz': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_9cEYBSiQ}', 'nxBkkao1X7pU': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_QLUVIXD5}', 'osYcLaHH3w3h': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_c95X82BU}', 'LRs3sGuiuR0H': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_zgEuluZR}', 'gCQ5EdzEse0s': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_kqRVbogy}', 'orZmOn0840Eu': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_uHdIWhrb}', 'TnZHEcBJckdg': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_PFvdl20L}', 'N8CYH9gxiKjn': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_OJzTEcgd}', 'AlZ1u5DCVdTX': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_HgYjjpxP}', '5VP5jYtijjfB': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_E6M7b36T}', 'cpdluUmCYvTO': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_U5KHmNSJ}', 'SXjF53o7z1w5': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_4T5wS7CI}', 'YKh49Cq7J95c': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_jn7Ok2cv}', 'rsCOne2If67y': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_HZdAieMb}', '7whNnPVlbs6R': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_vwitLxL3}', '0zQMtusnatPy': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_mgSKf2J8}', '79DT5e7Zu60V': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_4oiez4CN}', 'u9MFMVzba7QF': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_Mz7VmDdx}', 'yaiVVL9OOUov': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_CMgB5Mtv}', 'fBQyjNQgVFYa': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_uaLF9EzN}', 'AJFPfs4hpqi9': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_O8HJM5Jg}', 'otToAsehSohC': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_inYCAggq}', '8gvs825PvNAi': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_PGK7CsVV}', 'pxvSZk8fmjY7': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_ywmGeiBL}', 'Y9SJ35wrJQ96': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_HSlq5ajJ}', 'kGSl8hmBok2v': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_2WYXn5XZ}', 't2cLfQzHnjPF': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_fDkREvel}', 'bKfAXXVREByy': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_mbS3XP9O}', 'ymbbqTb0MyfA': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_Jyp3wyX2}', 'bHIFc9KX9JDU': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_1HAuhW67}', 'kI5urDV1Lbao': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_sd03C1b0}', '0RW7AYPb3c3P': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_aLUWDKpF}', 'YmdgONKD8sG0': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_Yb3IyZEF}', 'Rcs0egVus0Pj': 
'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_ZfCTs7sm}', 'gtTqyrcSinxh': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_oyReCrC7}', 'WV5m5h2pMCSs': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_zPKvWX2P}', 'pLhbNOXXTy66': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_W0lfFuXu}', 'lK9Vk9Mubj2D': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_nrz2gM1Z}', 'BbqmnQcDuQum': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_h2kj4AKy}', 'J9LfQvgvqcfL': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_8sMX709Q}', 'bODeHNkvOvuo': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_6EdrgCxp}', 'Xy7K88imNnzJ': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_sNMUt0lb}', 'fCooUPgEJQAK': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_YpBV2S1s}', '5hWsBwdLCsLE': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_rslFyXwy}', 's7YgYYJhter3': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_uXrRTIGS}', 'SeJFvE6VqWsM': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_pbtmib1u}', '5e4lwIunlfpj': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_LfxPIlBy}', '1QycjAJBrUjL': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_TYW7c2N9}', 'UM5iRsUXivOY': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_D3nYtN1v}', 'Rs3q5CuPEj1O': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_Vaza5jXc}', 'cMIpUz6VmfRA': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_ZJ7lKsOH}', 'RSYw0mMqmbjW': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_VkXeVBBN}', 'HSNTx1LIVC1g': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_AWaAKP7X}', 'I0Rbh3n4OY98': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_GsK9hEq2}', 'QlmklPBfUcL2': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_z2KEKpSK}', 'jwFpePserw86': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_XtaznnNA}', 'dksmUIoxigr2': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_0eurs2id}', 'EGjCWJqPMVwJ': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_PPXoJqwq}', 'frN2m9JUF440': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_qGdE1fRb}', 'mXHQD5uJTeuL': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_hrIXvyCM}', 'XlqpBxnuCSPT': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_oygYE1pO}', 'GFjE74czcnSj': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_TNlmv3Fl}', 'sgSN7UH5Ouex': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_a9pzlNvs}', 'r7ioonGscyLh': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_gTOXiOPi}', 'L5J9bYoYfdsN': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_IiHEMvAj}', 'U18V11waUNKN': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_UZlc5nAk}', '9E53hCTR7ZIA': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_m7GL3b9a}', 'QsVFVnYB4lva': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_Wf2CrNO1}', 'HWKExQKCZlXs': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_gwMdLomK}', 'biTYZ1lu1hrd': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_zhsqW5FQ}', 'IZ8HRJyVRW42': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_ZOuv7HLD}', 'TyvSPz75qzIS': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_SYBkONdq}', 'riVrfGTVDGhw': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_NGe5sqlm}', 'Nby8NeGcaWap': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_hwjI26QS}', 'ZlF18lq5x4OA': 'LKSHL{th1s_is_a_trivial_t4sk_on_a_CTF_8saSWAP8}'}
| 13,407
| 13,407
| 0.8506
| 2,201
| 13,407
| 4.454339
| 0.185825
| 0.183599
| 0.224398
| 0.244798
| 0.591595
| 0.591595
| 0.591595
| 0.591595
| 0.591595
| 0
| 0
| 0.082424
| 0.02991
| 13,407
| 1
| 13,407
| 13,407
| 0.671382
| 0
| 0
| 0
| 0
| 0
| 0.880072
| 0.701074
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
|